From 465eb361d39d08029f30b36c769252c9f83e7949 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 12 Aug 2020 13:33:50 -0700 Subject: [PATCH] feat!: migrate to use microgen (#34) * feat!: migrate to use microgen * update sample * update sample * update sample * Update UPGRADING.md Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .coveragerc | 13 +- README.rst | 7 +- UPGRADING.md | 169 + docs/UPGRADING.md | 1 + docs/datalabeling_v1beta1/services.rst | 6 + docs/datalabeling_v1beta1/types.rst | 5 + docs/gapic/v1beta1/api.rst | 6 - docs/gapic/v1beta1/types.rst | 5 - docs/index.rst | 16 +- google/cloud/datalabeling.py | 29 - google/cloud/datalabeling/__init__.py | 460 + google/cloud/datalabeling/py.typed | 2 + google/cloud/datalabeling_v1beta1/__init__.py | 292 +- .../datalabeling_v1beta1/gapic/__init__.py | 0 .../gapic/data_labeling_service_client.py | 3380 ------ .../data_labeling_service_client_config.py | 212 - .../cloud/datalabeling_v1beta1/gapic/enums.py | 228 - .../gapic/transports/__init__.py | 0 .../data_labeling_service_grpc_transport.py | 580 - .../datalabeling_v1beta1/proto/__init__.py | 0 .../proto/annotation_pb2.py | 2583 ----- .../proto/annotation_pb2_grpc.py | 3 - .../proto/annotation_spec_set_pb2.py | 278 - .../proto/annotation_spec_set_pb2_grpc.py | 3 - .../proto/data_labeling_service_pb2.py | 5074 --------- .../proto/data_labeling_service_pb2_grpc.py | 1571 --- .../proto/data_payloads_pb2.py | 448 - .../proto/data_payloads_pb2_grpc.py | 3 - .../datalabeling_v1beta1/proto/dataset_pb2.py | 2278 ---- .../proto/dataset_pb2_grpc.py | 3 - .../proto/evaluation_job_pb2.py | 1036 -- .../proto/evaluation_job_pb2_grpc.py | 3 - .../proto/evaluation_pb2.py | 1280 --- .../proto/evaluation_pb2_grpc.py | 3 - .../proto/human_annotation_config_pb2.py | 1326 --- .../proto/human_annotation_config_pb2_grpc.py | 3 - 
.../proto/instruction_pb2.py | 414 - .../proto/instruction_pb2_grpc.py | 3 - .../proto/operations_pb2.py | 1918 ---- .../proto/operations_pb2_grpc.py | 3 - google/cloud/datalabeling_v1beta1/py.typed | 2 + .../services}/__init__.py | 14 +- .../data_labeling_service}/__init__.py | 18 +- .../data_labeling_service/async_client.py | 3140 ++++++ .../services/data_labeling_service/client.py | 3227 ++++++ .../services/data_labeling_service/pagers.py | 1209 +++ .../transports/__init__.py | 38 + .../data_labeling_service/transports/base.py | 800 ++ .../data_labeling_service/transports/grpc.py | 1201 ++ .../transports/grpc_asyncio.py | 1231 +++ google/cloud/datalabeling_v1beta1/types.py | 80 - .../datalabeling_v1beta1/types/__init__.py | 301 + .../datalabeling_v1beta1/types/annotation.py | 588 + .../types/annotation_spec_set.py | 92 + .../types/data_labeling_service.py | 1168 ++ .../types/data_payloads.py | 112 + .../datalabeling_v1beta1/types/dataset.py | 555 + .../datalabeling_v1beta1/types/evaluation.py | 334 + .../types/evaluation_job.py | 339 + .../types/human_annotation_config.py | 326 + .../datalabeling_v1beta1/types/instruction.py | 115 + .../datalabeling_v1beta1/types/operations.py | 494 + mypy.ini | 3 + noxfile.py | 8 +- .../snippets/create_annotation_spec_set.py | 53 +- .../create_annotation_spec_set_test.py | 10 +- samples/snippets/create_instruction.py | 73 +- samples/snippets/create_instruction_test.py | 17 +- samples/snippets/export_data.py | 62 +- samples/snippets/import_data.py | 44 +- samples/snippets/import_data_test.py | 17 +- samples/snippets/label_image.py | 64 +- samples/snippets/label_image_test.py | 34 +- samples/snippets/label_text.py | 65 +- samples/snippets/label_text_test.py | 30 +- samples/snippets/label_video.py | 61 +- samples/snippets/label_video_test.py | 34 +- samples/snippets/manage_dataset.py | 131 +- samples/snippets/manage_dataset_test.py | 18 +- samples/snippets/noxfile.py | 26 +- samples/snippets/testing_lib.py | 16 +- 
.../fixup_datalabeling_v1beta1_keywords.py | 211 + setup.py | 15 +- synth.metadata | 6 +- synth.py | 56 +- .../gapic/datalabeling_v1beta1/__init__.py | 1 + .../test_data_labeling_service.py | 9667 +++++++++++++++++ ...st_data_labeling_service_client_v1beta1.py | 1712 --- 88 files changed, 26526 insertions(+), 24936 deletions(-) create mode 100644 UPGRADING.md create mode 120000 docs/UPGRADING.md create mode 100644 docs/datalabeling_v1beta1/services.rst create mode 100644 docs/datalabeling_v1beta1/types.rst delete mode 100644 docs/gapic/v1beta1/api.rst delete mode 100644 docs/gapic/v1beta1/types.rst delete mode 100644 google/cloud/datalabeling.py create mode 100644 google/cloud/datalabeling/__init__.py create mode 100644 google/cloud/datalabeling/py.typed delete mode 100644 google/cloud/datalabeling_v1beta1/gapic/__init__.py delete mode 100644 google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client.py delete mode 100644 google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client_config.py delete mode 100644 google/cloud/datalabeling_v1beta1/gapic/enums.py delete mode 100644 google/cloud/datalabeling_v1beta1/gapic/transports/__init__.py delete mode 100644 google/cloud/datalabeling_v1beta1/gapic/transports/data_labeling_service_grpc_transport.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/__init__.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/annotation_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/annotation_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2.py delete mode 100644 
google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/dataset_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/dataset_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/evaluation_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/evaluation_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/instruction_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/instruction_pb2_grpc.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/operations_pb2.py delete mode 100644 google/cloud/datalabeling_v1beta1/proto/operations_pb2_grpc.py create mode 100644 google/cloud/datalabeling_v1beta1/py.typed rename google/{ => cloud/datalabeling_v1beta1/services}/__init__.py (71%) rename google/cloud/{ => datalabeling_v1beta1/services/data_labeling_service}/__init__.py (70%) create mode 100644 google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py create mode 100644 google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py create mode 100644 google/cloud/datalabeling_v1beta1/services/data_labeling_service/pagers.py create mode 100644 google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/__init__.py create mode 100644 google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/base.py create mode 100644 google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc.py create mode 100644 google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc_asyncio.py 
delete mode 100644 google/cloud/datalabeling_v1beta1/types.py create mode 100644 google/cloud/datalabeling_v1beta1/types/__init__.py create mode 100644 google/cloud/datalabeling_v1beta1/types/annotation.py create mode 100644 google/cloud/datalabeling_v1beta1/types/annotation_spec_set.py create mode 100644 google/cloud/datalabeling_v1beta1/types/data_labeling_service.py create mode 100644 google/cloud/datalabeling_v1beta1/types/data_payloads.py create mode 100644 google/cloud/datalabeling_v1beta1/types/dataset.py create mode 100644 google/cloud/datalabeling_v1beta1/types/evaluation.py create mode 100644 google/cloud/datalabeling_v1beta1/types/evaluation_job.py create mode 100644 google/cloud/datalabeling_v1beta1/types/human_annotation_config.py create mode 100644 google/cloud/datalabeling_v1beta1/types/instruction.py create mode 100644 google/cloud/datalabeling_v1beta1/types/operations.py create mode 100644 mypy.ini create mode 100644 scripts/fixup_datalabeling_v1beta1_keywords.py create mode 100644 tests/unit/gapic/datalabeling_v1beta1/__init__.py create mode 100644 tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py delete mode 100644 tests/unit/gapic/v1beta1/test_data_labeling_service_client_v1beta1.py diff --git a/.coveragerc b/.coveragerc index dd39c85..a70f56b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -21,15 +21,14 @@ branch = True [report] fail_under = 100 show_missing = True +omit = google/cloud/datalabeling/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound \ No newline at end of file diff --git a/README.rst b/README.rst index 068e157..53a3f13 100644 --- a/README.rst +++ b/README.rst @@ -44,14 +44,13 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ - -Python >= 3.5 - +Python >= 3.6 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python == 2.7. -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +The last version of this library compatible with Python 2.7 is google-cloud-datalabeling==0.4.1. Mac/Linux diff --git a/UPGRADING.md b/UPGRADING.md new file mode 100644 index 0000000..b7f71dd --- /dev/null +++ b/UPGRADING.md @@ -0,0 +1,169 @@ +# 1.0.0 Migration Guide + +The 1.0 release of the `google-cloud-datalabeling` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. + +If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-datalabeling/issues). + +## Supported Python Versions + +> **WARNING**: Breaking change + +The 1.0.0 release requires Python 3.6+. + + +## Method Calls + +> **WARNING**: Breaking change + +Methods expect request objects. We provide a script that will convert most common use cases. + +* Install the library + +```py +python3 -m pip install google-cloud-datalabeling +``` + +* The script `fixup_datalabeling_v1beta1_keywords.py` is shipped with the library. It expects an input directory (with the code to convert) and an empty destination directory. 
+ +```sh +$ fixup_datalabeling_v1beta1_keywords.py --input-directory .samples/ --output-directory samples/ +``` + +**Before:** +```py +from google.cloud import datalabeling + +client = datalabeling.DataLabelingServiceClient() + +datasets = client.list_datasets(parent="projects/project") +``` + + +**After:** +```py +from google.cloud import datalabeling + +client = datalabeling.DataLabelingServiceClient() + +datasets = client.list_datasets(request={"parent": "projects/project"}) +``` + +### More Details + +In `google-cloud-datalabeling<1.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters. + +**Before:** +```py + def create_dataset( + self, + parent, + dataset, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): +``` + +In the 1.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional. + +Some methods have additional keyword only parameters. The available parameters depend on the [`google.api.method_signature` annotation](https://github.com/googleapis/googleapis/blob/master/google/cloud/datalabeling/v1beta1/data_labeling_service.proto#L48) specified by the API producer. + + +**After:** +```py + def create_dataset( + self, + request: data_labeling_service.CreateDatasetRequest = None, + *, + parent: str = None, + dataset: gcd_dataset.Dataset = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_dataset.Dataset: +``` + +> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive. +> Passing both will result in an error. 
+ + +Both of these calls are valid: + +```py +response = client.create_dataset( + request={ + "parent": parent, + "dataset": dataset + } +) +``` + +```py +response = client.create_dataset( + parent=parent, + dataset=dataset +) +``` + +This call is invalid because it mixes `request` with a keyword argument `dataset`. Executing this code +will result in an error. + +```py +response = client.create_dataset( + request={ + "parent": parent + }, + dataset=dataset +) +``` + + + +## Enums and Types + + +> **WARNING**: Breaking change + +The submodules `enums` and `types` have been removed. + +**Before:** +```py +from google.cloud import datalabeling + +data_type = datalabeling.enums.DataType.IMAGE +dataset = datalabeling.types.Dataset(display_name="name") +``` + + +**After:** +```py +from google.cloud import datalabeling + +data_type = datalabeling.DataType.IMAGE +dataset = datalabeling.Dataset(display_name="name") +``` + +## Path Helper Methods +The following path helper methods have been removed. Please construct the paths manually. 
+ +```py +project="project" +dataset="dataset" +annotated_dataset="annotated_dataset" +annotation_spec_set="annotation_spec_set" +data_item="data_item" +evaluation="evaluation" +evaluation_job="evaluation_job" +example="example" +instruction="instruction" + +annotated_dataset_path = f'projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}' +annotation_spec_set_path = f'projects/{project}/annotationSpecSets/{annotation_spec_set}' +data_item_path=f'projects/{project}/datasets/{dataset}/dataItems/{data_item}' +dataset_path=f'projects/{project}/datasets/{dataset}' +evaluation_path=f'projects/{project}/datasets/{dataset}/evaluations/{evaluation}' +evaluation_job_path=f'projects/{project}/evaluationJobs/{evaluation_job}' +example_path=f'projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}/examples/{example}' +instruction_path=f'projects/{project}/instructions/{instruction}' +project_path=f'projects/{project}' +``` diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md new file mode 120000 index 0000000..01097c8 --- /dev/null +++ b/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/docs/datalabeling_v1beta1/services.rst b/docs/datalabeling_v1beta1/services.rst new file mode 100644 index 0000000..29f66fb --- /dev/null +++ b/docs/datalabeling_v1beta1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Datalabeling v1beta1 API +================================================== + +.. automodule:: google.cloud.datalabeling_v1beta1.services.data_labeling_service + :members: + :inherited-members: diff --git a/docs/datalabeling_v1beta1/types.rst b/docs/datalabeling_v1beta1/types.rst new file mode 100644 index 0000000..634c148 --- /dev/null +++ b/docs/datalabeling_v1beta1/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Datalabeling v1beta1 API +=============================================== + +.. 
automodule:: google.cloud.datalabeling_v1beta1.types + :members: diff --git a/docs/gapic/v1beta1/api.rst b/docs/gapic/v1beta1/api.rst deleted file mode 100644 index 78258fa..0000000 --- a/docs/gapic/v1beta1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Data Labeling API -============================ - -.. automodule:: google.cloud.datalabeling_v1beta1 - :members: - :inherited-members: \ No newline at end of file diff --git a/docs/gapic/v1beta1/types.rst b/docs/gapic/v1beta1/types.rst deleted file mode 100644 index 07d64dc..0000000 --- a/docs/gapic/v1beta1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Data Labeling API Client -================================== - -.. automodule:: google.cloud.datalabeling_v1beta1.types - :members: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index 09f14ae..8e23d1a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,9 +7,19 @@ Api Reference .. toctree:: :maxdepth: 2 - gapic/v1beta1/api - gapic/v1beta1/types - + datalabeling_v1beta1/services + datalabeling_v1beta1/types + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 1.x release of this library. + +.. toctree:: + :maxdepth: 2 + + UPGRADING + Changelog --------- diff --git a/google/cloud/datalabeling.py b/google/cloud/datalabeling.py deleted file mode 100644 index 60fddbd..0000000 --- a/google/cloud/datalabeling.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import - -from google.cloud.datalabeling_v1beta1 import DataLabelingServiceClient -from google.cloud.datalabeling_v1beta1 import enums -from google.cloud.datalabeling_v1beta1 import types - - -__all__ = ( - "enums", - "types", - "DataLabelingServiceClient", -) diff --git a/google/cloud/datalabeling/__init__.py b/google/cloud/datalabeling/__init__.py new file mode 100644 index 0000000..98edb7c --- /dev/null +++ b/google/cloud/datalabeling/__init__.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.datalabeling_v1beta1.services.data_labeling_service.async_client import ( + DataLabelingServiceAsyncClient, +) +from google.cloud.datalabeling_v1beta1.services.data_labeling_service.client import ( + DataLabelingServiceClient, +) +from google.cloud.datalabeling_v1beta1.types.annotation import Annotation +from google.cloud.datalabeling_v1beta1.types.annotation import AnnotationMetadata +from google.cloud.datalabeling_v1beta1.types.annotation import AnnotationSentiment +from google.cloud.datalabeling_v1beta1.types.annotation import AnnotationSource +from google.cloud.datalabeling_v1beta1.types.annotation import AnnotationType +from google.cloud.datalabeling_v1beta1.types.annotation import AnnotationValue +from google.cloud.datalabeling_v1beta1.types.annotation import BoundingPoly +from google.cloud.datalabeling_v1beta1.types.annotation import ( + ImageBoundingPolyAnnotation, +) +from google.cloud.datalabeling_v1beta1.types.annotation import ( + ImageClassificationAnnotation, +) +from google.cloud.datalabeling_v1beta1.types.annotation import ImagePolylineAnnotation +from google.cloud.datalabeling_v1beta1.types.annotation import ( + ImageSegmentationAnnotation, +) +from google.cloud.datalabeling_v1beta1.types.annotation import NormalizedBoundingPoly +from google.cloud.datalabeling_v1beta1.types.annotation import NormalizedPolyline +from google.cloud.datalabeling_v1beta1.types.annotation import NormalizedVertex +from google.cloud.datalabeling_v1beta1.types.annotation import ObjectTrackingFrame +from google.cloud.datalabeling_v1beta1.types.annotation import OperatorMetadata +from google.cloud.datalabeling_v1beta1.types.annotation import Polyline +from google.cloud.datalabeling_v1beta1.types.annotation import SequentialSegment +from google.cloud.datalabeling_v1beta1.types.annotation import ( + TextClassificationAnnotation, +) +from google.cloud.datalabeling_v1beta1.types.annotation import ( + TextEntityExtractionAnnotation, +) +from 
google.cloud.datalabeling_v1beta1.types.annotation import TimeSegment +from google.cloud.datalabeling_v1beta1.types.annotation import Vertex +from google.cloud.datalabeling_v1beta1.types.annotation import ( + VideoClassificationAnnotation, +) +from google.cloud.datalabeling_v1beta1.types.annotation import VideoEventAnnotation +from google.cloud.datalabeling_v1beta1.types.annotation import ( + VideoObjectTrackingAnnotation, +) +from google.cloud.datalabeling_v1beta1.types.annotation_spec_set import AnnotationSpec +from google.cloud.datalabeling_v1beta1.types.annotation_spec_set import ( + AnnotationSpecSet, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + CreateAnnotationSpecSetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + CreateDatasetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + CreateEvaluationJobRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + CreateInstructionRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + DeleteAnnotatedDatasetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + DeleteAnnotationSpecSetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + DeleteDatasetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + DeleteEvaluationJobRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + DeleteInstructionRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ExportDataRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + GetAnnotatedDatasetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + GetAnnotationSpecSetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import 
( + GetDataItemRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + GetDatasetRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + GetEvaluationJobRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + GetEvaluationRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + GetExampleRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + GetInstructionRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ImportDataRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + LabelImageRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + LabelTextRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + LabelVideoRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListAnnotatedDatasetsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListAnnotatedDatasetsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListAnnotationSpecSetsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListAnnotationSpecSetsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListDataItemsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListDataItemsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListDatasetsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListDatasetsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListEvaluationJobsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import 
( + ListEvaluationJobsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListExamplesRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListExamplesResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListInstructionsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ListInstructionsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + PauseEvaluationJobRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + ResumeEvaluationJobRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + SearchEvaluationsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + SearchEvaluationsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + SearchExampleComparisonsRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + SearchExampleComparisonsResponse, +) +from google.cloud.datalabeling_v1beta1.types.data_labeling_service import ( + UpdateEvaluationJobRequest, +) +from google.cloud.datalabeling_v1beta1.types.data_payloads import ImagePayload +from google.cloud.datalabeling_v1beta1.types.data_payloads import TextPayload +from google.cloud.datalabeling_v1beta1.types.data_payloads import VideoPayload +from google.cloud.datalabeling_v1beta1.types.data_payloads import VideoThumbnail +from google.cloud.datalabeling_v1beta1.types.dataset import AnnotatedDataset +from google.cloud.datalabeling_v1beta1.types.dataset import AnnotatedDatasetMetadata +from google.cloud.datalabeling_v1beta1.types.dataset import BigQuerySource +from google.cloud.datalabeling_v1beta1.types.dataset import ClassificationMetadata +from google.cloud.datalabeling_v1beta1.types.dataset import DataItem +from 
google.cloud.datalabeling_v1beta1.types.dataset import DataType +from google.cloud.datalabeling_v1beta1.types.dataset import Dataset +from google.cloud.datalabeling_v1beta1.types.dataset import Example +from google.cloud.datalabeling_v1beta1.types.dataset import GcsDestination +from google.cloud.datalabeling_v1beta1.types.dataset import GcsFolderDestination +from google.cloud.datalabeling_v1beta1.types.dataset import GcsSource +from google.cloud.datalabeling_v1beta1.types.dataset import InputConfig +from google.cloud.datalabeling_v1beta1.types.dataset import LabelStats +from google.cloud.datalabeling_v1beta1.types.dataset import OutputConfig +from google.cloud.datalabeling_v1beta1.types.dataset import TextMetadata +from google.cloud.datalabeling_v1beta1.types.evaluation import ( + BoundingBoxEvaluationOptions, +) +from google.cloud.datalabeling_v1beta1.types.evaluation import ClassificationMetrics +from google.cloud.datalabeling_v1beta1.types.evaluation import ConfusionMatrix +from google.cloud.datalabeling_v1beta1.types.evaluation import Evaluation +from google.cloud.datalabeling_v1beta1.types.evaluation import EvaluationConfig +from google.cloud.datalabeling_v1beta1.types.evaluation import EvaluationMetrics +from google.cloud.datalabeling_v1beta1.types.evaluation import ObjectDetectionMetrics +from google.cloud.datalabeling_v1beta1.types.evaluation import PrCurve +from google.cloud.datalabeling_v1beta1.types.evaluation_job import Attempt +from google.cloud.datalabeling_v1beta1.types.evaluation_job import EvaluationJob +from google.cloud.datalabeling_v1beta1.types.evaluation_job import ( + EvaluationJobAlertConfig, +) +from google.cloud.datalabeling_v1beta1.types.evaluation_job import EvaluationJobConfig +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + BoundingPolyConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import EventConfig +from google.cloud.datalabeling_v1beta1.types.human_annotation_config 
import ( + HumanAnnotationConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + ImageClassificationConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + ObjectDetectionConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + ObjectTrackingConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + PolylineConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + SegmentationConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + SentimentConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + StringAggregationType, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + TextClassificationConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + TextEntityExtractionConfig, +) +from google.cloud.datalabeling_v1beta1.types.human_annotation_config import ( + VideoClassificationConfig, +) +from google.cloud.datalabeling_v1beta1.types.instruction import CsvInstruction +from google.cloud.datalabeling_v1beta1.types.instruction import Instruction +from google.cloud.datalabeling_v1beta1.types.instruction import PdfInstruction +from google.cloud.datalabeling_v1beta1.types.operations import CreateInstructionMetadata +from google.cloud.datalabeling_v1beta1.types.operations import ( + ExportDataOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + ExportDataOperationResponse, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + ImportDataOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + ImportDataOperationResponse, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelImageBoundingBoxOperationMetadata, +) +from 
google.cloud.datalabeling_v1beta1.types.operations import ( + LabelImageBoundingPolyOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelImageClassificationOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelImageOrientedBoundingBoxOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelImagePolylineOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelImageSegmentationOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import LabelOperationMetadata +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelTextClassificationOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelTextEntityExtractionOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelVideoClassificationOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelVideoEventOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelVideoObjectDetectionOperationMetadata, +) +from google.cloud.datalabeling_v1beta1.types.operations import ( + LabelVideoObjectTrackingOperationMetadata, +) + +__all__ = ( + "AnnotatedDataset", + "AnnotatedDatasetMetadata", + "Annotation", + "AnnotationMetadata", + "AnnotationSentiment", + "AnnotationSource", + "AnnotationSpec", + "AnnotationSpecSet", + "AnnotationType", + "AnnotationValue", + "Attempt", + "BigQuerySource", + "BoundingBoxEvaluationOptions", + "BoundingPoly", + "BoundingPolyConfig", + "ClassificationMetadata", + "ClassificationMetrics", + "ConfusionMatrix", + "CreateAnnotationSpecSetRequest", + "CreateDatasetRequest", + "CreateEvaluationJobRequest", + "CreateInstructionMetadata", + "CreateInstructionRequest", + "CsvInstruction", + "DataItem", + "DataLabelingServiceAsyncClient", + 
"DataLabelingServiceClient", + "DataType", + "Dataset", + "DeleteAnnotatedDatasetRequest", + "DeleteAnnotationSpecSetRequest", + "DeleteDatasetRequest", + "DeleteEvaluationJobRequest", + "DeleteInstructionRequest", + "Evaluation", + "EvaluationConfig", + "EvaluationJob", + "EvaluationJobAlertConfig", + "EvaluationJobConfig", + "EvaluationMetrics", + "EventConfig", + "Example", + "ExportDataOperationMetadata", + "ExportDataOperationResponse", + "ExportDataRequest", + "GcsDestination", + "GcsFolderDestination", + "GcsSource", + "GetAnnotatedDatasetRequest", + "GetAnnotationSpecSetRequest", + "GetDataItemRequest", + "GetDatasetRequest", + "GetEvaluationJobRequest", + "GetEvaluationRequest", + "GetExampleRequest", + "GetInstructionRequest", + "HumanAnnotationConfig", + "ImageBoundingPolyAnnotation", + "ImageClassificationAnnotation", + "ImageClassificationConfig", + "ImagePayload", + "ImagePolylineAnnotation", + "ImageSegmentationAnnotation", + "ImportDataOperationMetadata", + "ImportDataOperationResponse", + "ImportDataRequest", + "InputConfig", + "Instruction", + "LabelImageBoundingBoxOperationMetadata", + "LabelImageBoundingPolyOperationMetadata", + "LabelImageClassificationOperationMetadata", + "LabelImageOrientedBoundingBoxOperationMetadata", + "LabelImagePolylineOperationMetadata", + "LabelImageRequest", + "LabelImageSegmentationOperationMetadata", + "LabelOperationMetadata", + "LabelStats", + "LabelTextClassificationOperationMetadata", + "LabelTextEntityExtractionOperationMetadata", + "LabelTextRequest", + "LabelVideoClassificationOperationMetadata", + "LabelVideoEventOperationMetadata", + "LabelVideoObjectDetectionOperationMetadata", + "LabelVideoObjectTrackingOperationMetadata", + "LabelVideoRequest", + "ListAnnotatedDatasetsRequest", + "ListAnnotatedDatasetsResponse", + "ListAnnotationSpecSetsRequest", + "ListAnnotationSpecSetsResponse", + "ListDataItemsRequest", + "ListDataItemsResponse", + "ListDatasetsRequest", + "ListDatasetsResponse", + 
"ListEvaluationJobsRequest", + "ListEvaluationJobsResponse", + "ListExamplesRequest", + "ListExamplesResponse", + "ListInstructionsRequest", + "ListInstructionsResponse", + "NormalizedBoundingPoly", + "NormalizedPolyline", + "NormalizedVertex", + "ObjectDetectionConfig", + "ObjectDetectionMetrics", + "ObjectTrackingConfig", + "ObjectTrackingFrame", + "OperatorMetadata", + "OutputConfig", + "PauseEvaluationJobRequest", + "PdfInstruction", + "Polyline", + "PolylineConfig", + "PrCurve", + "ResumeEvaluationJobRequest", + "SearchEvaluationsRequest", + "SearchEvaluationsResponse", + "SearchExampleComparisonsRequest", + "SearchExampleComparisonsResponse", + "SegmentationConfig", + "SentimentConfig", + "SequentialSegment", + "StringAggregationType", + "TextClassificationAnnotation", + "TextClassificationConfig", + "TextEntityExtractionAnnotation", + "TextEntityExtractionConfig", + "TextMetadata", + "TextPayload", + "TimeSegment", + "UpdateEvaluationJobRequest", + "Vertex", + "VideoClassificationAnnotation", + "VideoClassificationConfig", + "VideoEventAnnotation", + "VideoObjectTrackingAnnotation", + "VideoPayload", + "VideoThumbnail", +) diff --git a/google/cloud/datalabeling/py.typed b/google/cloud/datalabeling/py.typed new file mode 100644 index 0000000..1d27d78 --- /dev/null +++ b/google/cloud/datalabeling/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datalabeling package uses inline types. diff --git a/google/cloud/datalabeling_v1beta1/__init__.py b/google/cloud/datalabeling_v1beta1/__init__.py index 908930d..ac2fd2c 100644 --- a/google/cloud/datalabeling_v1beta1/__init__.py +++ b/google/cloud/datalabeling_v1beta1/__init__.py @@ -1,29 +1,293 @@ # -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC + +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -from __future__ import absolute_import - -from google.cloud.datalabeling_v1beta1 import types -from google.cloud.datalabeling_v1beta1.gapic import data_labeling_service_client -from google.cloud.datalabeling_v1beta1.gapic import enums - - -class DataLabelingServiceClient(data_labeling_service_client.DataLabelingServiceClient): - __doc__ = data_labeling_service_client.DataLabelingServiceClient.__doc__ - enums = enums +from .services.data_labeling_service import DataLabelingServiceClient +from .types.annotation import Annotation +from .types.annotation import AnnotationMetadata +from .types.annotation import AnnotationSentiment +from .types.annotation import AnnotationSource +from .types.annotation import AnnotationType +from .types.annotation import AnnotationValue +from .types.annotation import BoundingPoly +from .types.annotation import ImageBoundingPolyAnnotation +from .types.annotation import ImageClassificationAnnotation +from .types.annotation import ImagePolylineAnnotation +from .types.annotation import ImageSegmentationAnnotation +from .types.annotation import NormalizedBoundingPoly +from .types.annotation import NormalizedPolyline +from .types.annotation import NormalizedVertex +from .types.annotation import ObjectTrackingFrame +from .types.annotation import OperatorMetadata +from .types.annotation import Polyline +from .types.annotation import SequentialSegment +from .types.annotation import TextClassificationAnnotation +from .types.annotation import TextEntityExtractionAnnotation +from .types.annotation 
import TimeSegment +from .types.annotation import Vertex +from .types.annotation import VideoClassificationAnnotation +from .types.annotation import VideoEventAnnotation +from .types.annotation import VideoObjectTrackingAnnotation +from .types.annotation_spec_set import AnnotationSpec +from .types.annotation_spec_set import AnnotationSpecSet +from .types.data_labeling_service import CreateAnnotationSpecSetRequest +from .types.data_labeling_service import CreateDatasetRequest +from .types.data_labeling_service import CreateEvaluationJobRequest +from .types.data_labeling_service import CreateInstructionRequest +from .types.data_labeling_service import DeleteAnnotatedDatasetRequest +from .types.data_labeling_service import DeleteAnnotationSpecSetRequest +from .types.data_labeling_service import DeleteDatasetRequest +from .types.data_labeling_service import DeleteEvaluationJobRequest +from .types.data_labeling_service import DeleteInstructionRequest +from .types.data_labeling_service import ExportDataRequest +from .types.data_labeling_service import GetAnnotatedDatasetRequest +from .types.data_labeling_service import GetAnnotationSpecSetRequest +from .types.data_labeling_service import GetDataItemRequest +from .types.data_labeling_service import GetDatasetRequest +from .types.data_labeling_service import GetEvaluationJobRequest +from .types.data_labeling_service import GetEvaluationRequest +from .types.data_labeling_service import GetExampleRequest +from .types.data_labeling_service import GetInstructionRequest +from .types.data_labeling_service import ImportDataRequest +from .types.data_labeling_service import LabelImageRequest +from .types.data_labeling_service import LabelTextRequest +from .types.data_labeling_service import LabelVideoRequest +from .types.data_labeling_service import ListAnnotatedDatasetsRequest +from .types.data_labeling_service import ListAnnotatedDatasetsResponse +from .types.data_labeling_service import ListAnnotationSpecSetsRequest +from 
.types.data_labeling_service import ListAnnotationSpecSetsResponse +from .types.data_labeling_service import ListDataItemsRequest +from .types.data_labeling_service import ListDataItemsResponse +from .types.data_labeling_service import ListDatasetsRequest +from .types.data_labeling_service import ListDatasetsResponse +from .types.data_labeling_service import ListEvaluationJobsRequest +from .types.data_labeling_service import ListEvaluationJobsResponse +from .types.data_labeling_service import ListExamplesRequest +from .types.data_labeling_service import ListExamplesResponse +from .types.data_labeling_service import ListInstructionsRequest +from .types.data_labeling_service import ListInstructionsResponse +from .types.data_labeling_service import PauseEvaluationJobRequest +from .types.data_labeling_service import ResumeEvaluationJobRequest +from .types.data_labeling_service import SearchEvaluationsRequest +from .types.data_labeling_service import SearchEvaluationsResponse +from .types.data_labeling_service import SearchExampleComparisonsRequest +from .types.data_labeling_service import SearchExampleComparisonsResponse +from .types.data_labeling_service import UpdateEvaluationJobRequest +from .types.data_payloads import ImagePayload +from .types.data_payloads import TextPayload +from .types.data_payloads import VideoPayload +from .types.data_payloads import VideoThumbnail +from .types.dataset import AnnotatedDataset +from .types.dataset import AnnotatedDatasetMetadata +from .types.dataset import BigQuerySource +from .types.dataset import ClassificationMetadata +from .types.dataset import DataItem +from .types.dataset import DataType +from .types.dataset import Dataset +from .types.dataset import Example +from .types.dataset import GcsDestination +from .types.dataset import GcsFolderDestination +from .types.dataset import GcsSource +from .types.dataset import InputConfig +from .types.dataset import LabelStats +from .types.dataset import OutputConfig +from 
.types.dataset import TextMetadata +from .types.evaluation import BoundingBoxEvaluationOptions +from .types.evaluation import ClassificationMetrics +from .types.evaluation import ConfusionMatrix +from .types.evaluation import Evaluation +from .types.evaluation import EvaluationConfig +from .types.evaluation import EvaluationMetrics +from .types.evaluation import ObjectDetectionMetrics +from .types.evaluation import PrCurve +from .types.evaluation_job import Attempt +from .types.evaluation_job import EvaluationJob +from .types.evaluation_job import EvaluationJobAlertConfig +from .types.evaluation_job import EvaluationJobConfig +from .types.human_annotation_config import BoundingPolyConfig +from .types.human_annotation_config import EventConfig +from .types.human_annotation_config import HumanAnnotationConfig +from .types.human_annotation_config import ImageClassificationConfig +from .types.human_annotation_config import ObjectDetectionConfig +from .types.human_annotation_config import ObjectTrackingConfig +from .types.human_annotation_config import PolylineConfig +from .types.human_annotation_config import SegmentationConfig +from .types.human_annotation_config import SentimentConfig +from .types.human_annotation_config import StringAggregationType +from .types.human_annotation_config import TextClassificationConfig +from .types.human_annotation_config import TextEntityExtractionConfig +from .types.human_annotation_config import VideoClassificationConfig +from .types.instruction import CsvInstruction +from .types.instruction import Instruction +from .types.instruction import PdfInstruction +from .types.operations import CreateInstructionMetadata +from .types.operations import ExportDataOperationMetadata +from .types.operations import ExportDataOperationResponse +from .types.operations import ImportDataOperationMetadata +from .types.operations import ImportDataOperationResponse +from .types.operations import LabelImageBoundingBoxOperationMetadata +from 
.types.operations import LabelImageBoundingPolyOperationMetadata +from .types.operations import LabelImageClassificationOperationMetadata +from .types.operations import LabelImageOrientedBoundingBoxOperationMetadata +from .types.operations import LabelImagePolylineOperationMetadata +from .types.operations import LabelImageSegmentationOperationMetadata +from .types.operations import LabelOperationMetadata +from .types.operations import LabelTextClassificationOperationMetadata +from .types.operations import LabelTextEntityExtractionOperationMetadata +from .types.operations import LabelVideoClassificationOperationMetadata +from .types.operations import LabelVideoEventOperationMetadata +from .types.operations import LabelVideoObjectDetectionOperationMetadata +from .types.operations import LabelVideoObjectTrackingOperationMetadata -__all__ = ("enums", "types", "DataLabelingServiceClient") +__all__ = ( + "AnnotatedDataset", + "AnnotatedDatasetMetadata", + "Annotation", + "AnnotationMetadata", + "AnnotationSentiment", + "AnnotationSource", + "AnnotationSpec", + "AnnotationSpecSet", + "AnnotationType", + "AnnotationValue", + "Attempt", + "BigQuerySource", + "BoundingBoxEvaluationOptions", + "BoundingPoly", + "BoundingPolyConfig", + "ClassificationMetadata", + "ClassificationMetrics", + "ConfusionMatrix", + "CreateAnnotationSpecSetRequest", + "CreateDatasetRequest", + "CreateEvaluationJobRequest", + "CreateInstructionMetadata", + "CreateInstructionRequest", + "CsvInstruction", + "DataItem", + "DataType", + "Dataset", + "DeleteAnnotatedDatasetRequest", + "DeleteAnnotationSpecSetRequest", + "DeleteDatasetRequest", + "DeleteEvaluationJobRequest", + "DeleteInstructionRequest", + "Evaluation", + "EvaluationConfig", + "EvaluationJob", + "EvaluationJobAlertConfig", + "EvaluationJobConfig", + "EvaluationMetrics", + "EventConfig", + "Example", + "ExportDataOperationMetadata", + "ExportDataOperationResponse", + "ExportDataRequest", + "GcsDestination", + "GcsFolderDestination", + 
"GcsSource", + "GetAnnotatedDatasetRequest", + "GetAnnotationSpecSetRequest", + "GetDataItemRequest", + "GetDatasetRequest", + "GetEvaluationJobRequest", + "GetEvaluationRequest", + "GetExampleRequest", + "GetInstructionRequest", + "HumanAnnotationConfig", + "ImageBoundingPolyAnnotation", + "ImageClassificationAnnotation", + "ImageClassificationConfig", + "ImagePayload", + "ImagePolylineAnnotation", + "ImageSegmentationAnnotation", + "ImportDataOperationMetadata", + "ImportDataOperationResponse", + "ImportDataRequest", + "InputConfig", + "Instruction", + "LabelImageBoundingBoxOperationMetadata", + "LabelImageBoundingPolyOperationMetadata", + "LabelImageClassificationOperationMetadata", + "LabelImageOrientedBoundingBoxOperationMetadata", + "LabelImagePolylineOperationMetadata", + "LabelImageRequest", + "LabelImageSegmentationOperationMetadata", + "LabelOperationMetadata", + "LabelStats", + "LabelTextClassificationOperationMetadata", + "LabelTextEntityExtractionOperationMetadata", + "LabelTextRequest", + "LabelVideoClassificationOperationMetadata", + "LabelVideoEventOperationMetadata", + "LabelVideoObjectDetectionOperationMetadata", + "LabelVideoObjectTrackingOperationMetadata", + "LabelVideoRequest", + "ListAnnotatedDatasetsRequest", + "ListAnnotatedDatasetsResponse", + "ListAnnotationSpecSetsRequest", + "ListAnnotationSpecSetsResponse", + "ListDataItemsRequest", + "ListDataItemsResponse", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListEvaluationJobsRequest", + "ListEvaluationJobsResponse", + "ListExamplesRequest", + "ListExamplesResponse", + "ListInstructionsRequest", + "ListInstructionsResponse", + "NormalizedBoundingPoly", + "NormalizedPolyline", + "NormalizedVertex", + "ObjectDetectionConfig", + "ObjectDetectionMetrics", + "ObjectTrackingConfig", + "ObjectTrackingFrame", + "OperatorMetadata", + "OutputConfig", + "PauseEvaluationJobRequest", + "PdfInstruction", + "Polyline", + "PolylineConfig", + "PrCurve", + "ResumeEvaluationJobRequest", + 
"SearchEvaluationsRequest", + "SearchEvaluationsResponse", + "SearchExampleComparisonsRequest", + "SearchExampleComparisonsResponse", + "SegmentationConfig", + "SentimentConfig", + "SequentialSegment", + "StringAggregationType", + "TextClassificationAnnotation", + "TextClassificationConfig", + "TextEntityExtractionAnnotation", + "TextEntityExtractionConfig", + "TextMetadata", + "TextPayload", + "TimeSegment", + "UpdateEvaluationJobRequest", + "Vertex", + "VideoClassificationAnnotation", + "VideoClassificationConfig", + "VideoEventAnnotation", + "VideoObjectTrackingAnnotation", + "VideoPayload", + "VideoThumbnail", + "DataLabelingServiceClient", +) diff --git a/google/cloud/datalabeling_v1beta1/gapic/__init__.py b/google/cloud/datalabeling_v1beta1/gapic/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client.py b/google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client.py deleted file mode 100644 index 8336571..0000000 --- a/google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client.py +++ /dev/null @@ -1,3380 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.cloud.datalabeling.v1beta1 DataLabelingService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.datalabeling_v1beta1.gapic import data_labeling_service_client_config -from google.cloud.datalabeling_v1beta1.gapic import enums -from google.cloud.datalabeling_v1beta1.gapic.transports import ( - data_labeling_service_grpc_transport, -) -from google.cloud.datalabeling_v1beta1.proto import annotation_spec_set_pb2 -from google.cloud.datalabeling_v1beta1.proto import data_labeling_service_pb2 -from google.cloud.datalabeling_v1beta1.proto import data_labeling_service_pb2_grpc -from google.cloud.datalabeling_v1beta1.proto import dataset_pb2 -from google.cloud.datalabeling_v1beta1.proto import evaluation_job_pb2 -from google.cloud.datalabeling_v1beta1.proto import evaluation_pb2 -from google.cloud.datalabeling_v1beta1.proto import human_annotation_config_pb2 -from google.cloud.datalabeling_v1beta1.proto import instruction_pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - operations_pb2 as proto_operations_pb2, -) -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-datalabeling", -).version - - -class DataLabelingServiceClient(object): - SERVICE_ADDRESS = "datalabeling.googleapis.com:443" - """The default address of the service.""" - 
- # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.datalabeling.v1beta1.DataLabelingService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataLabelingServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def annotated_dataset_path(cls, project, dataset, annotated_dataset): - """Return a fully-qualified annotated_dataset string.""" - return google.api_core.path_template.expand( - "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}", - project=project, - dataset=dataset, - annotated_dataset=annotated_dataset, - ) - - @classmethod - def annotation_spec_set_path(cls, project, annotation_spec_set): - """Return a fully-qualified annotation_spec_set string.""" - return google.api_core.path_template.expand( - "projects/{project}/annotationSpecSets/{annotation_spec_set}", - project=project, - annotation_spec_set=annotation_spec_set, - ) - - @classmethod - def data_item_path(cls, project, dataset, data_item): - """Return a fully-qualified data_item string.""" - return google.api_core.path_template.expand( - "projects/{project}/datasets/{dataset}/dataItems/{data_item}", - project=project, - dataset=dataset, - data_item=data_item, - ) - - @classmethod - def dataset_path(cls, project, dataset): - """Return a fully-qualified dataset string.""" - return google.api_core.path_template.expand( - 
"projects/{project}/datasets/{dataset}", project=project, dataset=dataset, - ) - - @classmethod - def evaluation_path(cls, project, dataset, evaluation): - """Return a fully-qualified evaluation string.""" - return google.api_core.path_template.expand( - "projects/{project}/datasets/{dataset}/evaluations/{evaluation}", - project=project, - dataset=dataset, - evaluation=evaluation, - ) - - @classmethod - def evaluation_job_path(cls, project, evaluation_job): - """Return a fully-qualified evaluation_job string.""" - return google.api_core.path_template.expand( - "projects/{project}/evaluationJobs/{evaluation_job}", - project=project, - evaluation_job=evaluation_job, - ) - - @classmethod - def example_path(cls, project, dataset, annotated_dataset, example): - """Return a fully-qualified example string.""" - return google.api_core.path_template.expand( - "projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}/examples/{example}", - project=project, - dataset=dataset, - annotated_dataset=annotated_dataset, - example=example, - ) - - @classmethod - def instruction_path(cls, project, instruction): - """Return a fully-qualified instruction string.""" - return google.api_core.path_template.expand( - "projects/{project}/instructions/{instruction}", - project=project, - instruction=instruction, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.DataLabelingServiceGrpcTransport, - Callable[[~.Credentials, type], ~.DataLabelingServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. 
- This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = data_labeling_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=data_labeling_service_grpc_transport.DataLabelingServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = data_labeling_service_grpc_transport.DataLabelingServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. 
- # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_dataset( - self, - parent, - dataset, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates dataset. If success return a Dataset resource. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `dataset`: - >>> dataset = {} - >>> - >>> response = client.create_dataset(parent, dataset) - - Args: - parent (str): Required. Dataset resource parent, format: projects/{project_id} - dataset (Union[dict, ~google.cloud.datalabeling_v1beta1.types.Dataset]): Required. The dataset to be created. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.Dataset` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.Dataset` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "create_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_dataset, - default_retry=self._method_configs["CreateDataset"].retry, - default_timeout=self._method_configs["CreateDataset"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.CreateDatasetRequest( - parent=parent, dataset=dataset, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_dataset( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets dataset by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> response = client.get_dataset(name) - - Args: - name (str): Required. Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.Dataset` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "get_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_dataset, - default_retry=self._method_configs["GetDataset"].retry, - default_timeout=self._method_configs["GetDataset"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetDatasetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_datasets( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists datasets under a project. Pagination is supported. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_datasets(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_datasets(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. 
Dataset resource parent, format: projects/{project_id} - filter_ (str): Optional. Filter on dataset is not supported at this moment. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.Dataset` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_datasets" not in self._inner_api_calls: - self._inner_api_calls[ - "list_datasets" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_datasets, - default_retry=self._method_configs["ListDatasets"].retry, - default_timeout=self._method_configs["ListDatasets"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ListDatasetsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_datasets"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="datasets", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_dataset( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a dataset by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> client.delete_dataset(name) - - Args: - name (str): Required. Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_dataset, - default_retry=self._method_configs["DeleteDataset"].retry, - default_timeout=self._method_configs["DeleteDataset"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.DeleteDatasetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def import_data( - self, - name, - input_config, - user_email_address=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Imports data into dataset based on source locations defined in request. - It can be called multiple times for the same dataset. Each dataset can - only have one long running operation running on it. For example, no - labeling task (also long running operation) can be started while - importing is still ongoing. Vice versa. 
- - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> # TODO: Initialize `input_config`: - >>> input_config = {} - >>> - >>> response = client.import_data(name, input_config) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - input_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.InputConfig]): Required. Specify the input source of the data. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.InputConfig` - user_email_address (str): Email of the user who started the import task and should be notified by - email. If empty no notification will be sent. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "import_data" not in self._inner_api_calls: - self._inner_api_calls[ - "import_data" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.import_data, - default_retry=self._method_configs["ImportData"].retry, - default_timeout=self._method_configs["ImportData"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ImportDataRequest( - name=name, input_config=input_config, user_email_address=user_email_address, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["import_data"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - proto_operations_pb2.ImportDataOperationResponse, - metadata_type=proto_operations_pb2.ImportDataOperationMetadata, - ) - - def export_data( - self, - name, - annotated_dataset, - output_config, - filter_=None, - user_email_address=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Exports data and annotations from dataset. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[DATASET]') - >>> annotated_dataset = client.annotated_dataset_path('[PROJECT]', '[DATASET]', '[ANNOTATED_DATASET]') - >>> - >>> # TODO: Initialize `output_config`: - >>> output_config = {} - >>> - >>> response = client.export_data(name, annotated_dataset, output_config) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... 
result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - annotated_dataset (str): Required. Annotated dataset resource name. DataItem in Dataset and - their annotations in specified annotated dataset will be exported. It's - in format of - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id} - output_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.OutputConfig]): Required. Specify the output destination. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.OutputConfig` - filter_ (str): Optional. Filter is not supported at this moment. - user_email_address (str): Email of the user who started the export task and should be notified by - email. If empty no notification will be sent. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "export_data" not in self._inner_api_calls: - self._inner_api_calls[ - "export_data" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.export_data, - default_retry=self._method_configs["ExportData"].retry, - default_timeout=self._method_configs["ExportData"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ExportDataRequest( - name=name, - annotated_dataset=annotated_dataset, - output_config=output_config, - filter=filter_, - user_email_address=user_email_address, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["export_data"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - proto_operations_pb2.ExportDataOperationResponse, - metadata_type=proto_operations_pb2.ExportDataOperationMetadata, - ) - - def get_data_item( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a data item in a dataset by resource name. This API can be - called after data are imported into dataset. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.data_item_path('[PROJECT]', '[DATASET]', '[DATA_ITEM]') - >>> - >>> response = client.get_data_item(name) - - Args: - name (str): Required. The name of the data item to get, format: - projects/{project_id}/datasets/{dataset_id}/dataItems/{data_item_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.DataItem` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_data_item" not in self._inner_api_calls: - self._inner_api_calls[ - "get_data_item" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_data_item, - default_retry=self._method_configs["GetDataItem"].retry, - default_timeout=self._method_configs["GetDataItem"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetDataItemRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_data_item"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_data_items( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists data items in a dataset. This API can be called after data - are imported into dataset. Pagination is supported. 
- - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> # Iterate over all results - >>> for element in client.list_data_items(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_data_items(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Name of the dataset to list data items, format: - projects/{project_id}/datasets/{dataset_id} - filter_ (str): Optional. Filter is not supported at this moment. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.DataItem` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_data_items" not in self._inner_api_calls: - self._inner_api_calls[ - "list_data_items" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_data_items, - default_retry=self._method_configs["ListDataItems"].retry, - default_timeout=self._method_configs["ListDataItems"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ListDataItemsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_data_items"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="data_items", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_annotated_dataset( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an annotated dataset by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.annotated_dataset_path('[PROJECT]', '[DATASET]', '[ANNOTATED_DATASET]') - >>> - >>> response = client.get_annotated_dataset(name) - - Args: - name (str): Required. Name of the annotated dataset to get, format: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.AnnotatedDataset` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_annotated_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "get_annotated_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_annotated_dataset, - default_retry=self._method_configs["GetAnnotatedDataset"].retry, - default_timeout=self._method_configs["GetAnnotatedDataset"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetAnnotatedDatasetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_annotated_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_annotated_datasets( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists annotated datasets for a dataset. Pagination is supported. 
- - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> # Iterate over all results - >>> for element in client.list_annotated_datasets(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_annotated_datasets(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Name of the dataset to list annotated datasets, format: - projects/{project_id}/datasets/{dataset_id} - filter_ (str): Optional. Filter is not supported at this moment. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.AnnotatedDataset` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_annotated_datasets" not in self._inner_api_calls: - self._inner_api_calls[ - "list_annotated_datasets" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_annotated_datasets, - default_retry=self._method_configs["ListAnnotatedDatasets"].retry, - default_timeout=self._method_configs["ListAnnotatedDatasets"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ListAnnotatedDatasetsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_annotated_datasets"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="annotated_datasets", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_annotated_dataset( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an annotated dataset by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.annotated_dataset_path('[PROJECT]', '[DATASET]', '[ANNOTATED_DATASET]') - >>> - >>> client.delete_annotated_dataset(name) - - Args: - name (str): Required. 
Name of the annotated dataset to delete, format: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_annotated_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_annotated_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_annotated_dataset, - default_retry=self._method_configs["DeleteAnnotatedDataset"].retry, - default_timeout=self._method_configs["DeleteAnnotatedDataset"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.DeleteAnnotatedDatasetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_annotated_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def label_image( - self, - parent, - basic_config, - feature, - image_classification_config=None, - bounding_poly_config=None, - polyline_config=None, - segmentation_config=None, - 
retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Starts a labeling task for image. The type of image labeling task is - configured by feature in the request. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> from google.cloud.datalabeling_v1beta1 import enums - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> # TODO: Initialize `basic_config`: - >>> basic_config = {} - >>> - >>> # TODO: Initialize `feature`: - >>> feature = enums.LabelImageRequest.Feature.FEATURE_UNSPECIFIED - >>> - >>> response = client.label_image(parent, basic_config, feature) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. Name of the dataset to request labeling task, format: - projects/{project_id}/datasets/{dataset_id} - basic_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig]): Required. Basic human annotation config. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig` - feature (~google.cloud.datalabeling_v1beta1.types.Feature): Required. The type of image labeling task. - image_classification_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.ImageClassificationConfig]): Configuration for image classification task. One of - image_classification_config, bounding_poly_config, polyline_config and - segmentation_config are required. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.ImageClassificationConfig` - bounding_poly_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.BoundingPolyConfig]): Configuration for bounding box and bounding poly task. One of - image_classification_config, bounding_poly_config, polyline_config and - segmentation_config are required. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.BoundingPolyConfig` - polyline_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.PolylineConfig]): Configuration for polyline task. One of image_classification_config, - bounding_poly_config, polyline_config and segmentation_config are - required. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.PolylineConfig` - segmentation_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.SegmentationConfig]): Configuration for segmentation task. One of - image_classification_config, bounding_poly_config, polyline_config and - segmentation_config are required. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.SegmentationConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "label_image" not in self._inner_api_calls: - self._inner_api_calls[ - "label_image" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.label_image, - default_retry=self._method_configs["LabelImage"].retry, - default_timeout=self._method_configs["LabelImage"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - image_classification_config=image_classification_config, - bounding_poly_config=bounding_poly_config, - polyline_config=polyline_config, - segmentation_config=segmentation_config, - ) - - request = data_labeling_service_pb2.LabelImageRequest( - parent=parent, - basic_config=basic_config, - feature=feature, - image_classification_config=image_classification_config, - bounding_poly_config=bounding_poly_config, - polyline_config=polyline_config, - segmentation_config=segmentation_config, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["label_image"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - dataset_pb2.AnnotatedDataset, - metadata_type=proto_operations_pb2.LabelOperationMetadata, - ) - - def label_video( - self, - parent, - basic_config, - feature, - 
video_classification_config=None, - object_detection_config=None, - object_tracking_config=None, - event_config=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Starts a labeling task for video. The type of video labeling task is - configured by feature in the request. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> from google.cloud.datalabeling_v1beta1 import enums - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> # TODO: Initialize `basic_config`: - >>> basic_config = {} - >>> - >>> # TODO: Initialize `feature`: - >>> feature = enums.LabelVideoRequest.Feature.FEATURE_UNSPECIFIED - >>> - >>> response = client.label_video(parent, basic_config, feature) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. Name of the dataset to request labeling task, format: - projects/{project_id}/datasets/{dataset_id} - basic_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig]): Required. Basic human annotation config. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig` - feature (~google.cloud.datalabeling_v1beta1.types.Feature): Required. The type of video labeling task. - video_classification_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.VideoClassificationConfig]): Configuration for video classification task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.VideoClassificationConfig` - object_detection_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.ObjectDetectionConfig]): Configuration for video object detection task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.ObjectDetectionConfig` - object_tracking_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.ObjectTrackingConfig]): Configuration for video object tracking task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.ObjectTrackingConfig` - event_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.EventConfig]): Configuration for video event task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.EventConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "label_video" not in self._inner_api_calls: - self._inner_api_calls[ - "label_video" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.label_video, - default_retry=self._method_configs["LabelVideo"].retry, - default_timeout=self._method_configs["LabelVideo"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - video_classification_config=video_classification_config, - object_detection_config=object_detection_config, - object_tracking_config=object_tracking_config, - event_config=event_config, - ) - - request = data_labeling_service_pb2.LabelVideoRequest( - parent=parent, - basic_config=basic_config, - feature=feature, - video_classification_config=video_classification_config, - object_detection_config=object_detection_config, - object_tracking_config=object_tracking_config, - event_config=event_config, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["label_video"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - dataset_pb2.AnnotatedDataset, - metadata_type=proto_operations_pb2.LabelOperationMetadata, - ) - - def label_text( - self, - parent, - basic_config, - feature, - 
text_classification_config=None, - text_entity_extraction_config=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Starts a labeling task for text. The type of text labeling task is - configured by feature in the request. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> from google.cloud.datalabeling_v1beta1 import enums - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.dataset_path('[PROJECT]', '[DATASET]') - >>> - >>> # TODO: Initialize `basic_config`: - >>> basic_config = {} - >>> - >>> # TODO: Initialize `feature`: - >>> feature = enums.LabelTextRequest.Feature.FEATURE_UNSPECIFIED - >>> - >>> response = client.label_text(parent, basic_config, feature) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. Name of the data set to request labeling task, format: - projects/{project_id}/datasets/{dataset_id} - basic_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig]): Required. Basic human annotation config. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.HumanAnnotationConfig` - feature (~google.cloud.datalabeling_v1beta1.types.Feature): Required. The type of text labeling task. - text_classification_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.TextClassificationConfig]): Configuration for text classification task. One of - text_classification_config and text_entity_extraction_config is - required. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.TextClassificationConfig` - text_entity_extraction_config (Union[dict, ~google.cloud.datalabeling_v1beta1.types.TextEntityExtractionConfig]): Configuration for entity extraction task. One of - text_classification_config and text_entity_extraction_config is - required. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.TextEntityExtractionConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "label_text" not in self._inner_api_calls: - self._inner_api_calls[ - "label_text" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.label_text, - default_retry=self._method_configs["LabelText"].retry, - default_timeout=self._method_configs["LabelText"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. 
- google.api_core.protobuf_helpers.check_oneof( - text_classification_config=text_classification_config, - text_entity_extraction_config=text_entity_extraction_config, - ) - - request = data_labeling_service_pb2.LabelTextRequest( - parent=parent, - basic_config=basic_config, - feature=feature, - text_classification_config=text_classification_config, - text_entity_extraction_config=text_entity_extraction_config, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["label_text"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - dataset_pb2.AnnotatedDataset, - metadata_type=proto_operations_pb2.LabelOperationMetadata, - ) - - def get_example( - self, - name, - filter_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an example by resource name, including both data and annotation. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.example_path('[PROJECT]', '[DATASET]', '[ANNOTATED_DATASET]', '[EXAMPLE]') - >>> - >>> response = client.get_example(name) - - Args: - name (str): Required. Name of example, format: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id}/examples/{example_id} - filter_ (str): Optional. An expression for filtering Examples. Filter by - annotation_spec.display_name is supported. Format - "annotation_spec.display_name = {display_name}" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.Example` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_example" not in self._inner_api_calls: - self._inner_api_calls[ - "get_example" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_example, - default_retry=self._method_configs["GetExample"].retry, - default_timeout=self._method_configs["GetExample"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetExampleRequest( - name=name, filter=filter_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_example"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_examples( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists examples in an annotated dataset. Pagination is supported. 
- - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.annotated_dataset_path('[PROJECT]', '[DATASET]', '[ANNOTATED_DATASET]') - >>> - >>> # Iterate over all results - >>> for element in client.list_examples(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_examples(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Example resource parent. - filter_ (str): Optional. An expression for filtering Examples. For annotated - datasets that have annotation spec set, filter by - annotation_spec.display_name is supported. Format - "annotation_spec.display_name = {display_name}" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.Example` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_examples" not in self._inner_api_calls: - self._inner_api_calls[ - "list_examples" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_examples, - default_retry=self._method_configs["ListExamples"].retry, - default_timeout=self._method_configs["ListExamples"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ListExamplesRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_examples"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="examples", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_annotation_spec_set( - self, - parent, - annotation_spec_set, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates an annotation spec set by providing a set of labels. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `annotation_spec_set`: - >>> annotation_spec_set = {} - >>> - >>> response = client.create_annotation_spec_set(parent, annotation_spec_set) - - Args: - parent (str): Required. 
AnnotationSpecSet resource parent, format: - projects/{project_id} - annotation_spec_set (Union[dict, ~google.cloud.datalabeling_v1beta1.types.AnnotationSpecSet]): Required. Annotation spec set to create. Annotation specs must be - included. Only one annotation spec will be accepted for annotation specs - with same display_name. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.AnnotationSpecSet` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.AnnotationSpecSet` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_annotation_spec_set" not in self._inner_api_calls: - self._inner_api_calls[ - "create_annotation_spec_set" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_annotation_spec_set, - default_retry=self._method_configs["CreateAnnotationSpecSet"].retry, - default_timeout=self._method_configs["CreateAnnotationSpecSet"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.CreateAnnotationSpecSetRequest( - parent=parent, annotation_spec_set=annotation_spec_set, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_annotation_spec_set"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_annotation_spec_set( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an annotation spec set by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.annotation_spec_set_path('[PROJECT]', '[ANNOTATION_SPEC_SET]') - >>> - >>> response = client.get_annotation_spec_set(name) - - Args: - name (str): Required. AnnotationSpecSet resource name, format: - projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.AnnotationSpecSet` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_annotation_spec_set" not in self._inner_api_calls: - self._inner_api_calls[ - "get_annotation_spec_set" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_annotation_spec_set, - default_retry=self._method_configs["GetAnnotationSpecSet"].retry, - default_timeout=self._method_configs["GetAnnotationSpecSet"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetAnnotationSpecSetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_annotation_spec_set"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_annotation_spec_sets( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists annotation spec sets for a project. Pagination is supported. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_annotation_spec_sets(parent): - ... 
# process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_annotation_spec_sets(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Parent of AnnotationSpecSet resource, format: - projects/{project_id} - filter_ (str): Optional. Filter is not supported at this moment. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.AnnotationSpecSet` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_annotation_spec_sets" not in self._inner_api_calls: - self._inner_api_calls[ - "list_annotation_spec_sets" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_annotation_spec_sets, - default_retry=self._method_configs["ListAnnotationSpecSets"].retry, - default_timeout=self._method_configs["ListAnnotationSpecSets"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ListAnnotationSpecSetsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_annotation_spec_sets"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="annotation_spec_sets", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_annotation_spec_set( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an annotation spec set by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.annotation_spec_set_path('[PROJECT]', '[ANNOTATION_SPEC_SET]') - >>> - >>> client.delete_annotation_spec_set(name) - - Args: - name (str): Required. AnnotationSpec resource name, format: - ``projects/{project_id}/annotationSpecSets/{annotation_spec_set_id}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_annotation_spec_set" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_annotation_spec_set" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_annotation_spec_set, - default_retry=self._method_configs["DeleteAnnotationSpecSet"].retry, - default_timeout=self._method_configs["DeleteAnnotationSpecSet"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.DeleteAnnotationSpecSetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_annotation_spec_set"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_instruction( - self, - parent, - instruction, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates an instruction for how data should be labeled. 
- - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `instruction`: - >>> instruction = {} - >>> - >>> response = client.create_instruction(parent, instruction) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. Instruction resource parent, format: projects/{project_id} - instruction (Union[dict, ~google.cloud.datalabeling_v1beta1.types.Instruction]): Required. Instruction of how to perform the labeling task. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.Instruction` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_instruction" not in self._inner_api_calls: - self._inner_api_calls[ - "create_instruction" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_instruction, - default_retry=self._method_configs["CreateInstruction"].retry, - default_timeout=self._method_configs["CreateInstruction"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.CreateInstructionRequest( - parent=parent, instruction=instruction, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["create_instruction"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - instruction_pb2.Instruction, - metadata_type=proto_operations_pb2.CreateInstructionMetadata, - ) - - def get_instruction( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an instruction by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.instruction_path('[PROJECT]', '[INSTRUCTION]') - >>> - >>> response = client.get_instruction(name) - - Args: - name (str): Required. Instruction resource name, format: - projects/{project_id}/instructions/{instruction_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.Instruction` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_instruction" not in self._inner_api_calls: - self._inner_api_calls[ - "get_instruction" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_instruction, - default_retry=self._method_configs["GetInstruction"].retry, - default_timeout=self._method_configs["GetInstruction"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetInstructionRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_instruction"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_instructions( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists instructions for a project. Pagination is supported. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_instructions(parent): - ... 
# process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_instructions(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Instruction resource parent, format: projects/{project_id} - filter_ (str): Optional. Filter is not supported at this moment. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.Instruction` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_instructions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_instructions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_instructions, - default_retry=self._method_configs["ListInstructions"].retry, - default_timeout=self._method_configs["ListInstructions"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ListInstructionsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_instructions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="instructions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_instruction( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an instruction object by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.instruction_path('[PROJECT]', '[INSTRUCTION]') - >>> - >>> client.delete_instruction(name) - - Args: - name (str): Required. Instruction resource name, format: - projects/{project_id}/instructions/{instruction_id} - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_instruction" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_instruction" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_instruction, - default_retry=self._method_configs["DeleteInstruction"].retry, - default_timeout=self._method_configs["DeleteInstruction"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.DeleteInstructionRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_instruction"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_evaluation( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an evaluation by resource name (to search, use - ``projects.evaluations.search``). - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.evaluation_path('[PROJECT]', '[DATASET]', '[EVALUATION]') - >>> - >>> response = client.get_evaluation(name) - - Args: - name (str): Required. Name of the evaluation. 
Format: - - "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}' - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.Evaluation` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_evaluation" not in self._inner_api_calls: - self._inner_api_calls[ - "get_evaluation" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_evaluation, - default_retry=self._method_configs["GetEvaluation"].retry, - default_timeout=self._method_configs["GetEvaluation"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetEvaluationRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_evaluation"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def search_evaluations( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - 
Searches ``evaluations`` within a project. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.evaluation_path('[PROJECT]', '[DATASET]', '[EVALUATION]') - >>> - >>> # Iterate over all results - >>> for element in client.search_evaluations(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.search_evaluations(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Evaluation search parent (project ID). Format: - "projects/{project_id}" - filter_ (str): Optional. To search evaluations, you can filter by the following: - - - evaluation\_job.evaluation_job_id (the last part of - ``EvaluationJob.name``) - - evaluation\_job.model_id (the {model_name} portion of - ``EvaluationJob.modelVersion``) - - evaluation\_job.evaluation_job_run_time_start (Minimum threshold for - the ``evaluationJobRunTime`` that created the evaluation) - - evaluation\_job.evaluation_job_run_time_end (Maximum threshold for - the ``evaluationJobRunTime`` that created the evaluation) - - evaluation\_job.job_state (``EvaluationJob.state``) - - annotation\_spec.display_name (the Evaluation contains a metric for - the annotation spec with this ``displayName``) - - To filter by multiple critiera, use the ``AND`` operator or the ``OR`` - operator. The following examples shows a string that filters by several - critiera: - - "evaluation\ *job.evaluation_job_id = {evaluation_job_id} AND - evaluation*\ job.model_id = {model_name} AND - evaluation\ *job.evaluation_job_run_time_start = {timestamp_1} AND - evaluation*\ job.evaluation_job_run_time_end = {timestamp_2} AND - annotation\_spec.display_name = {display_name}" - page_size (int): The maximum number of resources contained in the - underlying API response. 
If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.Evaluation` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "search_evaluations" not in self._inner_api_calls: - self._inner_api_calls[ - "search_evaluations" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.search_evaluations, - default_retry=self._method_configs["SearchEvaluations"].retry, - default_timeout=self._method_configs["SearchEvaluations"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.SearchEvaluationsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["search_evaluations"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="evaluations", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def search_example_comparisons( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Searches example comparisons from an evaluation. The return format is a - list of example comparisons that show ground truth and prediction(s) for - a single input. Search by providing an evaluation ID. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.evaluation_path('[PROJECT]', '[DATASET]', '[EVALUATION]') - >>> - >>> # Iterate over all results - >>> for element in client.search_example_comparisons(parent): - ... # process element - ... 
pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.search_example_comparisons(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Name of the ``Evaluation`` resource to search for example - comparisons from. Format: - - "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.ExampleComparison` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "search_example_comparisons" not in self._inner_api_calls: - self._inner_api_calls[ - "search_example_comparisons" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.search_example_comparisons, - default_retry=self._method_configs["SearchExampleComparisons"].retry, - default_timeout=self._method_configs[ - "SearchExampleComparisons" - ].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.SearchExampleComparisonsRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["search_example_comparisons"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="example_comparisons", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_evaluation_job( - self, - parent, - job, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates an evaluation job. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `job`: - >>> job = {} - >>> - >>> response = client.create_evaluation_job(parent, job) - - Args: - parent (str): Required. Evaluation job resource parent. Format: - "projects/{project_id}" - job (Union[dict, ~google.cloud.datalabeling_v1beta1.types.EvaluationJob]): Required. The evaluation job to create. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.EvaluationJob` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.EvaluationJob` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_evaluation_job" not in self._inner_api_calls: - self._inner_api_calls[ - "create_evaluation_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_evaluation_job, - default_retry=self._method_configs["CreateEvaluationJob"].retry, - default_timeout=self._method_configs["CreateEvaluationJob"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.CreateEvaluationJobRequest( - parent=parent, job=job, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_evaluation_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_evaluation_job( - self, - evaluation_job, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an evaluation job. You can only update certain fields of the - job's ``EvaluationJobConfig``: ``humanAnnotationConfig.instruction``, - ``exampleCount``, and ``exampleSamplePercentage``. - - If you want to change any other aspect of the evaluation job, you must - delete the job and create a new one. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> # TODO: Initialize `evaluation_job`: - >>> evaluation_job = {} - >>> - >>> response = client.update_evaluation_job(evaluation_job) - - Args: - evaluation_job (Union[dict, ~google.cloud.datalabeling_v1beta1.types.EvaluationJob]): Required. Evaluation job that is going to be updated. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.EvaluationJob` - update_mask (Union[dict, ~google.cloud.datalabeling_v1beta1.types.FieldMask]): Optional. Mask for which fields to update. You can only provide the - following fields: - - - ``evaluationJobConfig.humanAnnotationConfig.instruction`` - - ``evaluationJobConfig.exampleCount`` - - ``evaluationJobConfig.exampleSamplePercentage`` - - You can provide more than one of these fields by separating them with - commas. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.datalabeling_v1beta1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.EvaluationJob` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_evaluation_job" not in self._inner_api_calls: - self._inner_api_calls[ - "update_evaluation_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_evaluation_job, - default_retry=self._method_configs["UpdateEvaluationJob"].retry, - default_timeout=self._method_configs["UpdateEvaluationJob"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.UpdateEvaluationJobRequest( - evaluation_job=evaluation_job, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("evaluation_job.name", evaluation_job.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_evaluation_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_evaluation_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an evaluation job by resource name. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.evaluation_job_path('[PROJECT]', '[EVALUATION_JOB]') - >>> - >>> response = client.get_evaluation_job(name) - - Args: - name (str): Required. Name of the evaluation job. Format: - - "projects/{project_id}/evaluationJobs/{evaluation_job_id}" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.datalabeling_v1beta1.types.EvaluationJob` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_evaluation_job" not in self._inner_api_calls: - self._inner_api_calls[ - "get_evaluation_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_evaluation_job, - default_retry=self._method_configs["GetEvaluationJob"].retry, - default_timeout=self._method_configs["GetEvaluationJob"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.GetEvaluationJobRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_evaluation_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def pause_evaluation_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pauses an evaluation job. Pausing an evaluation job that is already - in a ``PAUSED`` state is a no-op. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.evaluation_job_path('[PROJECT]', '[EVALUATION_JOB]') - >>> - >>> client.pause_evaluation_job(name) - - Args: - name (str): Required. Name of the evaluation job that is going to be paused. 
- Format: - - "projects/{project_id}/evaluationJobs/{evaluation_job_id}" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "pause_evaluation_job" not in self._inner_api_calls: - self._inner_api_calls[ - "pause_evaluation_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pause_evaluation_job, - default_retry=self._method_configs["PauseEvaluationJob"].retry, - default_timeout=self._method_configs["PauseEvaluationJob"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.PauseEvaluationJobRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["pause_evaluation_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def resume_evaluation_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Resumes a paused evaluation job. A deleted evaluation job can't be resumed. 
- Resuming a running or scheduled evaluation job is a no-op. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.evaluation_job_path('[PROJECT]', '[EVALUATION_JOB]') - >>> - >>> client.resume_evaluation_job(name) - - Args: - name (str): Required. Name of the evaluation job that is going to be resumed. - Format: - - "projects/{project_id}/evaluationJobs/{evaluation_job_id}" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "resume_evaluation_job" not in self._inner_api_calls: - self._inner_api_calls[ - "resume_evaluation_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.resume_evaluation_job, - default_retry=self._method_configs["ResumeEvaluationJob"].retry, - default_timeout=self._method_configs["ResumeEvaluationJob"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ResumeEvaluationJobRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["resume_evaluation_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_evaluation_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Stops and deletes an evaluation job. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> name = client.evaluation_job_path('[PROJECT]', '[EVALUATION_JOB]') - >>> - >>> client.delete_evaluation_job(name) - - Args: - name (str): Required. Name of the evaluation job that is going to be deleted. - Format: - - "projects/{project_id}/evaluationJobs/{evaluation_job_id}" - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_evaluation_job" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_evaluation_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_evaluation_job, - default_retry=self._method_configs["DeleteEvaluationJob"].retry, - default_timeout=self._method_configs["DeleteEvaluationJob"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.DeleteEvaluationJobRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_evaluation_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_evaluation_jobs( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists all evaluation jobs within a project with possible filters. - Pagination is supported. - - Example: - >>> from google.cloud import datalabeling_v1beta1 - >>> - >>> client = datalabeling_v1beta1.DataLabelingServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_evaluation_jobs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_evaluation_jobs(parent).pages: - ... for element in page: - ... 
# process element - ... pass - - Args: - parent (str): Required. Evaluation job resource parent. Format: - "projects/{project_id}" - filter_ (str): Optional. You can filter the jobs to list by model_id (also known as - model_name, as described in ``EvaluationJob.modelVersion``) or by - evaluation job state (as described in ``EvaluationJob.state``). To - filter by both criteria, use the ``AND`` operator or the ``OR`` - operator. For example, you can use the following string for your filter: - "evaluation\ *job.model_id = {model_name} AND evaluation*\ job.state = - {evaluation_job_state}" - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.datalabeling_v1beta1.types.EvaluationJob` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_evaluation_jobs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_evaluation_jobs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_evaluation_jobs, - default_retry=self._method_configs["ListEvaluationJobs"].retry, - default_timeout=self._method_configs["ListEvaluationJobs"].timeout, - client_info=self._client_info, - ) - - request = data_labeling_service_pb2.ListEvaluationJobsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_evaluation_jobs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="evaluation_jobs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client_config.py b/google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client_config.py deleted file mode 100644 index 3414078..0000000 --- a/google/cloud/datalabeling_v1beta1/gapic/data_labeling_service_client_config.py +++ /dev/null @@ -1,212 +0,0 @@ -config = { - "interfaces": { - "google.cloud.datalabeling.v1beta1.DataLabelingService": { - "retry_codes": { - "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 30000, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, - 
"total_timeout_millis": 30000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, - "total_timeout_millis": 30000, - }, - }, - "methods": { - "CreateDataset": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetDataset": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListDatasets": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "DeleteDataset": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ImportData": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ExportData": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetDataItem": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListDataItems": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetAnnotatedDataset": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListAnnotatedDatasets": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - 
"retry_params_name": "retry_policy_1_params", - }, - "DeleteAnnotatedDataset": { - "timeout_millis": 60000, - "retry_codes_name": "no_retry_codes", - "retry_params_name": "no_retry_params", - }, - "LabelImage": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "LabelVideo": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "LabelText": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetExample": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListExamples": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateAnnotationSpecSet": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetAnnotationSpecSet": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListAnnotationSpecSets": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "DeleteAnnotationSpecSet": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateInstruction": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetInstruction": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListInstructions": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "DeleteInstruction": { - "timeout_millis": 
30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetEvaluation": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "SearchEvaluations": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "SearchExampleComparisons": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "CreateEvaluationJob": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UpdateEvaluationJob": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetEvaluationJob": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "PauseEvaluationJob": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ResumeEvaluationJob": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteEvaluationJob": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListEvaluationJobs": { - "timeout_millis": 30000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - }, - } - } -} diff --git a/google/cloud/datalabeling_v1beta1/gapic/enums.py b/google/cloud/datalabeling_v1beta1/gapic/enums.py deleted file mode 100644 index 0d661f3..0000000 --- a/google/cloud/datalabeling_v1beta1/gapic/enums.py +++ /dev/null @@ -1,228 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class AnnotationSentiment(enum.IntEnum): - """ - Attributes: - ANNOTATION_SENTIMENT_UNSPECIFIED (int) - NEGATIVE (int): This annotation describes negatively about the data. - POSITIVE (int): This label describes positively about the data. - """ - - ANNOTATION_SENTIMENT_UNSPECIFIED = 0 - NEGATIVE = 1 - POSITIVE = 2 - - -class AnnotationSource(enum.IntEnum): - """ - Specifies where the annotation comes from (whether it was provided by a - human labeler or a different source). - - Attributes: - ANNOTATION_SOURCE_UNSPECIFIED (int) - OPERATOR (int): Answer is provided by a human contributor. - """ - - ANNOTATION_SOURCE_UNSPECIFIED = 0 - OPERATOR = 3 - - -class AnnotationType(enum.IntEnum): - """ - Attributes: - ANNOTATION_TYPE_UNSPECIFIED (int) - IMAGE_CLASSIFICATION_ANNOTATION (int): Classification annotations in an image. Allowed for continuous evaluation. - IMAGE_BOUNDING_BOX_ANNOTATION (int): Bounding box annotations in an image. A form of image object detection. - Allowed for continuous evaluation. - IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION (int): Oriented bounding box. The box does not have to be parallel to horizontal - line. - IMAGE_BOUNDING_POLY_ANNOTATION (int): Bounding poly annotations in an image. - IMAGE_POLYLINE_ANNOTATION (int): Polyline annotations in an image. - IMAGE_SEGMENTATION_ANNOTATION (int): Segmentation annotations in an image. - VIDEO_SHOTS_CLASSIFICATION_ANNOTATION (int): Classification annotations in video shots. 
- VIDEO_OBJECT_TRACKING_ANNOTATION (int): Video object tracking annotation. - VIDEO_OBJECT_DETECTION_ANNOTATION (int): Video object detection annotation. - VIDEO_EVENT_ANNOTATION (int): Video event annotation. - TEXT_CLASSIFICATION_ANNOTATION (int): Classification for text. Allowed for continuous evaluation. - TEXT_ENTITY_EXTRACTION_ANNOTATION (int): Entity extraction for text. - GENERAL_CLASSIFICATION_ANNOTATION (int): General classification. Allowed for continuous evaluation. - """ - - ANNOTATION_TYPE_UNSPECIFIED = 0 - IMAGE_CLASSIFICATION_ANNOTATION = 1 - IMAGE_BOUNDING_BOX_ANNOTATION = 2 - IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION = 13 - IMAGE_BOUNDING_POLY_ANNOTATION = 10 - IMAGE_POLYLINE_ANNOTATION = 11 - IMAGE_SEGMENTATION_ANNOTATION = 12 - VIDEO_SHOTS_CLASSIFICATION_ANNOTATION = 3 - VIDEO_OBJECT_TRACKING_ANNOTATION = 4 - VIDEO_OBJECT_DETECTION_ANNOTATION = 5 - VIDEO_EVENT_ANNOTATION = 6 - TEXT_CLASSIFICATION_ANNOTATION = 8 - TEXT_ENTITY_EXTRACTION_ANNOTATION = 9 - GENERAL_CLASSIFICATION_ANNOTATION = 14 - - -class DataType(enum.IntEnum): - """ - Attributes: - DATA_TYPE_UNSPECIFIED (int) - IMAGE (int): Allowed for continuous evaluation. - VIDEO (int) - TEXT (int): Allowed for continuous evaluation. - GENERAL_DATA (int): Allowed for continuous evaluation. - """ - - DATA_TYPE_UNSPECIFIED = 0 - IMAGE = 1 - VIDEO = 2 - TEXT = 4 - GENERAL_DATA = 6 - - -class StringAggregationType(enum.IntEnum): - """ - Attributes: - STRING_AGGREGATION_TYPE_UNSPECIFIED (int) - MAJORITY_VOTE (int): Majority vote to aggregate answers. - UNANIMOUS_VOTE (int): Unanimous answers will be adopted. - NO_AGGREGATION (int): Preserve all answers by crowd compute. - """ - - STRING_AGGREGATION_TYPE_UNSPECIFIED = 0 - MAJORITY_VOTE = 1 - UNANIMOUS_VOTE = 2 - NO_AGGREGATION = 3 - - -class EvaluationJob(object): - class State(enum.IntEnum): - """ - State of the job. - - Attributes: - STATE_UNSPECIFIED (int) - SCHEDULED (int): The job is scheduled to run at the ``configured interval``. 
You can - ``pause`` or ``delete`` the job. - - When the job is in this state, it samples prediction input and output - from your model version into your BigQuery table as predictions occur. - RUNNING (int): The job is currently running. When the job runs, Data Labeling - Service does several things: - - 1. If you have configured your job to use Data Labeling Service for - ground truth labeling, the service creates a ``Dataset`` and a - labeling task for all data sampled since the last time the job ran. - Human labelers provide ground truth labels for your data. Human - labeling may take hours, or even days, depending on how much data has - been sampled. The job remains in the ``RUNNING`` state during this - time, and it can even be running multiple times in parallel if it - gets triggered again (for example 24 hours later) before the earlier - run has completed. When human labelers have finished labeling the - data, the next step occurs. If you have configured your job to - provide your own ground truth labels, Data Labeling Service still - creates a ``Dataset`` for newly sampled data, but it expects that you - have already added ground truth labels to the BigQuery table by this - time. The next step occurs immediately. - - 2. Data Labeling Service creates an ``Evaluation`` by comparing your - model version's predictions with the ground truth labels. - - If the job remains in this state for a long time, it continues to sample - prediction data into your BigQuery table and will run again at the next - interval, even if it causes the job to run multiple times in parallel. - PAUSED (int): The job is not sampling prediction input and output into your - BigQuery table and it will not run according to its schedule. You can - ``resume`` the job. - STOPPED (int): The job has this state right before it is deleted. 
- """ - - STATE_UNSPECIFIED = 0 - SCHEDULED = 1 - RUNNING = 2 - PAUSED = 3 - STOPPED = 4 - - -class LabelImageRequest(object): - class Feature(enum.IntEnum): - """ - Image labeling task feature. - - Attributes: - FEATURE_UNSPECIFIED (int) - CLASSIFICATION (int): Label whole image with one or more of labels. - BOUNDING_BOX (int): Label image with bounding boxes for labels. - ORIENTED_BOUNDING_BOX (int): Label oriented bounding box. The box does not have to be parallel to - horizontal line. - BOUNDING_POLY (int): Label images with bounding poly. A bounding poly is a plane figure that - is bounded by a finite chain of straight line segments closing in a loop. - POLYLINE (int): Label images with polyline. Polyline is formed by connected line segments - which are not in closed form. - SEGMENTATION (int): Label images with segmentation. Segmentation is different from bounding - poly since it is more fine-grained, pixel level annotation. - """ - - FEATURE_UNSPECIFIED = 0 - CLASSIFICATION = 1 - BOUNDING_BOX = 2 - ORIENTED_BOUNDING_BOX = 6 - BOUNDING_POLY = 3 - POLYLINE = 4 - SEGMENTATION = 5 - - -class LabelTextRequest(object): - class Feature(enum.IntEnum): - """ - Text labeling task feature. - - Attributes: - FEATURE_UNSPECIFIED (int) - TEXT_CLASSIFICATION (int): Label text content to one of more labels. - TEXT_ENTITY_EXTRACTION (int): Label entities and their span in text. - """ - - FEATURE_UNSPECIFIED = 0 - TEXT_CLASSIFICATION = 1 - TEXT_ENTITY_EXTRACTION = 2 - - -class LabelVideoRequest(object): - class Feature(enum.IntEnum): - """ - Video labeling task feature. - - Attributes: - FEATURE_UNSPECIFIED (int) - CLASSIFICATION (int): Label whole video or video segment with one or more labels. - OBJECT_DETECTION (int): Label objects with bounding box on image frames extracted from the video. - OBJECT_TRACKING (int): Label and track objects in video. - EVENT (int): Label the range of video for the specified events. 
- """ - - FEATURE_UNSPECIFIED = 0 - CLASSIFICATION = 1 - OBJECT_DETECTION = 2 - OBJECT_TRACKING = 3 - EVENT = 4 diff --git a/google/cloud/datalabeling_v1beta1/gapic/transports/__init__.py b/google/cloud/datalabeling_v1beta1/gapic/transports/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/google/cloud/datalabeling_v1beta1/gapic/transports/data_labeling_service_grpc_transport.py b/google/cloud/datalabeling_v1beta1/gapic/transports/data_labeling_service_grpc_transport.py deleted file mode 100644 index 676202f..0000000 --- a/google/cloud/datalabeling_v1beta1/gapic/transports/data_labeling_service_grpc_transport.py +++ /dev/null @@ -1,580 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.datalabeling_v1beta1.proto import data_labeling_service_pb2_grpc - - -class DataLabelingServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.datalabeling.v1beta1 DataLabelingService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. 
- _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="datalabeling.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "data_labeling_service_stub": data_labeling_service_pb2_grpc.DataLabelingServiceStub( - channel - ), - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="datalabeling.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. 
- credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_dataset(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.create_dataset`. - - Creates dataset. If success return a Dataset resource. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].CreateDataset - - @property - def get_dataset(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_dataset`. - - Gets dataset by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetDataset - - @property - def list_datasets(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.list_datasets`. - - Lists datasets under a project. Pagination is supported. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ListDatasets - - @property - def delete_dataset(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.delete_dataset`. - - Deletes a dataset by resource name. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].DeleteDataset - - @property - def import_data(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.import_data`. - - Imports data into dataset based on source locations defined in request. - It can be called multiple times for the same dataset. Each dataset can - only have one long running operation running on it. For example, no - labeling task (also long running operation) can be started while - importing is still ongoing. Vice versa. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ImportData - - @property - def export_data(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.export_data`. - - Exports data and annotations from dataset. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ExportData - - @property - def get_data_item(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_data_item`. - - Gets a data item in a dataset by resource name. This API can be - called after data are imported into dataset. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetDataItem - - @property - def list_data_items(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.list_data_items`. - - Lists data items in a dataset. This API can be called after data - are imported into dataset. Pagination is supported. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ListDataItems - - @property - def get_annotated_dataset(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_annotated_dataset`. - - Gets an annotated dataset by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetAnnotatedDataset - - @property - def list_annotated_datasets(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.list_annotated_datasets`. - - Lists annotated datasets for a dataset. Pagination is supported. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ListAnnotatedDatasets - - @property - def delete_annotated_dataset(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.delete_annotated_dataset`. - - Deletes an annotated dataset by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].DeleteAnnotatedDataset - - @property - def label_image(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.label_image`. - - Starts a labeling task for image. The type of image labeling task is - configured by feature in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["data_labeling_service_stub"].LabelImage - - @property - def label_video(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.label_video`. - - Starts a labeling task for video. The type of video labeling task is - configured by feature in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].LabelVideo - - @property - def label_text(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.label_text`. - - Starts a labeling task for text. The type of text labeling task is - configured by feature in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].LabelText - - @property - def get_example(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_example`. - - Gets an example by resource name, including both data and annotation. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetExample - - @property - def list_examples(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.list_examples`. - - Lists examples in an annotated dataset. Pagination is supported. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ListExamples - - @property - def create_annotation_spec_set(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.create_annotation_spec_set`. - - Creates an annotation spec set by providing a set of labels. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].CreateAnnotationSpecSet - - @property - def get_annotation_spec_set(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_annotation_spec_set`. - - Gets an annotation spec set by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetAnnotationSpecSet - - @property - def list_annotation_spec_sets(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.list_annotation_spec_sets`. - - Lists annotation spec sets for a project. Pagination is supported. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ListAnnotationSpecSets - - @property - def delete_annotation_spec_set(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.delete_annotation_spec_set`. - - Deletes an annotation spec set by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].DeleteAnnotationSpecSet - - @property - def create_instruction(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.create_instruction`. - - Creates an instruction for how data should be labeled. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["data_labeling_service_stub"].CreateInstruction - - @property - def get_instruction(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_instruction`. - - Gets an instruction by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetInstruction - - @property - def list_instructions(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.list_instructions`. - - Lists instructions for a project. Pagination is supported. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ListInstructions - - @property - def delete_instruction(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.delete_instruction`. - - Deletes an instruction object by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].DeleteInstruction - - @property - def get_evaluation(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_evaluation`. - - Gets an evaluation by resource name (to search, use - ``projects.evaluations.search``). - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetEvaluation - - @property - def search_evaluations(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.search_evaluations`. - - Searches ``evaluations`` within a project. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].SearchEvaluations - - @property - def search_example_comparisons(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.search_example_comparisons`. - - Searches example comparisons from an evaluation. The return format is a - list of example comparisons that show ground truth and prediction(s) for - a single input. Search by providing an evaluation ID. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].SearchExampleComparisons - - @property - def create_evaluation_job(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.create_evaluation_job`. - - Creates an evaluation job. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].CreateEvaluationJob - - @property - def update_evaluation_job(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.update_evaluation_job`. - - Updates an evaluation job. You can only update certain fields of the - job's ``EvaluationJobConfig``: ``humanAnnotationConfig.instruction``, - ``exampleCount``, and ``exampleSamplePercentage``. - - If you want to change any other aspect of the evaluation job, you must - delete the job and create a new one. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].UpdateEvaluationJob - - @property - def get_evaluation_job(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.get_evaluation_job`. 
- - Gets an evaluation job by resource name. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].GetEvaluationJob - - @property - def pause_evaluation_job(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.pause_evaluation_job`. - - Pauses an evaluation job. Pausing an evaluation job that is already - in a ``PAUSED`` state is a no-op. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].PauseEvaluationJob - - @property - def resume_evaluation_job(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.resume_evaluation_job`. - - Resumes a paused evaluation job. A deleted evaluation job can't be resumed. - Resuming a running or scheduled evaluation job is a no-op. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].ResumeEvaluationJob - - @property - def delete_evaluation_job(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.delete_evaluation_job`. - - Stops and deletes an evaluation job. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_labeling_service_stub"].DeleteEvaluationJob - - @property - def list_evaluation_jobs(self): - """Return the gRPC stub for :meth:`DataLabelingServiceClient.list_evaluation_jobs`. - - Lists all evaluation jobs within a project with possible filters. - Pagination is supported. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["data_labeling_service_stub"].ListEvaluationJobs diff --git a/google/cloud/datalabeling_v1beta1/proto/__init__.py b/google/cloud/datalabeling_v1beta1/proto/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/google/cloud/datalabeling_v1beta1/proto/annotation_pb2.py b/google/cloud/datalabeling_v1beta1/proto/annotation_pb2.py deleted file mode 100644 index c7303c4..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/annotation_pb2.py +++ /dev/null @@ -1,2583 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datalabeling_v1beta1/proto/annotation.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - annotation_spec_set_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/annotation.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - 
serialized_pb=b'\n8google/cloud/datalabeling_v1beta1/proto/annotation.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x41google/cloud/datalabeling_v1beta1/proto/annotation_spec_set.proto\x1a\x1egoogle/protobuf/duration.proto"\xe2\x02\n\nAnnotation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x11\x61nnotation_source\x18\x02 \x01(\x0e\x32\x33.google.cloud.datalabeling.v1beta1.AnnotationSource\x12L\n\x10\x61nnotation_value\x18\x03 \x01(\x0b\x32\x32.google.cloud.datalabeling.v1beta1.AnnotationValue\x12R\n\x13\x61nnotation_metadata\x18\x04 \x01(\x0b\x32\x35.google.cloud.datalabeling.v1beta1.AnnotationMetadata\x12T\n\x14\x61nnotation_sentiment\x18\x06 \x01(\x0e\x32\x36.google.cloud.datalabeling.v1beta1.AnnotationSentiment"\xd1\x07\n\x0f\x41nnotationValue\x12k\n\x1fimage_classification_annotation\x18\x01 \x01(\x0b\x32@.google.cloud.datalabeling.v1beta1.ImageClassificationAnnotationH\x00\x12h\n\x1eimage_bounding_poly_annotation\x18\x02 \x01(\x0b\x32>.google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotationH\x00\x12_\n\x19image_polyline_annotation\x18\x08 \x01(\x0b\x32:.google.cloud.datalabeling.v1beta1.ImagePolylineAnnotationH\x00\x12g\n\x1dimage_segmentation_annotation\x18\t \x01(\x0b\x32>.google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotationH\x00\x12i\n\x1etext_classification_annotation\x18\x03 \x01(\x0b\x32?.google.cloud.datalabeling.v1beta1.TextClassificationAnnotationH\x00\x12n\n!text_entity_extraction_annotation\x18\n \x01(\x0b\x32\x41.google.cloud.datalabeling.v1beta1.TextEntityExtractionAnnotationH\x00\x12k\n\x1fvideo_classification_annotation\x18\x04 \x01(\x0b\x32@.google.cloud.datalabeling.v1beta1.VideoClassificationAnnotationH\x00\x12l\n video_object_tracking_annotation\x18\x05 \x01(\x0b\x32@.google.cloud.datalabeling.v1beta1.VideoObjectTrackingAnnotationH\x00\x12Y\n\x16video_event_annotation\x18\x06 
\x01(\x0b\x32\x37.google.cloud.datalabeling.v1beta1.VideoEventAnnotationH\x00\x42\x0c\n\nvalue_type"k\n\x1dImageClassificationAnnotation\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec"\x1e\n\x06Vertex\x12\t\n\x01x\x18\x01 \x01(\x05\x12\t\n\x01y\x18\x02 \x01(\x05"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"K\n\x0c\x42oundingPoly\x12;\n\x08vertices\x18\x01 \x03(\x0b\x32).google.cloud.datalabeling.v1beta1.Vertex"j\n\x16NormalizedBoundingPoly\x12P\n\x13normalized_vertices\x18\x01 \x03(\x0b\x32\x33.google.cloud.datalabeling.v1beta1.NormalizedVertex"\xa2\x02\n\x1bImageBoundingPolyAnnotation\x12H\n\rbounding_poly\x18\x02 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.BoundingPolyH\x00\x12]\n\x18normalized_bounding_poly\x18\x03 \x01(\x0b\x32\x39.google.cloud.datalabeling.v1beta1.NormalizedBoundingPolyH\x00\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpecB\x0e\n\x0c\x62ounded_area"G\n\x08Polyline\x12;\n\x08vertices\x18\x01 \x03(\x0b\x32).google.cloud.datalabeling.v1beta1.Vertex"f\n\x12NormalizedPolyline\x12P\n\x13normalized_vertices\x18\x01 \x03(\x0b\x32\x33.google.cloud.datalabeling.v1beta1.NormalizedVertex"\x84\x02\n\x17ImagePolylineAnnotation\x12?\n\x08polyline\x18\x02 \x01(\x0b\x32+.google.cloud.datalabeling.v1beta1.PolylineH\x00\x12T\n\x13normalized_polyline\x18\x03 \x01(\x0b\x32\x35.google.cloud.datalabeling.v1beta1.NormalizedPolylineH\x00\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpecB\x06\n\x04poly"\xa2\x02\n\x1bImageSegmentationAnnotation\x12o\n\x11\x61nnotation_colors\x18\x01 \x03(\x0b\x32T.google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.AnnotationColorsEntry\x12\x11\n\tmime_type\x18\x02 \x01(\t\x12\x13\n\x0bimage_bytes\x18\x03 \x01(\x0c\x1aj\n\x15\x41nnotationColorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12@\n\x05value\x18\x02 
\x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec:\x02\x38\x01"j\n\x1cTextClassificationAnnotation\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec"\xbe\x01\n\x1eTextEntityExtractionAnnotation\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec\x12P\n\x12sequential_segment\x18\x02 \x01(\x0b\x32\x34.google.cloud.datalabeling.v1beta1.SequentialSegment"/\n\x11SequentialSegment\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05"w\n\x0bTimeSegment\x12\x34\n\x11start_time_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0f\x65nd_time_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xb1\x01\n\x1dVideoClassificationAnnotation\x12\x44\n\x0ctime_segment\x18\x01 \x01(\x0b\x32..google.cloud.datalabeling.v1beta1.TimeSegment\x12J\n\x0f\x61nnotation_spec\x18\x02 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec"\xfe\x01\n\x13ObjectTrackingFrame\x12H\n\rbounding_poly\x18\x01 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.BoundingPolyH\x00\x12]\n\x18normalized_bounding_poly\x18\x02 \x01(\x0b\x32\x39.google.cloud.datalabeling.v1beta1.NormalizedBoundingPolyH\x00\x12.\n\x0btime_offset\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x0e\n\x0c\x62ounded_area"\x89\x02\n\x1dVideoObjectTrackingAnnotation\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec\x12\x44\n\x0ctime_segment\x18\x02 \x01(\x0b\x32..google.cloud.datalabeling.v1beta1.TimeSegment\x12V\n\x16object_tracking_frames\x18\x03 \x03(\x0b\x32\x36.google.cloud.datalabeling.v1beta1.ObjectTrackingFrame"\xa8\x01\n\x14VideoEventAnnotation\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec\x12\x44\n\x0ctime_segment\x18\x02 
\x01(\x0b\x32..google.cloud.datalabeling.v1beta1.TimeSegment"d\n\x12\x41nnotationMetadata\x12N\n\x11operator_metadata\x18\x02 \x01(\x0b\x32\x33.google.cloud.datalabeling.v1beta1.OperatorMetadata"]\n\x10OperatorMetadata\x12\r\n\x05score\x18\x01 \x01(\x02\x12\x13\n\x0btotal_votes\x18\x02 \x01(\x05\x12\x13\n\x0blabel_votes\x18\x03 \x01(\x05\x12\x10\n\x08\x63omments\x18\x04 \x03(\t*C\n\x10\x41nnotationSource\x12!\n\x1d\x41NNOTATION_SOURCE_UNSPECIFIED\x10\x00\x12\x0c\n\x08OPERATOR\x10\x03*W\n\x13\x41nnotationSentiment\x12$\n ANNOTATION_SENTIMENT_UNSPECIFIED\x10\x00\x12\x0c\n\x08NEGATIVE\x10\x01\x12\x0c\n\x08POSITIVE\x10\x02*\x91\x04\n\x0e\x41nnotationType\x12\x1f\n\x1b\x41NNOTATION_TYPE_UNSPECIFIED\x10\x00\x12#\n\x1fIMAGE_CLASSIFICATION_ANNOTATION\x10\x01\x12!\n\x1dIMAGE_BOUNDING_BOX_ANNOTATION\x10\x02\x12*\n&IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION\x10\r\x12"\n\x1eIMAGE_BOUNDING_POLY_ANNOTATION\x10\n\x12\x1d\n\x19IMAGE_POLYLINE_ANNOTATION\x10\x0b\x12!\n\x1dIMAGE_SEGMENTATION_ANNOTATION\x10\x0c\x12)\n%VIDEO_SHOTS_CLASSIFICATION_ANNOTATION\x10\x03\x12$\n VIDEO_OBJECT_TRACKING_ANNOTATION\x10\x04\x12%\n!VIDEO_OBJECT_DETECTION_ANNOTATION\x10\x05\x12\x1a\n\x16VIDEO_EVENT_ANNOTATION\x10\x06\x12"\n\x1eTEXT_CLASSIFICATION_ANNOTATION\x10\x08\x12%\n!TEXT_ENTITY_EXTRACTION_ANNOTATION\x10\t\x12%\n!GENERAL_CLASSIFICATION_ANNOTATION\x10\x0e\x42x\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - ], -) - -_ANNOTATIONSOURCE = _descriptor.EnumDescriptor( - name="AnnotationSource", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSource", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - 
_descriptor.EnumValueDescriptor( - name="ANNOTATION_SOURCE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OPERATOR", - index=1, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4526, - serialized_end=4593, -) -_sym_db.RegisterEnumDescriptor(_ANNOTATIONSOURCE) - -AnnotationSource = enum_type_wrapper.EnumTypeWrapper(_ANNOTATIONSOURCE) -_ANNOTATIONSENTIMENT = _descriptor.EnumDescriptor( - name="AnnotationSentiment", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSentiment", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ANNOTATION_SENTIMENT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NEGATIVE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POSITIVE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4595, - serialized_end=4682, -) -_sym_db.RegisterEnumDescriptor(_ANNOTATIONSENTIMENT) - -AnnotationSentiment = enum_type_wrapper.EnumTypeWrapper(_ANNOTATIONSENTIMENT) -_ANNOTATIONTYPE = _descriptor.EnumDescriptor( - name="AnnotationType", - full_name="google.cloud.datalabeling.v1beta1.AnnotationType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ANNOTATION_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_CLASSIFICATION_ANNOTATION", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_BOUNDING_BOX_ANNOTATION", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION", - index=3, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_BOUNDING_POLY_ANNOTATION", - index=4, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_POLYLINE_ANNOTATION", - index=5, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_SEGMENTATION_ANNOTATION", - index=6, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VIDEO_SHOTS_CLASSIFICATION_ANNOTATION", - index=7, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VIDEO_OBJECT_TRACKING_ANNOTATION", - index=8, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VIDEO_OBJECT_DETECTION_ANNOTATION", - index=9, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VIDEO_EVENT_ANNOTATION", - index=10, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.EnumValueDescriptor( - name="TEXT_CLASSIFICATION_ANNOTATION", - index=11, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TEXT_ENTITY_EXTRACTION_ANNOTATION", - index=12, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GENERAL_CLASSIFICATION_ANNOTATION", - index=13, - number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4685, - serialized_end=5214, -) -_sym_db.RegisterEnumDescriptor(_ANNOTATIONTYPE) - -AnnotationType = enum_type_wrapper.EnumTypeWrapper(_ANNOTATIONTYPE) -ANNOTATION_SOURCE_UNSPECIFIED = 0 -OPERATOR = 3 -ANNOTATION_SENTIMENT_UNSPECIFIED = 0 -NEGATIVE = 1 -POSITIVE = 2 -ANNOTATION_TYPE_UNSPECIFIED = 0 -IMAGE_CLASSIFICATION_ANNOTATION = 1 -IMAGE_BOUNDING_BOX_ANNOTATION = 2 -IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION = 13 -IMAGE_BOUNDING_POLY_ANNOTATION = 10 -IMAGE_POLYLINE_ANNOTATION = 11 -IMAGE_SEGMENTATION_ANNOTATION = 12 -VIDEO_SHOTS_CLASSIFICATION_ANNOTATION = 3 -VIDEO_OBJECT_TRACKING_ANNOTATION = 4 -VIDEO_OBJECT_DETECTION_ANNOTATION = 5 -VIDEO_EVENT_ANNOTATION = 6 -TEXT_CLASSIFICATION_ANNOTATION = 8 -TEXT_ENTITY_EXTRACTION_ANNOTATION = 9 -GENERAL_CLASSIFICATION_ANNOTATION = 14 - - -_ANNOTATION = _descriptor.Descriptor( - name="Annotation", - full_name="google.cloud.datalabeling.v1beta1.Annotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.Annotation.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_source", - full_name="google.cloud.datalabeling.v1beta1.Annotation.annotation_source", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_value", - full_name="google.cloud.datalabeling.v1beta1.Annotation.annotation_value", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_metadata", - full_name="google.cloud.datalabeling.v1beta1.Annotation.annotation_metadata", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_sentiment", - full_name="google.cloud.datalabeling.v1beta1.Annotation.annotation_sentiment", - index=4, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - 
enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=252, - serialized_end=606, -) - - -_ANNOTATIONVALUE = _descriptor.Descriptor( - name="AnnotationValue", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="image_classification_annotation", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.image_classification_annotation", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_bounding_poly_annotation", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.image_bounding_poly_annotation", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_polyline_annotation", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.image_polyline_annotation", - index=2, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_segmentation_annotation", - 
full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.image_segmentation_annotation", - index=3, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_classification_annotation", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.text_classification_annotation", - index=4, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_entity_extraction_annotation", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.text_entity_extraction_annotation", - index=5, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_classification_annotation", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.video_classification_annotation", - index=6, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_object_tracking_annotation", - 
full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.video_object_tracking_annotation", - index=7, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_event_annotation", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.video_event_annotation", - index=8, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="value_type", - full_name="google.cloud.datalabeling.v1beta1.AnnotationValue.value_type", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=609, - serialized_end=1586, -) - - -_IMAGECLASSIFICATIONANNOTATION = _descriptor.Descriptor( - name="ImageClassificationAnnotation", - full_name="google.cloud.datalabeling.v1beta1.ImageClassificationAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.ImageClassificationAnnotation.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1588, - serialized_end=1695, -) - - -_VERTEX = _descriptor.Descriptor( - name="Vertex", - full_name="google.cloud.datalabeling.v1beta1.Vertex", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="x", - full_name="google.cloud.datalabeling.v1beta1.Vertex.x", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="y", - full_name="google.cloud.datalabeling.v1beta1.Vertex.y", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1697, - serialized_end=1727, -) - - -_NORMALIZEDVERTEX = _descriptor.Descriptor( - name="NormalizedVertex", - full_name="google.cloud.datalabeling.v1beta1.NormalizedVertex", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="x", - full_name="google.cloud.datalabeling.v1beta1.NormalizedVertex.x", - index=0, - number=1, - type=2, - cpp_type=6, - 
label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="y", - full_name="google.cloud.datalabeling.v1beta1.NormalizedVertex.y", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1729, - serialized_end=1769, -) - - -_BOUNDINGPOLY = _descriptor.Descriptor( - name="BoundingPoly", - full_name="google.cloud.datalabeling.v1beta1.BoundingPoly", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="vertices", - full_name="google.cloud.datalabeling.v1beta1.BoundingPoly.vertices", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1771, - serialized_end=1846, -) - - -_NORMALIZEDBOUNDINGPOLY = _descriptor.Descriptor( - name="NormalizedBoundingPoly", - full_name="google.cloud.datalabeling.v1beta1.NormalizedBoundingPoly", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="normalized_vertices", - full_name="google.cloud.datalabeling.v1beta1.NormalizedBoundingPoly.normalized_vertices", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1848, - serialized_end=1954, -) - - -_IMAGEBOUNDINGPOLYANNOTATION = _descriptor.Descriptor( - name="ImageBoundingPolyAnnotation", - full_name="google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="bounding_poly", - full_name="google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotation.bounding_poly", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="normalized_bounding_poly", - full_name="google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotation.normalized_bounding_poly", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotation.annotation_spec", - index=2, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="bounded_area", - full_name="google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotation.bounded_area", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=1957, - serialized_end=2247, -) - - -_POLYLINE = _descriptor.Descriptor( - name="Polyline", - full_name="google.cloud.datalabeling.v1beta1.Polyline", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="vertices", - full_name="google.cloud.datalabeling.v1beta1.Polyline.vertices", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2249, - serialized_end=2320, -) - - -_NORMALIZEDPOLYLINE = _descriptor.Descriptor( - name="NormalizedPolyline", - 
full_name="google.cloud.datalabeling.v1beta1.NormalizedPolyline", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="normalized_vertices", - full_name="google.cloud.datalabeling.v1beta1.NormalizedPolyline.normalized_vertices", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2322, - serialized_end=2424, -) - - -_IMAGEPOLYLINEANNOTATION = _descriptor.Descriptor( - name="ImagePolylineAnnotation", - full_name="google.cloud.datalabeling.v1beta1.ImagePolylineAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="polyline", - full_name="google.cloud.datalabeling.v1beta1.ImagePolylineAnnotation.polyline", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="normalized_polyline", - full_name="google.cloud.datalabeling.v1beta1.ImagePolylineAnnotation.normalized_polyline", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.ImagePolylineAnnotation.annotation_spec", - index=2, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="poly", - full_name="google.cloud.datalabeling.v1beta1.ImagePolylineAnnotation.poly", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=2427, - serialized_end=2687, -) - - -_IMAGESEGMENTATIONANNOTATION_ANNOTATIONCOLORSENTRY = _descriptor.Descriptor( - name="AnnotationColorsEntry", - full_name="google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.AnnotationColorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.AnnotationColorsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.AnnotationColorsEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2874, - serialized_end=2980, -) - -_IMAGESEGMENTATIONANNOTATION = _descriptor.Descriptor( - name="ImageSegmentationAnnotation", - full_name="google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_colors", - full_name="google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.annotation_colors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="mime_type", - full_name="google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.mime_type", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_bytes", - full_name="google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.image_bytes", - index=2, - number=3, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_IMAGESEGMENTATIONANNOTATION_ANNOTATIONCOLORSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2690, - serialized_end=2980, -) - - -_TEXTCLASSIFICATIONANNOTATION = _descriptor.Descriptor( - name="TextClassificationAnnotation", - full_name="google.cloud.datalabeling.v1beta1.TextClassificationAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.TextClassificationAnnotation.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2982, - serialized_end=3088, -) - - -_TEXTENTITYEXTRACTIONANNOTATION = _descriptor.Descriptor( - name="TextEntityExtractionAnnotation", - full_name="google.cloud.datalabeling.v1beta1.TextEntityExtractionAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.TextEntityExtractionAnnotation.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sequential_segment", - full_name="google.cloud.datalabeling.v1beta1.TextEntityExtractionAnnotation.sequential_segment", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3091, - serialized_end=3281, -) - - -_SEQUENTIALSEGMENT = _descriptor.Descriptor( - name="SequentialSegment", - full_name="google.cloud.datalabeling.v1beta1.SequentialSegment", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="start", - full_name="google.cloud.datalabeling.v1beta1.SequentialSegment.start", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="end", - full_name="google.cloud.datalabeling.v1beta1.SequentialSegment.end", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], 
- extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3283, - serialized_end=3330, -) - - -_TIMESEGMENT = _descriptor.Descriptor( - name="TimeSegment", - full_name="google.cloud.datalabeling.v1beta1.TimeSegment", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="start_time_offset", - full_name="google.cloud.datalabeling.v1beta1.TimeSegment.start_time_offset", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="end_time_offset", - full_name="google.cloud.datalabeling.v1beta1.TimeSegment.end_time_offset", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3332, - serialized_end=3451, -) - - -_VIDEOCLASSIFICATIONANNOTATION = _descriptor.Descriptor( - name="VideoClassificationAnnotation", - full_name="google.cloud.datalabeling.v1beta1.VideoClassificationAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="time_segment", - 
full_name="google.cloud.datalabeling.v1beta1.VideoClassificationAnnotation.time_segment", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.VideoClassificationAnnotation.annotation_spec", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3454, - serialized_end=3631, -) - - -_OBJECTTRACKINGFRAME = _descriptor.Descriptor( - name="ObjectTrackingFrame", - full_name="google.cloud.datalabeling.v1beta1.ObjectTrackingFrame", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="bounding_poly", - full_name="google.cloud.datalabeling.v1beta1.ObjectTrackingFrame.bounding_poly", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="normalized_bounding_poly", - full_name="google.cloud.datalabeling.v1beta1.ObjectTrackingFrame.normalized_bounding_poly", - index=1, - number=2, - 
type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time_offset", - full_name="google.cloud.datalabeling.v1beta1.ObjectTrackingFrame.time_offset", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="bounded_area", - full_name="google.cloud.datalabeling.v1beta1.ObjectTrackingFrame.bounded_area", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=3634, - serialized_end=3888, -) - - -_VIDEOOBJECTTRACKINGANNOTATION = _descriptor.Descriptor( - name="VideoObjectTrackingAnnotation", - full_name="google.cloud.datalabeling.v1beta1.VideoObjectTrackingAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.VideoObjectTrackingAnnotation.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="time_segment", - full_name="google.cloud.datalabeling.v1beta1.VideoObjectTrackingAnnotation.time_segment", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="object_tracking_frames", - full_name="google.cloud.datalabeling.v1beta1.VideoObjectTrackingAnnotation.object_tracking_frames", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3891, - serialized_end=4156, -) - - -_VIDEOEVENTANNOTATION = _descriptor.Descriptor( - name="VideoEventAnnotation", - full_name="google.cloud.datalabeling.v1beta1.VideoEventAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.VideoEventAnnotation.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time_segment", - full_name="google.cloud.datalabeling.v1beta1.VideoEventAnnotation.time_segment", - 
index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4159, - serialized_end=4327, -) - - -_ANNOTATIONMETADATA = _descriptor.Descriptor( - name="AnnotationMetadata", - full_name="google.cloud.datalabeling.v1beta1.AnnotationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="operator_metadata", - full_name="google.cloud.datalabeling.v1beta1.AnnotationMetadata.operator_metadata", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4329, - serialized_end=4429, -) - - -_OPERATORMETADATA = _descriptor.Descriptor( - name="OperatorMetadata", - full_name="google.cloud.datalabeling.v1beta1.OperatorMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="score", - full_name="google.cloud.datalabeling.v1beta1.OperatorMetadata.score", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="total_votes", - full_name="google.cloud.datalabeling.v1beta1.OperatorMetadata.total_votes", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="label_votes", - full_name="google.cloud.datalabeling.v1beta1.OperatorMetadata.label_votes", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="comments", - full_name="google.cloud.datalabeling.v1beta1.OperatorMetadata.comments", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4431, - serialized_end=4524, -) - -_ANNOTATION.fields_by_name["annotation_source"].enum_type = _ANNOTATIONSOURCE -_ANNOTATION.fields_by_name["annotation_value"].message_type = _ANNOTATIONVALUE -_ANNOTATION.fields_by_name["annotation_metadata"].message_type = _ANNOTATIONMETADATA 
-_ANNOTATION.fields_by_name["annotation_sentiment"].enum_type = _ANNOTATIONSENTIMENT -_ANNOTATIONVALUE.fields_by_name[ - "image_classification_annotation" -].message_type = _IMAGECLASSIFICATIONANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "image_bounding_poly_annotation" -].message_type = _IMAGEBOUNDINGPOLYANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "image_polyline_annotation" -].message_type = _IMAGEPOLYLINEANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "image_segmentation_annotation" -].message_type = _IMAGESEGMENTATIONANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "text_classification_annotation" -].message_type = _TEXTCLASSIFICATIONANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "text_entity_extraction_annotation" -].message_type = _TEXTENTITYEXTRACTIONANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "video_classification_annotation" -].message_type = _VIDEOCLASSIFICATIONANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "video_object_tracking_annotation" -].message_type = _VIDEOOBJECTTRACKINGANNOTATION -_ANNOTATIONVALUE.fields_by_name[ - "video_event_annotation" -].message_type = _VIDEOEVENTANNOTATION -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["image_classification_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "image_classification_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["image_bounding_poly_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "image_bounding_poly_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["image_polyline_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "image_polyline_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - 
_ANNOTATIONVALUE.fields_by_name["image_segmentation_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "image_segmentation_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["text_classification_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "text_classification_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["text_entity_extraction_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "text_entity_extraction_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["video_classification_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "video_classification_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["video_object_tracking_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "video_object_tracking_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_ANNOTATIONVALUE.oneofs_by_name["value_type"].fields.append( - _ANNOTATIONVALUE.fields_by_name["video_event_annotation"] -) -_ANNOTATIONVALUE.fields_by_name[ - "video_event_annotation" -].containing_oneof = _ANNOTATIONVALUE.oneofs_by_name["value_type"] -_IMAGECLASSIFICATIONANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_BOUNDINGPOLY.fields_by_name["vertices"].message_type = _VERTEX -_NORMALIZEDBOUNDINGPOLY.fields_by_name[ - "normalized_vertices" -].message_type = _NORMALIZEDVERTEX -_IMAGEBOUNDINGPOLYANNOTATION.fields_by_name[ - "bounding_poly" -].message_type = 
_BOUNDINGPOLY -_IMAGEBOUNDINGPOLYANNOTATION.fields_by_name[ - "normalized_bounding_poly" -].message_type = _NORMALIZEDBOUNDINGPOLY -_IMAGEBOUNDINGPOLYANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_IMAGEBOUNDINGPOLYANNOTATION.oneofs_by_name["bounded_area"].fields.append( - _IMAGEBOUNDINGPOLYANNOTATION.fields_by_name["bounding_poly"] -) -_IMAGEBOUNDINGPOLYANNOTATION.fields_by_name[ - "bounding_poly" -].containing_oneof = _IMAGEBOUNDINGPOLYANNOTATION.oneofs_by_name["bounded_area"] -_IMAGEBOUNDINGPOLYANNOTATION.oneofs_by_name["bounded_area"].fields.append( - _IMAGEBOUNDINGPOLYANNOTATION.fields_by_name["normalized_bounding_poly"] -) -_IMAGEBOUNDINGPOLYANNOTATION.fields_by_name[ - "normalized_bounding_poly" -].containing_oneof = _IMAGEBOUNDINGPOLYANNOTATION.oneofs_by_name["bounded_area"] -_POLYLINE.fields_by_name["vertices"].message_type = _VERTEX -_NORMALIZEDPOLYLINE.fields_by_name[ - "normalized_vertices" -].message_type = _NORMALIZEDVERTEX -_IMAGEPOLYLINEANNOTATION.fields_by_name["polyline"].message_type = _POLYLINE -_IMAGEPOLYLINEANNOTATION.fields_by_name[ - "normalized_polyline" -].message_type = _NORMALIZEDPOLYLINE -_IMAGEPOLYLINEANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_IMAGEPOLYLINEANNOTATION.oneofs_by_name["poly"].fields.append( - _IMAGEPOLYLINEANNOTATION.fields_by_name["polyline"] -) -_IMAGEPOLYLINEANNOTATION.fields_by_name[ - "polyline" -].containing_oneof = _IMAGEPOLYLINEANNOTATION.oneofs_by_name["poly"] -_IMAGEPOLYLINEANNOTATION.oneofs_by_name["poly"].fields.append( - _IMAGEPOLYLINEANNOTATION.fields_by_name["normalized_polyline"] -) -_IMAGEPOLYLINEANNOTATION.fields_by_name[ - "normalized_polyline" -].containing_oneof = _IMAGEPOLYLINEANNOTATION.oneofs_by_name["poly"] 
-_IMAGESEGMENTATIONANNOTATION_ANNOTATIONCOLORSENTRY.fields_by_name[ - "value" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_IMAGESEGMENTATIONANNOTATION_ANNOTATIONCOLORSENTRY.containing_type = ( - _IMAGESEGMENTATIONANNOTATION -) -_IMAGESEGMENTATIONANNOTATION.fields_by_name[ - "annotation_colors" -].message_type = _IMAGESEGMENTATIONANNOTATION_ANNOTATIONCOLORSENTRY -_TEXTCLASSIFICATIONANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_TEXTENTITYEXTRACTIONANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_TEXTENTITYEXTRACTIONANNOTATION.fields_by_name[ - "sequential_segment" -].message_type = _SEQUENTIALSEGMENT -_TIMESEGMENT.fields_by_name[ - "start_time_offset" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_TIMESEGMENT.fields_by_name[ - "end_time_offset" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_VIDEOCLASSIFICATIONANNOTATION.fields_by_name[ - "time_segment" -].message_type = _TIMESEGMENT -_VIDEOCLASSIFICATIONANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_OBJECTTRACKINGFRAME.fields_by_name["bounding_poly"].message_type = _BOUNDINGPOLY -_OBJECTTRACKINGFRAME.fields_by_name[ - "normalized_bounding_poly" -].message_type = _NORMALIZEDBOUNDINGPOLY -_OBJECTTRACKINGFRAME.fields_by_name[ - "time_offset" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_OBJECTTRACKINGFRAME.oneofs_by_name["bounded_area"].fields.append( - _OBJECTTRACKINGFRAME.fields_by_name["bounding_poly"] -) -_OBJECTTRACKINGFRAME.fields_by_name[ - "bounding_poly" -].containing_oneof = 
_OBJECTTRACKINGFRAME.oneofs_by_name["bounded_area"] -_OBJECTTRACKINGFRAME.oneofs_by_name["bounded_area"].fields.append( - _OBJECTTRACKINGFRAME.fields_by_name["normalized_bounding_poly"] -) -_OBJECTTRACKINGFRAME.fields_by_name[ - "normalized_bounding_poly" -].containing_oneof = _OBJECTTRACKINGFRAME.oneofs_by_name["bounded_area"] -_VIDEOOBJECTTRACKINGANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_VIDEOOBJECTTRACKINGANNOTATION.fields_by_name[ - "time_segment" -].message_type = _TIMESEGMENT -_VIDEOOBJECTTRACKINGANNOTATION.fields_by_name[ - "object_tracking_frames" -].message_type = _OBJECTTRACKINGFRAME -_VIDEOEVENTANNOTATION.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_VIDEOEVENTANNOTATION.fields_by_name["time_segment"].message_type = _TIMESEGMENT -_ANNOTATIONMETADATA.fields_by_name["operator_metadata"].message_type = _OPERATORMETADATA -DESCRIPTOR.message_types_by_name["Annotation"] = _ANNOTATION -DESCRIPTOR.message_types_by_name["AnnotationValue"] = _ANNOTATIONVALUE -DESCRIPTOR.message_types_by_name[ - "ImageClassificationAnnotation" -] = _IMAGECLASSIFICATIONANNOTATION -DESCRIPTOR.message_types_by_name["Vertex"] = _VERTEX -DESCRIPTOR.message_types_by_name["NormalizedVertex"] = _NORMALIZEDVERTEX -DESCRIPTOR.message_types_by_name["BoundingPoly"] = _BOUNDINGPOLY -DESCRIPTOR.message_types_by_name["NormalizedBoundingPoly"] = _NORMALIZEDBOUNDINGPOLY -DESCRIPTOR.message_types_by_name[ - "ImageBoundingPolyAnnotation" -] = _IMAGEBOUNDINGPOLYANNOTATION -DESCRIPTOR.message_types_by_name["Polyline"] = _POLYLINE -DESCRIPTOR.message_types_by_name["NormalizedPolyline"] = _NORMALIZEDPOLYLINE -DESCRIPTOR.message_types_by_name["ImagePolylineAnnotation"] = _IMAGEPOLYLINEANNOTATION -DESCRIPTOR.message_types_by_name[ - "ImageSegmentationAnnotation" 
-] = _IMAGESEGMENTATIONANNOTATION -DESCRIPTOR.message_types_by_name[ - "TextClassificationAnnotation" -] = _TEXTCLASSIFICATIONANNOTATION -DESCRIPTOR.message_types_by_name[ - "TextEntityExtractionAnnotation" -] = _TEXTENTITYEXTRACTIONANNOTATION -DESCRIPTOR.message_types_by_name["SequentialSegment"] = _SEQUENTIALSEGMENT -DESCRIPTOR.message_types_by_name["TimeSegment"] = _TIMESEGMENT -DESCRIPTOR.message_types_by_name[ - "VideoClassificationAnnotation" -] = _VIDEOCLASSIFICATIONANNOTATION -DESCRIPTOR.message_types_by_name["ObjectTrackingFrame"] = _OBJECTTRACKINGFRAME -DESCRIPTOR.message_types_by_name[ - "VideoObjectTrackingAnnotation" -] = _VIDEOOBJECTTRACKINGANNOTATION -DESCRIPTOR.message_types_by_name["VideoEventAnnotation"] = _VIDEOEVENTANNOTATION -DESCRIPTOR.message_types_by_name["AnnotationMetadata"] = _ANNOTATIONMETADATA -DESCRIPTOR.message_types_by_name["OperatorMetadata"] = _OPERATORMETADATA -DESCRIPTOR.enum_types_by_name["AnnotationSource"] = _ANNOTATIONSOURCE -DESCRIPTOR.enum_types_by_name["AnnotationSentiment"] = _ANNOTATIONSENTIMENT -DESCRIPTOR.enum_types_by_name["AnnotationType"] = _ANNOTATIONTYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Annotation = _reflection.GeneratedProtocolMessageType( - "Annotation", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Annotation for Example. Each example may have one or more annotations. - For example in image classification problem, each image might have one - or more labels. We call labels binded with this image an Annotation. - - Attributes: - name: - Output only. Unique name of this annotation, format is: proje - cts/{project_id}/datasets/{dataset_id}/annotatedDatasets/{anno - tated_dataset}/examples/{example_id}/annotations/{annotation_i - d} - annotation_source: - Output only. The source of the annotation. - annotation_value: - Output only. 
This is the actual annotation value, e.g - classification, bounding box values are stored here. - annotation_metadata: - Output only. Annotation metadata, including information like - votes for labels. - annotation_sentiment: - Output only. Sentiment for this annotation. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Annotation) - }, -) -_sym_db.RegisterMessage(Annotation) - -AnnotationValue = _reflection.GeneratedProtocolMessageType( - "AnnotationValue", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATIONVALUE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Annotation value for an example. - - Attributes: - image_classification_annotation: - Annotation value for image classification case. - image_bounding_poly_annotation: - Annotation value for image bounding box, oriented bounding box - and polygon cases. - image_polyline_annotation: - Annotation value for image polyline cases. Polyline here is - different from BoundingPoly. It is formed by line segments - connected to each other but not closed form(Bounding Poly). - The line segments can cross each other. - image_segmentation_annotation: - Annotation value for image segmentation. - text_classification_annotation: - Annotation value for text classification case. - text_entity_extraction_annotation: - Annotation value for text entity extraction case. - video_classification_annotation: - Annotation value for video classification case. - video_object_tracking_annotation: - Annotation value for video object detection and tracking case. - video_event_annotation: - Annotation value for video event case. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.AnnotationValue) - }, -) -_sym_db.RegisterMessage(AnnotationValue) - -ImageClassificationAnnotation = _reflection.GeneratedProtocolMessageType( - "ImageClassificationAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _IMAGECLASSIFICATIONANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Image classification annotation definition. - - Attributes: - annotation_spec: - Label of image. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImageClassificationAnnotation) - }, -) -_sym_db.RegisterMessage(ImageClassificationAnnotation) - -Vertex = _reflection.GeneratedProtocolMessageType( - "Vertex", - (_message.Message,), - { - "DESCRIPTOR": _VERTEX, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """X coordinate. - - Attributes: - y: - Y coordinate. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Vertex) - }, -) -_sym_db.RegisterMessage(Vertex) - -NormalizedVertex = _reflection.GeneratedProtocolMessageType( - "NormalizedVertex", - (_message.Message,), - { - "DESCRIPTOR": _NORMALIZEDVERTEX, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """X coordinate. - - Attributes: - y: - Y coordinate. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.NormalizedVertex) - }, -) -_sym_db.RegisterMessage(NormalizedVertex) - -BoundingPoly = _reflection.GeneratedProtocolMessageType( - "BoundingPoly", - (_message.Message,), - { - "DESCRIPTOR": _BOUNDINGPOLY, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """A bounding polygon in the image. - - Attributes: - vertices: - The bounding polygon vertices. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.BoundingPoly) - }, -) -_sym_db.RegisterMessage(BoundingPoly) - -NormalizedBoundingPoly = _reflection.GeneratedProtocolMessageType( - "NormalizedBoundingPoly", - (_message.Message,), - { - "DESCRIPTOR": _NORMALIZEDBOUNDINGPOLY, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Normalized bounding polygon. - - Attributes: - normalized_vertices: - The bounding polygon normalized vertices. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.NormalizedBoundingPoly) - }, -) -_sym_db.RegisterMessage(NormalizedBoundingPoly) - -ImageBoundingPolyAnnotation = _reflection.GeneratedProtocolMessageType( - "ImageBoundingPolyAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _IMAGEBOUNDINGPOLYANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Image bounding poly annotation. It represents a polygon including - bounding box in the image. - - Attributes: - bounded_area: - The region of the polygon. If it is a bounding box, it is - guaranteed to be four points. - annotation_spec: - Label of object in this bounding polygon. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotation) - }, -) -_sym_db.RegisterMessage(ImageBoundingPolyAnnotation) - -Polyline = _reflection.GeneratedProtocolMessageType( - "Polyline", - (_message.Message,), - { - "DESCRIPTOR": _POLYLINE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """A line with multiple line segments. - - Attributes: - vertices: - The polyline vertices. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Polyline) - }, -) -_sym_db.RegisterMessage(Polyline) - -NormalizedPolyline = _reflection.GeneratedProtocolMessageType( - "NormalizedPolyline", - (_message.Message,), - { - "DESCRIPTOR": _NORMALIZEDPOLYLINE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Normalized polyline. - - Attributes: - normalized_vertices: - The normalized polyline vertices. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.NormalizedPolyline) - }, -) -_sym_db.RegisterMessage(NormalizedPolyline) - -ImagePolylineAnnotation = _reflection.GeneratedProtocolMessageType( - "ImagePolylineAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _IMAGEPOLYLINEANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """A polyline for the image annotation. - - Attributes: - annotation_spec: - Label of this polyline. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImagePolylineAnnotation) - }, -) -_sym_db.RegisterMessage(ImagePolylineAnnotation) - -ImageSegmentationAnnotation = _reflection.GeneratedProtocolMessageType( - "ImageSegmentationAnnotation", - (_message.Message,), - { - "AnnotationColorsEntry": _reflection.GeneratedProtocolMessageType( - "AnnotationColorsEntry", - (_message.Message,), - { - "DESCRIPTOR": _IMAGESEGMENTATIONANNOTATION_ANNOTATIONCOLORSENTRY, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.AnnotationColorsEntry) - }, - ), - "DESCRIPTOR": _IMAGESEGMENTATIONANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Image segmentation annotation. - - Attributes: - annotation_colors: - The mapping between rgb color and annotation spec. 
The key is - the rgb color represented in format of rgb(0, 0, 0). The value - is the AnnotationSpec. - mime_type: - Image format. - image_bytes: - A byte string of a full image’s color map. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation) - }, -) -_sym_db.RegisterMessage(ImageSegmentationAnnotation) -_sym_db.RegisterMessage(ImageSegmentationAnnotation.AnnotationColorsEntry) - -TextClassificationAnnotation = _reflection.GeneratedProtocolMessageType( - "TextClassificationAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _TEXTCLASSIFICATIONANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Text classification annotation. - - Attributes: - annotation_spec: - Label of the text. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TextClassificationAnnotation) - }, -) -_sym_db.RegisterMessage(TextClassificationAnnotation) - -TextEntityExtractionAnnotation = _reflection.GeneratedProtocolMessageType( - "TextEntityExtractionAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _TEXTENTITYEXTRACTIONANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Text entity extraction annotation. - - Attributes: - annotation_spec: - Label of the text entities. - sequential_segment: - Position of the entity. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TextEntityExtractionAnnotation) - }, -) -_sym_db.RegisterMessage(TextEntityExtractionAnnotation) - -SequentialSegment = _reflection.GeneratedProtocolMessageType( - "SequentialSegment", - (_message.Message,), - { - "DESCRIPTOR": _SEQUENTIALSEGMENT, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Start and end position in a sequence (e.g. text segment). - - Attributes: - start: - Start position (inclusive). - end: - End position (exclusive). 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SequentialSegment) - }, -) -_sym_db.RegisterMessage(SequentialSegment) - -TimeSegment = _reflection.GeneratedProtocolMessageType( - "TimeSegment", - (_message.Message,), - { - "DESCRIPTOR": _TIMESEGMENT, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """A time period inside of an example that has a time dimension - (e.g. video). - - Attributes: - start_time_offset: - Start of the time segment (inclusive), represented as the - duration since the example start. - end_time_offset: - End of the time segment (exclusive), represented as the - duration since the example start. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TimeSegment) - }, -) -_sym_db.RegisterMessage(TimeSegment) - -VideoClassificationAnnotation = _reflection.GeneratedProtocolMessageType( - "VideoClassificationAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _VIDEOCLASSIFICATIONANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Video classification annotation. - - Attributes: - time_segment: - The time segment of the video to which the annotation applies. - annotation_spec: - Label of the segment specified by time_segment. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.VideoClassificationAnnotation) - }, -) -_sym_db.RegisterMessage(VideoClassificationAnnotation) - -ObjectTrackingFrame = _reflection.GeneratedProtocolMessageType( - "ObjectTrackingFrame", - (_message.Message,), - { - "DESCRIPTOR": _OBJECTTRACKINGFRAME, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Video frame level annotation for object detection and tracking. - - Attributes: - bounded_area: - The bounding box location of this object track for the frame. - time_offset: - The time offset of this frame relative to the beginning of the - video. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ObjectTrackingFrame) - }, -) -_sym_db.RegisterMessage(ObjectTrackingFrame) - -VideoObjectTrackingAnnotation = _reflection.GeneratedProtocolMessageType( - "VideoObjectTrackingAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _VIDEOOBJECTTRACKINGANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Video object tracking annotation. - - Attributes: - annotation_spec: - Label of the object tracked in this annotation. - time_segment: - The time segment of the video to which object tracking - applies. - object_tracking_frames: - The list of frames where this object track appears. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.VideoObjectTrackingAnnotation) - }, -) -_sym_db.RegisterMessage(VideoObjectTrackingAnnotation) - -VideoEventAnnotation = _reflection.GeneratedProtocolMessageType( - "VideoEventAnnotation", - (_message.Message,), - { - "DESCRIPTOR": _VIDEOEVENTANNOTATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Video event annotation. - - Attributes: - annotation_spec: - Label of the event in this annotation. - time_segment: - The time segment of the video to which the annotation applies. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.VideoEventAnnotation) - }, -) -_sym_db.RegisterMessage(VideoEventAnnotation) - -AnnotationMetadata = _reflection.GeneratedProtocolMessageType( - "AnnotationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """Additional information associated with the annotation. - - Attributes: - operator_metadata: - Metadata related to human labeling. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.AnnotationMetadata) - }, -) -_sym_db.RegisterMessage(AnnotationMetadata) - -OperatorMetadata = _reflection.GeneratedProtocolMessageType( - "OperatorMetadata", - (_message.Message,), - { - "DESCRIPTOR": _OPERATORMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_pb2", - "__doc__": """General information useful for labels coming from contributors. - - Attributes: - score: - Confidence score corresponding to a label. For examle, if 3 - contributors have answered the question and 2 of them agree on - the final label, the confidence score will be 0.67 (2/3). - total_votes: - The total number of contributors that answer this question. - label_votes: - The total number of contributors that choose this label. - comments: - Comments from contributors. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.OperatorMetadata) - }, -) -_sym_db.RegisterMessage(OperatorMetadata) - - -DESCRIPTOR._options = None -_IMAGESEGMENTATIONANNOTATION_ANNOTATIONCOLORSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/annotation_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/annotation_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/annotation_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2.py b/google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2.py deleted file mode 100644 index 80dfcbd..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2.py +++ /dev/null @@ -1,278 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/datalabeling_v1beta1/proto/annotation_spec_set.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/annotation_spec_set.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\nAgoogle/cloud/datalabeling_v1beta1/proto/annotation_spec_set.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto"\xa6\x02\n\x11\x41nnotationSpecSet\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12K\n\x10\x61nnotation_specs\x18\x04 \x03(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec\x12\x1a\n\x12\x62locking_resources\x18\x05 \x03(\t:o\xea\x41l\n-datalabeling.googleapis.com/AnnotationSpecSet\x12;projects/{project}/annotationSpecSets/{annotation_spec_set}";\n\x0e\x41nnotationSpec\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\tBx\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - ], -) - - -_ANNOTATIONSPECSET = 
_descriptor.Descriptor( - name="AnnotationSpecSet", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpecSet", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpecSet.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpecSet.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpecSet.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_specs", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpecSet.annotation_specs", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="blocking_resources", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpecSet.blocking_resources", - index=4, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352Al\n-datalabeling.googleapis.com/AnnotationSpecSet\022;projects/{project}/annotationSpecSets/{annotation_spec_set}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=162, - serialized_end=456, -) - - -_ANNOTATIONSPEC = _descriptor.Descriptor( - name="AnnotationSpec", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpec", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpec.display_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.datalabeling.v1beta1.AnnotationSpec.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=458, - serialized_end=517, -) - -_ANNOTATIONSPECSET.fields_by_name["annotation_specs"].message_type = _ANNOTATIONSPEC -DESCRIPTOR.message_types_by_name["AnnotationSpecSet"] = _ANNOTATIONSPECSET -DESCRIPTOR.message_types_by_name["AnnotationSpec"] = _ANNOTATIONSPEC -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AnnotationSpecSet = _reflection.GeneratedProtocolMessageType( - "AnnotationSpecSet", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATIONSPECSET, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_spec_set_pb2", - "__doc__": """An AnnotationSpecSet is a collection of label definitions. For - example, in image classification tasks, you define a set of possible - labels for images as an AnnotationSpecSet. An AnnotationSpecSet is - immutable upon creation. - - Attributes: - name: - Output only. The AnnotationSpecSet resource name in the - following format: “projects/{project_id}/annotationSpecSets/{ - annotation_spec_set_id}” - display_name: - Required. The display name for AnnotationSpecSet that you - define when you create it. Maximum of 64 characters. - description: - Optional. User-provided description of the annotation - specification set. The description can be up to 10,000 - characters long. - annotation_specs: - Required. The array of AnnotationSpecs that you define when - you create the AnnotationSpecSet. These are the possible - labels for the labeling task. - blocking_resources: - Output only. The names of any related resources that are - blocking changes to the annotation spec set. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.AnnotationSpecSet) - }, -) -_sym_db.RegisterMessage(AnnotationSpecSet) - -AnnotationSpec = _reflection.GeneratedProtocolMessageType( - "AnnotationSpec", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATIONSPEC, - "__module__": "google.cloud.datalabeling_v1beta1.proto.annotation_spec_set_pb2", - "__doc__": """Container of information related to one possible annotation that can - be used in a labeling task. For example, an image classification task - where images are labeled as ``dog`` or ``cat`` must reference an - AnnotationSpec for ``dog`` and an AnnotationSpec for ``cat``. - - Attributes: - display_name: - Required. The display name of the AnnotationSpec. Maximum of - 64 characters. - description: - Optional. User-provided description of the annotation - specification. The description can be up to 10,000 characters - long. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.AnnotationSpec) - }, -) -_sym_db.RegisterMessage(AnnotationSpec) - - -DESCRIPTOR._options = None -_ANNOTATIONSPECSET._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/annotation_spec_set_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2.py b/google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2.py deleted file mode 100644 index 3e9bdfa..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2.py +++ /dev/null @@ -1,5074 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datalabeling_v1beta1/proto/data_labeling_service.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - annotation_spec_set_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - dataset_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - evaluation_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - evaluation_job_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - human_annotation_config_pb2 as 
google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - instruction_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/data_labeling_service.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\nCgoogle/cloud/datalabeling_v1beta1/proto/data_labeling_service.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x41google/cloud/datalabeling_v1beta1/proto/annotation_spec_set.proto\x1a\x35google/cloud/datalabeling_v1beta1/proto/dataset.proto\x1a\x38google/cloud/datalabeling_v1beta1/proto/evaluation.proto\x1a\n\x06parent\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datalabeling.googleapis.com/Evaluation\x12\x13\n\x06\x66ilter\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x04 \x01(\tB\x03\xe0\x41\x01"x\n\x19SearchEvaluationsResponse\x12\x42\n\x0b\x65valuations\x18\x01 \x03(\x0b\x32-.google.cloud.datalabeling.v1beta1.Evaluation\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x92\x01\n\x1fSearchExampleComparisonsRequest\x12>\n\x06parent\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datalabeling.googleapis.com/Evaluation\x12\x16\n\tpage_size\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\xdb\x02\n SearchExampleComparisonsResponse\x12r\n\x13\x65xample_comparisons\x18\x01 \x03(\x0b\x32U.google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse.ExampleComparison\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x1a\xa9\x01\n\x11\x45xampleComparison\x12H\n\x14ground_truth_example\x18\x01 \x01(\x0b\x32*.google.cloud.datalabeling.v1beta1.Example\x12J\n\x16model_created_examples\x18\x02 \x03(\x0b\x32*.google.cloud.datalabeling.v1beta1.Example"\xa5\x01\n\x1a\x43reateEvaluationJobRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x42\n\x03job\x18\x02 \x01(\x0b\x32\x30.google.cloud.datalabeling.v1beta1.EvaluationJobB\x03\xe0\x41\x02"\xa1\x01\n\x1aUpdateEvaluationJobRequest\x12M\n\x0e\x65valuation_job\x18\x01 \x01(\x0b\x32\x30.google.cloud.datalabeling.v1beta1.EvaluationJobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x01"Z\n\x17GetEvaluationJobRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)datalabeling.googleapis.com/EvaluationJob"\\\n\x19PauseEvaluationJobRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)datalabeling.googleapis.com/EvaluationJob"]\n\x1aResumeEvaluationJobRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)datalabeling.googleapis.com/EvaluationJob"]\n\x1a\x44\x65leteEvaluationJobRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)datalabeling.googleapis.com/EvaluationJob"\xa6\x01\n\x19ListEvaluationJobsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x13\n\x06\x66ilter\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x04 \x01(\tB\x03\xe0\x41\x01"\x80\x01\n\x1aListEvaluationJobsResponse\x12I\n\x0f\x65valuation_jobs\x18\x01 
\x03(\x0b\x32\x30.google.cloud.datalabeling.v1beta1.EvaluationJob\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xa6\x38\n\x13\x44\x61taLabelingService\x12\xb7\x01\n\rCreateDataset\x12\x37.google.cloud.datalabeling.v1beta1.CreateDatasetRequest\x1a*.google.cloud.datalabeling.v1beta1.Dataset"A\x82\xd3\xe4\x93\x02*"%/v1beta1/{parent=projects/*}/datasets:\x01*\xda\x41\x0eparent,dataset\x12\xa4\x01\n\nGetDataset\x12\x34.google.cloud.datalabeling.v1beta1.GetDatasetRequest\x1a*.google.cloud.datalabeling.v1beta1.Dataset"4\x82\xd3\xe4\x93\x02\'\x12%/v1beta1/{name=projects/*/datasets/*}\xda\x41\x04name\x12\xbe\x01\n\x0cListDatasets\x12\x36.google.cloud.datalabeling.v1beta1.ListDatasetsRequest\x1a\x37.google.cloud.datalabeling.v1beta1.ListDatasetsResponse"=\x82\xd3\xe4\x93\x02\'\x12%/v1beta1/{parent=projects/*}/datasets\xda\x41\rparent,filter\x12\x96\x01\n\rDeleteDataset\x12\x37.google.cloud.datalabeling.v1beta1.DeleteDatasetRequest\x1a\x16.google.protobuf.Empty"4\x82\xd3\xe4\x93\x02\'*%/v1beta1/{name=projects/*/datasets/*}\xda\x41\x04name\x12\xf0\x01\n\nImportData\x12\x34.google.cloud.datalabeling.v1beta1.ImportDataRequest\x1a\x1d.google.longrunning.Operation"\x8c\x01\x82\xd3\xe4\x93\x02\x35"0/v1beta1/{name=projects/*/datasets/*}:importData:\x01*\xda\x41\x11name,input_config\xca\x41:\n\x1bImportDataOperationResponse\x12\x1bImportDataOperationMetadata\x12\x8a\x02\n\nExportData\x12\x34.google.cloud.datalabeling.v1beta1.ExportDataRequest\x1a\x1d.google.longrunning.Operation"\xa6\x01\x82\xd3\xe4\x93\x02\x35"0/v1beta1/{name=projects/*/datasets/*}:exportData:\x01*\xda\x41+name,annotated_dataset,filter,output_config\xca\x41:\n\x1b\x45xportDataOperationResponse\x12\x1b\x45xportDataOperationMetadata\x12\xb3\x01\n\x0bGetDataItem\x12\x35.google.cloud.datalabeling.v1beta1.GetDataItemRequest\x1a+.google.cloud.datalabeling.v1beta1.DataItem"@\x82\xd3\xe4\x93\x02\x33\x12\x31/v1beta1/{name=projects/*/datasets/*/dataItems/*}\xda\x41\x04name\x12\xcd\x01\n\rListDataItems\x12\x37.google.cloud.data
labeling.v1beta1.ListDataItemsRequest\x1a\x38.google.cloud.datalabeling.v1beta1.ListDataItemsResponse"I\x82\xd3\xe4\x93\x02\x33\x12\x31/v1beta1/{parent=projects/*/datasets/*}/dataItems\xda\x41\rparent,filter\x12\xd3\x01\n\x13GetAnnotatedDataset\x12=.google.cloud.datalabeling.v1beta1.GetAnnotatedDatasetRequest\x1a\x33.google.cloud.datalabeling.v1beta1.AnnotatedDataset"H\x82\xd3\xe4\x93\x02;\x12\x39/v1beta1/{name=projects/*/datasets/*/annotatedDatasets/*}\xda\x41\x04name\x12\xed\x01\n\x15ListAnnotatedDatasets\x12?.google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest\x1a@.google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse"Q\x82\xd3\xe4\x93\x02;\x12\x39/v1beta1/{parent=projects/*/datasets/*}/annotatedDatasets\xda\x41\rparent,filter\x12\xb5\x01\n\x16\x44\x65leteAnnotatedDataset\x12@.google.cloud.datalabeling.v1beta1.DeleteAnnotatedDatasetRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02;*9/v1beta1/{name=projects/*/datasets/*/annotatedDatasets/*}\x12\xed\x01\n\nLabelImage\x12\x34.google.cloud.datalabeling.v1beta1.LabelImageRequest\x1a\x1d.google.longrunning.Operation"\x89\x01\x82\xd3\xe4\x93\x02\x38"3/v1beta1/{parent=projects/*/datasets/*}/image:label:\x01*\xda\x41\x1bparent,basic_config,feature\xca\x41*\n\x10\x41nnotatedDataset\x12\x16LabelOperationMetadata\x12\xed\x01\n\nLabelVideo\x12\x34.google.cloud.datalabeling.v1beta1.LabelVideoRequest\x1a\x1d.google.longrunning.Operation"\x89\x01\x82\xd3\xe4\x93\x02\x38"3/v1beta1/{parent=projects/*/datasets/*}/video:label:\x01*\xda\x41\x1bparent,basic_config,feature\xca\x41*\n\x10\x41nnotatedDataset\x12\x16LabelOperationMetadata\x12\xea\x01\n\tLabelText\x12\x33.google.cloud.datalabeling.v1beta1.LabelTextRequest\x1a\x1d.google.longrunning.Operation"\x88\x01\x82\xd3\xe4\x93\x02\x37"2/v1beta1/{parent=projects/*/datasets/*}/text:label:\x01*\xda\x41\x1bparent,basic_config,feature\xca\x41*\n\x10\x41nnotatedDataset\x12\x16LabelOperationMetadata\x12\xca\x01\n\nGetExample\x12\x34.google.cloud.datalabeling.v
1beta1.GetExampleRequest\x1a*.google.cloud.datalabeling.v1beta1.Example"Z\x82\xd3\xe4\x93\x02\x46\x12\x44/v1beta1/{name=projects/*/datasets/*/annotatedDatasets/*/examples/*}\xda\x41\x0bname,filter\x12\xdd\x01\n\x0cListExamples\x12\x36.google.cloud.datalabeling.v1beta1.ListExamplesRequest\x1a\x37.google.cloud.datalabeling.v1beta1.ListExamplesResponse"\\\x82\xd3\xe4\x93\x02\x46\x12\x44/v1beta1/{parent=projects/*/datasets/*/annotatedDatasets/*}/examples\xda\x41\rparent,filter\x12\xeb\x01\n\x17\x43reateAnnotationSpecSet\x12\x41.google.cloud.datalabeling.v1beta1.CreateAnnotationSpecSetRequest\x1a\x34.google.cloud.datalabeling.v1beta1.AnnotationSpecSet"W\x82\xd3\xe4\x93\x02\x34"//v1beta1/{parent=projects/*}/annotationSpecSets:\x01*\xda\x41\x1aparent,annotation_spec_set\x12\xcc\x01\n\x14GetAnnotationSpecSet\x12>.google.cloud.datalabeling.v1beta1.GetAnnotationSpecSetRequest\x1a\x34.google.cloud.datalabeling.v1beta1.AnnotationSpecSet">\x82\xd3\xe4\x93\x02\x31\x12//v1beta1/{name=projects/*/annotationSpecSets/*}\xda\x41\x04name\x12\xe6\x01\n\x16ListAnnotationSpecSets\x12@.google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest\x1a\x41.google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse"G\x82\xd3\xe4\x93\x02\x31\x12//v1beta1/{parent=projects/*}/annotationSpecSets\xda\x41\rparent,filter\x12\xb4\x01\n\x17\x44\x65leteAnnotationSpecSet\x12\x41.google.cloud.datalabeling.v1beta1.DeleteAnnotationSpecSetRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v1beta1/{name=projects/*/annotationSpecSets/*}\xda\x41\x04name\x12\xe5\x01\n\x11\x43reateInstruction\x12;.google.cloud.datalabeling.v1beta1.CreateInstructionRequest\x1a\x1d.google.longrunning.Operation"t\x82\xd3\xe4\x93\x02.")/v1beta1/{parent=projects/*}/instructions:\x01*\xda\x41\x12parent,instruction\xca\x41(\n\x0bInstruction\x12\x19\x43reateInstructionMetadata\x12\xb4\x01\n\x0eGetInstruction\x12\x38.google.cloud.datalabeling.v1beta1.GetInstructionRequest\x1a..google.cloud.datalabeling.v1beta1.Instr
uction"8\x82\xd3\xe4\x93\x02+\x12)/v1beta1/{name=projects/*/instructions/*}\xda\x41\x04name\x12\xce\x01\n\x10ListInstructions\x12:.google.cloud.datalabeling.v1beta1.ListInstructionsRequest\x1a;.google.cloud.datalabeling.v1beta1.ListInstructionsResponse"A\x82\xd3\xe4\x93\x02+\x12)/v1beta1/{parent=projects/*}/instructions\xda\x41\rparent,filter\x12\xa2\x01\n\x11\x44\x65leteInstruction\x12;.google.cloud.datalabeling.v1beta1.DeleteInstructionRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02+*)/v1beta1/{name=projects/*/instructions/*}\xda\x41\x04name\x12\xbb\x01\n\rGetEvaluation\x12\x37.google.cloud.datalabeling.v1beta1.GetEvaluationRequest\x1a-.google.cloud.datalabeling.v1beta1.Evaluation"B\x82\xd3\xe4\x93\x02\x35\x12\x33/v1beta1/{name=projects/*/datasets/*/evaluations/*}\xda\x41\x04name\x12\xd7\x01\n\x11SearchEvaluations\x12;.google.cloud.datalabeling.v1beta1.SearchEvaluationsRequest\x1a<.google.cloud.datalabeling.v1beta1.SearchEvaluationsResponse"G\x82\xd3\xe4\x93\x02\x31\x12//v1beta1/{parent=projects/*}/evaluations:search\xda\x41\rparent,filter\x12\x88\x02\n\x18SearchExampleComparisons\x12\x42.google.cloud.datalabeling.v1beta1.SearchExampleComparisonsRequest\x1a\x43.google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse"c\x82\xd3\xe4\x93\x02T"O/v1beta1/{parent=projects/*/datasets/*/evaluations/*}/exampleComparisons:search:\x01*\xda\x41\x06parent\x12\xcb\x01\n\x13\x43reateEvaluationJob\x12=.google.cloud.datalabeling.v1beta1.CreateEvaluationJobRequest\x1a\x30.google.cloud.datalabeling.v1beta1.EvaluationJob"C\x82\xd3\xe4\x93\x02\x30"+/v1beta1/{parent=projects/*}/evaluationJobs:\x01*\xda\x41\nparent,job\x12\xf7\x01\n\x13UpdateEvaluationJob\x12=.google.cloud.datalabeling.v1beta1.UpdateEvaluationJobRequest\x1a\x30.google.cloud.datalabeling.v1beta1.EvaluationJob"o\x82\xd3\xe4\x93\x02L2:/v1beta1/{evaluation_job.name=projects/*/evaluationJobs/*}:\x0e\x65valuation_job\xda\x41\x1a\x65valuation_job,update_mask\x12\xbc\x01\n\x10GetEvaluationJob\x12:.google.c
loud.datalabeling.v1beta1.GetEvaluationJobRequest\x1a\x30.google.cloud.datalabeling.v1beta1.EvaluationJob":\x82\xd3\xe4\x93\x02-\x12+/v1beta1/{name=projects/*/evaluationJobs/*}\xda\x41\x04name\x12\xaf\x01\n\x12PauseEvaluationJob\x12<.google.cloud.datalabeling.v1beta1.PauseEvaluationJobRequest\x1a\x16.google.protobuf.Empty"C\x82\xd3\xe4\x93\x02\x36"1/v1beta1/{name=projects/*/evaluationJobs/*}:pause:\x01*\xda\x41\x04name\x12\xb2\x01\n\x13ResumeEvaluationJob\x12=.google.cloud.datalabeling.v1beta1.ResumeEvaluationJobRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02\x37"2/v1beta1/{name=projects/*/evaluationJobs/*}:resume:\x01*\xda\x41\x04name\x12\xa8\x01\n\x13\x44\x65leteEvaluationJob\x12=.google.cloud.datalabeling.v1beta1.DeleteEvaluationJobRequest\x1a\x16.google.protobuf.Empty":\x82\xd3\xe4\x93\x02-*+/v1beta1/{name=projects/*/evaluationJobs/*}\xda\x41\x04name\x12\xd6\x01\n\x12ListEvaluationJobs\x12<.google.cloud.datalabeling.v1beta1.ListEvaluationJobsRequest\x1a=.google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse"C\x82\xd3\xe4\x93\x02-\x12+/v1beta1/{parent=projects/*}/evaluationJobs\xda\x41\rparent,filter\x1aO\xca\x41\x1b\x64\x61talabeling.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBx\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.DESCRIPTOR, - 
google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - ], -) - - -_LABELIMAGEREQUEST_FEATURE = _descriptor.EnumDescriptor( - name="Feature", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.Feature", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="FEATURE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CLASSIFICATION", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BOUNDING_BOX", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ORIENTED_BOUNDING_BOX", - index=3, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BOUNDING_POLY", - index=4, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="POLYLINE", - index=5, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SEGMENTATION", - index=6, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3223, - serialized_end=3373, -) -_sym_db.RegisterEnumDescriptor(_LABELIMAGEREQUEST_FEATURE) 
- -_LABELVIDEOREQUEST_FEATURE = _descriptor.EnumDescriptor( - name="Feature", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.Feature", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="FEATURE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CLASSIFICATION", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OBJECT_DETECTION", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="OBJECT_TRACKING", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="EVENT", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4002, - serialized_end=4110, -) -_sym_db.RegisterEnumDescriptor(_LABELVIDEOREQUEST_FEATURE) - -_LABELTEXTREQUEST_FEATURE = _descriptor.EnumDescriptor( - name="Feature", - full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest.Feature", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="FEATURE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TEXT_CLASSIFICATION", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TEXT_ENTITY_EXTRACTION", - index=2, - number=2, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4583, - serialized_end=4670, -) -_sym_db.RegisterEnumDescriptor(_LABELTEXTREQUEST_FEATURE) - - -_CREATEDATASETREQUEST = _descriptor.Descriptor( - name="CreateDatasetRequest", - full_name="google.cloud.datalabeling.v1beta1.CreateDatasetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.CreateDatasetRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dataset", - full_name="google.cloud.datalabeling.v1beta1.CreateDatasetRequest.dataset", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=694, - serialized_end=851, -) - - -_GETDATASETREQUEST = _descriptor.Descriptor( - name="GetDatasetRequest", - full_name="google.cloud.datalabeling.v1beta1.GetDatasetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( 
- name="name", - full_name="google.cloud.datalabeling.v1beta1.GetDatasetRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=853, - serialized_end=931, -) - - -_LISTDATASETSREQUEST = _descriptor.Descriptor( - name="ListDatasetsRequest", - full_name="google.cloud.datalabeling.v1beta1.ListDatasetsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.ListDatasetsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ListDatasetsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - 
full_name="google.cloud.datalabeling.v1beta1.ListDatasetsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.ListDatasetsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=934, - serialized_end=1094, -) - - -_LISTDATASETSRESPONSE = _descriptor.Descriptor( - name="ListDatasetsResponse", - full_name="google.cloud.datalabeling.v1beta1.ListDatasetsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="datasets", - full_name="google.cloud.datalabeling.v1beta1.ListDatasetsResponse.datasets", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.ListDatasetsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1096, - serialized_end=1205, -) - - -_DELETEDATASETREQUEST = _descriptor.Descriptor( - name="DeleteDatasetRequest", - full_name="google.cloud.datalabeling.v1beta1.DeleteDatasetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.DeleteDatasetRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1207, - serialized_end=1288, -) - - -_IMPORTDATAREQUEST = _descriptor.Descriptor( - name="ImportDataRequest", - full_name="google.cloud.datalabeling.v1beta1.ImportDataRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.ImportDataRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="input_config", - full_name="google.cloud.datalabeling.v1beta1.ImportDataRequest.input_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="user_email_address", - full_name="google.cloud.datalabeling.v1beta1.ImportDataRequest.user_email_address", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1291, - serialized_end=1472, -) - - -_EXPORTDATAREQUEST = _descriptor.Descriptor( - name="ExportDataRequest", - full_name="google.cloud.datalabeling.v1beta1.ExportDataRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.ExportDataRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotated_dataset", - full_name="google.cloud.datalabeling.v1beta1.ExportDataRequest.annotated_dataset", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A.\n,datalabeling.googleapis.com/AnnotatedDataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ExportDataRequest.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="output_config", - full_name="google.cloud.datalabeling.v1beta1.ExportDataRequest.output_config", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="user_email_address", - full_name="google.cloud.datalabeling.v1beta1.ExportDataRequest.user_email_address", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1475, - serialized_end=1760, -) - - -_GETDATAITEMREQUEST = _descriptor.Descriptor( - name="GetDataItemRequest", - full_name="google.cloud.datalabeling.v1beta1.GetDataItemRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.GetDataItemRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A&\n$datalabeling.googleapis.com/DataItem", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1762, - serialized_end=1842, -) - - -_LISTDATAITEMSREQUEST = _descriptor.Descriptor( - name="ListDataItemsRequest", - full_name="google.cloud.datalabeling.v1beta1.ListDataItemsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.ListDataItemsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ListDataItemsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.datalabeling.v1beta1.ListDataItemsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.ListDataItemsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1845, - serialized_end=1998, -) - - -_LISTDATAITEMSRESPONSE = _descriptor.Descriptor( - name="ListDataItemsResponse", - full_name="google.cloud.datalabeling.v1beta1.ListDataItemsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="data_items", - 
full_name="google.cloud.datalabeling.v1beta1.ListDataItemsResponse.data_items", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2000, - serialized_end=2113, -) - - -_GETANNOTATEDDATASETREQUEST = _descriptor.Descriptor( - name="GetAnnotatedDatasetRequest", - full_name="google.cloud.datalabeling.v1beta1.GetAnnotatedDatasetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.GetAnnotatedDatasetRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A.\n,datalabeling.googleapis.com/AnnotatedDataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2115, - serialized_end=2211, -) - - -_LISTANNOTATEDDATASETSREQUEST = _descriptor.Descriptor( - name="ListAnnotatedDatasetsRequest", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest.page_token", - index=3, - 
number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2214, - serialized_end=2375, -) - - -_LISTANNOTATEDDATASETSRESPONSE = _descriptor.Descriptor( - name="ListAnnotatedDatasetsResponse", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotated_datasets", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse.annotated_datasets", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2378, - 
serialized_end=2515, -) - - -_DELETEANNOTATEDDATASETREQUEST = _descriptor.Descriptor( - name="DeleteAnnotatedDatasetRequest", - full_name="google.cloud.datalabeling.v1beta1.DeleteAnnotatedDatasetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.DeleteAnnotatedDatasetRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A.\n,datalabeling.googleapis.com/AnnotatedDataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2517, - serialized_end=2616, -) - - -_LABELIMAGEREQUEST = _descriptor.Descriptor( - name="LabelImageRequest", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="image_classification_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.image_classification_config", - index=0, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="bounding_poly_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.bounding_poly_config", - index=1, - number=5, - type=11, - cpp_type=10, 
- label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="polyline_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.polyline_config", - index=2, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="segmentation_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.segmentation_config", - index=3, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.parent", - index=4, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.basic_config", - index=5, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="feature", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.feature", - index=6, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LABELIMAGEREQUEST_FEATURE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="request_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageRequest.request_config", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=2619, - serialized_end=3391, -) - - -_LABELVIDEOREQUEST = _descriptor.Descriptor( - name="LabelVideoRequest", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="video_classification_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.video_classification_config", - index=0, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="object_detection_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.object_detection_config", - index=1, - number=5, - 
type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="object_tracking_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.object_tracking_config", - index=2, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="event_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.event_config", - index=3, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.parent", - index=4, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.basic_config", - index=5, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="feature", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.feature", - index=6, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LABELVIDEOREQUEST_FEATURE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="request_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoRequest.request_config", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=3394, - serialized_end=4128, -) - - -_LABELTEXTREQUEST = _descriptor.Descriptor( - name="LabelTextRequest", - full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text_classification_config", - full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest.text_classification_config", - index=0, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_entity_extraction_config", - 
full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest.text_entity_extraction_config", - index=1, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest.parent", - index=2, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Dataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest.basic_config", - index=3, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="feature", - full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest.feature", - index=4, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LABELTEXTREQUEST_FEATURE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ 
- _descriptor.OneofDescriptor( - name="request_config", - full_name="google.cloud.datalabeling.v1beta1.LabelTextRequest.request_config", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=4131, - serialized_end=4688, -) - - -_GETEXAMPLEREQUEST = _descriptor.Descriptor( - name="GetExampleRequest", - full_name="google.cloud.datalabeling.v1beta1.GetExampleRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.GetExampleRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#datalabeling.googleapis.com/Example", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.GetExampleRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4690, - serialized_end=4789, -) - - -_LISTEXAMPLESREQUEST = _descriptor.Descriptor( - name="ListExamplesRequest", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - 
_descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A.\n,datalabeling.googleapis.com/AnnotatedDataset", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, 
- syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4792, - serialized_end=4953, -) - - -_LISTEXAMPLESRESPONSE = _descriptor.Descriptor( - name="ListExamplesResponse", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="examples", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesResponse.examples", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4955, - serialized_end=5064, -) - - -_CREATEANNOTATIONSPECSETREQUEST = _descriptor.Descriptor( - name="CreateAnnotationSpecSetRequest", - full_name="google.cloud.datalabeling.v1beta1.CreateAnnotationSpecSetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.CreateAnnotationSpecSetRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - 
label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.CreateAnnotationSpecSetRequest.annotation_spec_set", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5067, - serialized_end=5256, -) - - -_GETANNOTATIONSPECSETREQUEST = _descriptor.Descriptor( - name="GetAnnotationSpecSetRequest", - full_name="google.cloud.datalabeling.v1beta1.GetAnnotationSpecSetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.GetAnnotationSpecSetRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A/\n-datalabeling.googleapis.com/AnnotationSpecSet", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=5258, - serialized_end=5356, -) - - -_LISTANNOTATIONSPECSETSREQUEST = _descriptor.Descriptor( - name="ListAnnotationSpecSetsRequest", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest.page_token", - index=3, - 
number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5359, - serialized_end=5529, -) - - -_LISTANNOTATIONSPECSETSRESPONSE = _descriptor.Descriptor( - name="ListAnnotationSpecSetsResponse", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_sets", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse.annotation_spec_sets", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5532, - 
serialized_end=5673, -) - - -_DELETEANNOTATIONSPECSETREQUEST = _descriptor.Descriptor( - name="DeleteAnnotationSpecSetRequest", - full_name="google.cloud.datalabeling.v1beta1.DeleteAnnotationSpecSetRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.DeleteAnnotationSpecSetRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A/\n-datalabeling.googleapis.com/AnnotationSpecSet", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5675, - serialized_end=5776, -) - - -_CREATEINSTRUCTIONREQUEST = _descriptor.Descriptor( - name="CreateInstructionRequest", - full_name="google.cloud.datalabeling.v1beta1.CreateInstructionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.CreateInstructionRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="instruction", - 
full_name="google.cloud.datalabeling.v1beta1.CreateInstructionRequest.instruction", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5779, - serialized_end=5948, -) - - -_GETINSTRUCTIONREQUEST = _descriptor.Descriptor( - name="GetInstructionRequest", - full_name="google.cloud.datalabeling.v1beta1.GetInstructionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.GetInstructionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A)\n'datalabeling.googleapis.com/Instruction", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5950, - serialized_end=6036, -) - - -_DELETEINSTRUCTIONREQUEST = _descriptor.Descriptor( - name="DeleteInstructionRequest", - full_name="google.cloud.datalabeling.v1beta1.DeleteInstructionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.cloud.datalabeling.v1beta1.DeleteInstructionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A)\n'datalabeling.googleapis.com/Instruction", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6038, - serialized_end=6127, -) - - -_LISTINSTRUCTIONSREQUEST = _descriptor.Descriptor( - name="ListInstructionsRequest", - full_name="google.cloud.datalabeling.v1beta1.ListInstructionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.ListInstructionsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ListInstructionsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - 
full_name="google.cloud.datalabeling.v1beta1.ListInstructionsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.ListInstructionsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6130, - serialized_end=6294, -) - - -_LISTINSTRUCTIONSRESPONSE = _descriptor.Descriptor( - name="ListInstructionsResponse", - full_name="google.cloud.datalabeling.v1beta1.ListInstructionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="instructions", - full_name="google.cloud.datalabeling.v1beta1.ListInstructionsResponse.instructions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.ListInstructionsResponse.next_page_token", - index=1, - number=2, - 
type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6296, - serialized_end=6417, -) - - -_GETEVALUATIONREQUEST = _descriptor.Descriptor( - name="GetEvaluationRequest", - full_name="google.cloud.datalabeling.v1beta1.GetEvaluationRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.GetEvaluationRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A(\n&datalabeling.googleapis.com/Evaluation", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6419, - serialized_end=6503, -) - - -_SEARCHEVALUATIONSREQUEST = _descriptor.Descriptor( - name="SearchEvaluationsRequest", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A(\n&datalabeling.googleapis.com/Evaluation", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6506, - serialized_end=6666, -) - - -_SEARCHEVALUATIONSRESPONSE = _descriptor.Descriptor( - 
name="SearchEvaluationsResponse", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="evaluations", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsResponse.evaluations", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.SearchEvaluationsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6668, - serialized_end=6788, -) - - -_SEARCHEXAMPLECOMPARISONSREQUEST = _descriptor.Descriptor( - name="SearchExampleComparisonsRequest", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A(\n&datalabeling.googleapis.com/Evaluation", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6791, - serialized_end=6937, -) - - -_SEARCHEXAMPLECOMPARISONSRESPONSE_EXAMPLECOMPARISON = _descriptor.Descriptor( - name="ExampleComparison", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse.ExampleComparison", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ground_truth_example", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse.ExampleComparison.ground_truth_example", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="model_created_examples", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse.ExampleComparison.model_created_examples", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7118, - serialized_end=7287, -) - -_SEARCHEXAMPLECOMPARISONSRESPONSE = _descriptor.Descriptor( - name="SearchExampleComparisonsResponse", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="example_comparisons", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse.example_comparisons", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SEARCHEXAMPLECOMPARISONSRESPONSE_EXAMPLECOMPARISON,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6940, - serialized_end=7287, -) - - -_CREATEEVALUATIONJOBREQUEST = _descriptor.Descriptor( - name="CreateEvaluationJobRequest", - full_name="google.cloud.datalabeling.v1beta1.CreateEvaluationJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.CreateEvaluationJobRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="job", - full_name="google.cloud.datalabeling.v1beta1.CreateEvaluationJobRequest.job", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7290, - serialized_end=7455, -) - - -_UPDATEEVALUATIONJOBREQUEST = _descriptor.Descriptor( - 
name="UpdateEvaluationJobRequest", - full_name="google.cloud.datalabeling.v1beta1.UpdateEvaluationJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="evaluation_job", - full_name="google.cloud.datalabeling.v1beta1.UpdateEvaluationJobRequest.evaluation_job", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.datalabeling.v1beta1.UpdateEvaluationJobRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7458, - serialized_end=7619, -) - - -_GETEVALUATIONJOBREQUEST = _descriptor.Descriptor( - name="GetEvaluationJobRequest", - full_name="google.cloud.datalabeling.v1beta1.GetEvaluationJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.GetEvaluationJobRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=b"\340A\002\372A+\n)datalabeling.googleapis.com/EvaluationJob", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7621, - serialized_end=7711, -) - - -_PAUSEEVALUATIONJOBREQUEST = _descriptor.Descriptor( - name="PauseEvaluationJobRequest", - full_name="google.cloud.datalabeling.v1beta1.PauseEvaluationJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.PauseEvaluationJobRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A+\n)datalabeling.googleapis.com/EvaluationJob", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7713, - serialized_end=7805, -) - - -_RESUMEEVALUATIONJOBREQUEST = _descriptor.Descriptor( - name="ResumeEvaluationJobRequest", - full_name="google.cloud.datalabeling.v1beta1.ResumeEvaluationJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.ResumeEvaluationJobRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A+\n)datalabeling.googleapis.com/EvaluationJob", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7807, - serialized_end=7900, -) - - -_DELETEEVALUATIONJOBREQUEST = _descriptor.Descriptor( - name="DeleteEvaluationJobRequest", - full_name="google.cloud.datalabeling.v1beta1.DeleteEvaluationJobRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.DeleteEvaluationJobRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A+\n)datalabeling.googleapis.com/EvaluationJob", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7902, - serialized_end=7995, -) - - -_LISTEVALUATIONJOBSREQUEST = _descriptor.Descriptor( - name="ListEvaluationJobsRequest", - full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsRequest.filter", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsRequest.page_token", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7998, - serialized_end=8164, -) - - -_LISTEVALUATIONJOBSRESPONSE = _descriptor.Descriptor( - name="ListEvaluationJobsResponse", - 
full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="evaluation_jobs", - full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.evaluation_jobs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=8167, - serialized_end=8295, -) - -_CREATEDATASETREQUEST.fields_by_name[ - "dataset" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._DATASET -) -_LISTDATASETSRESPONSE.fields_by_name[ - "datasets" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._DATASET -) -_IMPORTDATAREQUEST.fields_by_name[ - "input_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._INPUTCONFIG -) -_EXPORTDATAREQUEST.fields_by_name[ - "output_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._OUTPUTCONFIG -) 
-_LISTDATAITEMSRESPONSE.fields_by_name[ - "data_items" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._DATAITEM -) -_LISTANNOTATEDDATASETSRESPONSE.fields_by_name[ - "annotated_datasets" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._ANNOTATEDDATASET -) -_LABELIMAGEREQUEST.fields_by_name[ - "image_classification_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._IMAGECLASSIFICATIONCONFIG -) -_LABELIMAGEREQUEST.fields_by_name[ - "bounding_poly_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._BOUNDINGPOLYCONFIG -) -_LABELIMAGEREQUEST.fields_by_name[ - "polyline_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._POLYLINECONFIG -) -_LABELIMAGEREQUEST.fields_by_name[ - "segmentation_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._SEGMENTATIONCONFIG -) -_LABELIMAGEREQUEST.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELIMAGEREQUEST.fields_by_name["feature"].enum_type = _LABELIMAGEREQUEST_FEATURE -_LABELIMAGEREQUEST_FEATURE.containing_type = _LABELIMAGEREQUEST -_LABELIMAGEREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELIMAGEREQUEST.fields_by_name["image_classification_config"] -) -_LABELIMAGEREQUEST.fields_by_name[ - "image_classification_config" -].containing_oneof = _LABELIMAGEREQUEST.oneofs_by_name["request_config"] -_LABELIMAGEREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELIMAGEREQUEST.fields_by_name["bounding_poly_config"] -) -_LABELIMAGEREQUEST.fields_by_name[ - "bounding_poly_config" -].containing_oneof = 
_LABELIMAGEREQUEST.oneofs_by_name["request_config"] -_LABELIMAGEREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELIMAGEREQUEST.fields_by_name["polyline_config"] -) -_LABELIMAGEREQUEST.fields_by_name[ - "polyline_config" -].containing_oneof = _LABELIMAGEREQUEST.oneofs_by_name["request_config"] -_LABELIMAGEREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELIMAGEREQUEST.fields_by_name["segmentation_config"] -) -_LABELIMAGEREQUEST.fields_by_name[ - "segmentation_config" -].containing_oneof = _LABELIMAGEREQUEST.oneofs_by_name["request_config"] -_LABELVIDEOREQUEST.fields_by_name[ - "video_classification_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._VIDEOCLASSIFICATIONCONFIG -) -_LABELVIDEOREQUEST.fields_by_name[ - "object_detection_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._OBJECTDETECTIONCONFIG -) -_LABELVIDEOREQUEST.fields_by_name[ - "object_tracking_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._OBJECTTRACKINGCONFIG -) -_LABELVIDEOREQUEST.fields_by_name[ - "event_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._EVENTCONFIG -) -_LABELVIDEOREQUEST.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELVIDEOREQUEST.fields_by_name["feature"].enum_type = _LABELVIDEOREQUEST_FEATURE -_LABELVIDEOREQUEST_FEATURE.containing_type = _LABELVIDEOREQUEST -_LABELVIDEOREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELVIDEOREQUEST.fields_by_name["video_classification_config"] -) -_LABELVIDEOREQUEST.fields_by_name[ - "video_classification_config" -].containing_oneof = _LABELVIDEOREQUEST.oneofs_by_name["request_config"] 
-_LABELVIDEOREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELVIDEOREQUEST.fields_by_name["object_detection_config"] -) -_LABELVIDEOREQUEST.fields_by_name[ - "object_detection_config" -].containing_oneof = _LABELVIDEOREQUEST.oneofs_by_name["request_config"] -_LABELVIDEOREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELVIDEOREQUEST.fields_by_name["object_tracking_config"] -) -_LABELVIDEOREQUEST.fields_by_name[ - "object_tracking_config" -].containing_oneof = _LABELVIDEOREQUEST.oneofs_by_name["request_config"] -_LABELVIDEOREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELVIDEOREQUEST.fields_by_name["event_config"] -) -_LABELVIDEOREQUEST.fields_by_name[ - "event_config" -].containing_oneof = _LABELVIDEOREQUEST.oneofs_by_name["request_config"] -_LABELTEXTREQUEST.fields_by_name[ - "text_classification_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._TEXTCLASSIFICATIONCONFIG -) -_LABELTEXTREQUEST.fields_by_name[ - "text_entity_extraction_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._TEXTENTITYEXTRACTIONCONFIG -) -_LABELTEXTREQUEST.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELTEXTREQUEST.fields_by_name["feature"].enum_type = _LABELTEXTREQUEST_FEATURE -_LABELTEXTREQUEST_FEATURE.containing_type = _LABELTEXTREQUEST -_LABELTEXTREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELTEXTREQUEST.fields_by_name["text_classification_config"] -) -_LABELTEXTREQUEST.fields_by_name[ - "text_classification_config" -].containing_oneof = _LABELTEXTREQUEST.oneofs_by_name["request_config"] -_LABELTEXTREQUEST.oneofs_by_name["request_config"].fields.append( - _LABELTEXTREQUEST.fields_by_name["text_entity_extraction_config"] -) -_LABELTEXTREQUEST.fields_by_name[ - 
"text_entity_extraction_config" -].containing_oneof = _LABELTEXTREQUEST.oneofs_by_name["request_config"] -_LISTEXAMPLESRESPONSE.fields_by_name[ - "examples" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._EXAMPLE -) -_CREATEANNOTATIONSPECSETREQUEST.fields_by_name[ - "annotation_spec_set" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPECSET -) -_LISTANNOTATIONSPECSETSRESPONSE.fields_by_name[ - "annotation_spec_sets" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPECSET -) -_CREATEINSTRUCTIONREQUEST.fields_by_name[ - "instruction" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2._INSTRUCTION -) -_LISTINSTRUCTIONSRESPONSE.fields_by_name[ - "instructions" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2._INSTRUCTION -) -_SEARCHEVALUATIONSRESPONSE.fields_by_name[ - "evaluations" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2._EVALUATION -) -_SEARCHEXAMPLECOMPARISONSRESPONSE_EXAMPLECOMPARISON.fields_by_name[ - "ground_truth_example" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._EXAMPLE -) -_SEARCHEXAMPLECOMPARISONSRESPONSE_EXAMPLECOMPARISON.fields_by_name[ - "model_created_examples" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._EXAMPLE -) -_SEARCHEXAMPLECOMPARISONSRESPONSE_EXAMPLECOMPARISON.containing_type = ( - _SEARCHEXAMPLECOMPARISONSRESPONSE -) -_SEARCHEXAMPLECOMPARISONSRESPONSE.fields_by_name[ - "example_comparisons" -].message_type = _SEARCHEXAMPLECOMPARISONSRESPONSE_EXAMPLECOMPARISON -_CREATEEVALUATIONJOBREQUEST.fields_by_name[ - "job" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2._EVALUATIONJOB 
-) -_UPDATEEVALUATIONJOBREQUEST.fields_by_name[ - "evaluation_job" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2._EVALUATIONJOB -) -_UPDATEEVALUATIONJOBREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTEVALUATIONJOBSRESPONSE.fields_by_name[ - "evaluation_jobs" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2._EVALUATIONJOB -) -DESCRIPTOR.message_types_by_name["CreateDatasetRequest"] = _CREATEDATASETREQUEST -DESCRIPTOR.message_types_by_name["GetDatasetRequest"] = _GETDATASETREQUEST -DESCRIPTOR.message_types_by_name["ListDatasetsRequest"] = _LISTDATASETSREQUEST -DESCRIPTOR.message_types_by_name["ListDatasetsResponse"] = _LISTDATASETSRESPONSE -DESCRIPTOR.message_types_by_name["DeleteDatasetRequest"] = _DELETEDATASETREQUEST -DESCRIPTOR.message_types_by_name["ImportDataRequest"] = _IMPORTDATAREQUEST -DESCRIPTOR.message_types_by_name["ExportDataRequest"] = _EXPORTDATAREQUEST -DESCRIPTOR.message_types_by_name["GetDataItemRequest"] = _GETDATAITEMREQUEST -DESCRIPTOR.message_types_by_name["ListDataItemsRequest"] = _LISTDATAITEMSREQUEST -DESCRIPTOR.message_types_by_name["ListDataItemsResponse"] = _LISTDATAITEMSRESPONSE -DESCRIPTOR.message_types_by_name[ - "GetAnnotatedDatasetRequest" -] = _GETANNOTATEDDATASETREQUEST -DESCRIPTOR.message_types_by_name[ - "ListAnnotatedDatasetsRequest" -] = _LISTANNOTATEDDATASETSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListAnnotatedDatasetsResponse" -] = _LISTANNOTATEDDATASETSRESPONSE -DESCRIPTOR.message_types_by_name[ - "DeleteAnnotatedDatasetRequest" -] = _DELETEANNOTATEDDATASETREQUEST -DESCRIPTOR.message_types_by_name["LabelImageRequest"] = _LABELIMAGEREQUEST -DESCRIPTOR.message_types_by_name["LabelVideoRequest"] = _LABELVIDEOREQUEST -DESCRIPTOR.message_types_by_name["LabelTextRequest"] = _LABELTEXTREQUEST -DESCRIPTOR.message_types_by_name["GetExampleRequest"] = 
_GETEXAMPLEREQUEST -DESCRIPTOR.message_types_by_name["ListExamplesRequest"] = _LISTEXAMPLESREQUEST -DESCRIPTOR.message_types_by_name["ListExamplesResponse"] = _LISTEXAMPLESRESPONSE -DESCRIPTOR.message_types_by_name[ - "CreateAnnotationSpecSetRequest" -] = _CREATEANNOTATIONSPECSETREQUEST -DESCRIPTOR.message_types_by_name[ - "GetAnnotationSpecSetRequest" -] = _GETANNOTATIONSPECSETREQUEST -DESCRIPTOR.message_types_by_name[ - "ListAnnotationSpecSetsRequest" -] = _LISTANNOTATIONSPECSETSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListAnnotationSpecSetsResponse" -] = _LISTANNOTATIONSPECSETSRESPONSE -DESCRIPTOR.message_types_by_name[ - "DeleteAnnotationSpecSetRequest" -] = _DELETEANNOTATIONSPECSETREQUEST -DESCRIPTOR.message_types_by_name["CreateInstructionRequest"] = _CREATEINSTRUCTIONREQUEST -DESCRIPTOR.message_types_by_name["GetInstructionRequest"] = _GETINSTRUCTIONREQUEST -DESCRIPTOR.message_types_by_name["DeleteInstructionRequest"] = _DELETEINSTRUCTIONREQUEST -DESCRIPTOR.message_types_by_name["ListInstructionsRequest"] = _LISTINSTRUCTIONSREQUEST -DESCRIPTOR.message_types_by_name["ListInstructionsResponse"] = _LISTINSTRUCTIONSRESPONSE -DESCRIPTOR.message_types_by_name["GetEvaluationRequest"] = _GETEVALUATIONREQUEST -DESCRIPTOR.message_types_by_name["SearchEvaluationsRequest"] = _SEARCHEVALUATIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "SearchEvaluationsResponse" -] = _SEARCHEVALUATIONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "SearchExampleComparisonsRequest" -] = _SEARCHEXAMPLECOMPARISONSREQUEST -DESCRIPTOR.message_types_by_name[ - "SearchExampleComparisonsResponse" -] = _SEARCHEXAMPLECOMPARISONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "CreateEvaluationJobRequest" -] = _CREATEEVALUATIONJOBREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateEvaluationJobRequest" -] = _UPDATEEVALUATIONJOBREQUEST -DESCRIPTOR.message_types_by_name["GetEvaluationJobRequest"] = _GETEVALUATIONJOBREQUEST -DESCRIPTOR.message_types_by_name[ - "PauseEvaluationJobRequest" -] = 
_PAUSEEVALUATIONJOBREQUEST -DESCRIPTOR.message_types_by_name[ - "ResumeEvaluationJobRequest" -] = _RESUMEEVALUATIONJOBREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteEvaluationJobRequest" -] = _DELETEEVALUATIONJOBREQUEST -DESCRIPTOR.message_types_by_name[ - "ListEvaluationJobsRequest" -] = _LISTEVALUATIONJOBSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListEvaluationJobsResponse" -] = _LISTEVALUATIONJOBSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -CreateDatasetRequest = _reflection.GeneratedProtocolMessageType( - "CreateDatasetRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEDATASETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for CreateDataset. - - Attributes: - parent: - Required. Dataset resource parent, format: - projects/{project_id} - dataset: - Required. The dataset to be created. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.CreateDatasetRequest) - }, -) -_sym_db.RegisterMessage(CreateDatasetRequest) - -GetDatasetRequest = _reflection.GeneratedProtocolMessageType( - "GetDatasetRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETDATASETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetDataSet. - - Attributes: - name: - Required. Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetDatasetRequest) - }, -) -_sym_db.RegisterMessage(GetDatasetRequest) - -ListDatasetsRequest = _reflection.GeneratedProtocolMessageType( - "ListDatasetsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTDATASETSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ListDataset. - - Attributes: - parent: - Required. 
Dataset resource parent, format: - projects/{project_id} - filter: - Optional. Filter on dataset is not supported at this moment. - page_size: - Optional. Requested page size. Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by [ListDatasetsResponse.next_pa - ge_token][google.cloud.datalabeling.v1beta1.ListDatasetsRespon - se.next_page_token] of the previous - [DataLabelingService.ListDatasets] call. Returns the first - page if empty. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListDatasetsRequest) - }, -) -_sym_db.RegisterMessage(ListDatasetsRequest) - -ListDatasetsResponse = _reflection.GeneratedProtocolMessageType( - "ListDatasetsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTDATASETSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of listing datasets within a project. - - Attributes: - datasets: - The list of datasets to return. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListDatasetsResponse) - }, -) -_sym_db.RegisterMessage(ListDatasetsResponse) - -DeleteDatasetRequest = _reflection.GeneratedProtocolMessageType( - "DeleteDatasetRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEDATASETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for DeleteDataset. - - Attributes: - name: - Required. 
Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.DeleteDatasetRequest) - }, -) -_sym_db.RegisterMessage(DeleteDatasetRequest) - -ImportDataRequest = _reflection.GeneratedProtocolMessageType( - "ImportDataRequest", - (_message.Message,), - { - "DESCRIPTOR": _IMPORTDATAREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ImportData API. - - Attributes: - name: - Required. Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - input_config: - Required. Specify the input source of the data. - user_email_address: - Email of the user who started the import task and should be - notified by email. If empty no notification will be sent. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImportDataRequest) - }, -) -_sym_db.RegisterMessage(ImportDataRequest) - -ExportDataRequest = _reflection.GeneratedProtocolMessageType( - "ExportDataRequest", - (_message.Message,), - { - "DESCRIPTOR": _EXPORTDATAREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ExportData API. - - Attributes: - name: - Required. Dataset resource name, format: - projects/{project_id}/datasets/{dataset_id} - annotated_dataset: - Required. Annotated dataset resource name. DataItem in Dataset - and their annotations in specified annotated dataset will be - exported. It’s in format of - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id} - filter: - Optional. Filter is not supported at this moment. - output_config: - Required. Specify the output destination. - user_email_address: - Email of the user who started the export task and should be - notified by email. If empty no notification will be sent. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ExportDataRequest) - }, -) -_sym_db.RegisterMessage(ExportDataRequest) - -GetDataItemRequest = _reflection.GeneratedProtocolMessageType( - "GetDataItemRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETDATAITEMREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetDataItem. - - Attributes: - name: - Required. The name of the data item to get, format: projects/{ - project_id}/datasets/{dataset_id}/dataItems/{data_item_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetDataItemRequest) - }, -) -_sym_db.RegisterMessage(GetDataItemRequest) - -ListDataItemsRequest = _reflection.GeneratedProtocolMessageType( - "ListDataItemsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTDATAITEMSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ListDataItems. - - Attributes: - parent: - Required. Name of the dataset to list data items, format: - projects/{project_id}/datasets/{dataset_id} - filter: - Optional. Filter is not supported at this moment. - page_size: - Optional. Requested page size. Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by [ListDataItemsResponse.next_p - age_token][google.cloud.datalabeling.v1beta1.ListDataItemsResp - onse.next_page_token] of the previous - [DataLabelingService.ListDataItems] call. Return first page if - empty. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListDataItemsRequest) - }, -) -_sym_db.RegisterMessage(ListDataItemsRequest) - -ListDataItemsResponse = _reflection.GeneratedProtocolMessageType( - "ListDataItemsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTDATAITEMSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of listing data items in a dataset. - - Attributes: - data_items: - The list of data items to return. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListDataItemsResponse) - }, -) -_sym_db.RegisterMessage(ListDataItemsResponse) - -GetAnnotatedDatasetRequest = _reflection.GeneratedProtocolMessageType( - "GetAnnotatedDatasetRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETANNOTATEDDATASETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetAnnotatedDataset. - - Attributes: - name: - Required. Name of the annotated dataset to get, format: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetAnnotatedDatasetRequest) - }, -) -_sym_db.RegisterMessage(GetAnnotatedDatasetRequest) - -ListAnnotatedDatasetsRequest = _reflection.GeneratedProtocolMessageType( - "ListAnnotatedDatasetsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTANNOTATEDDATASETSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ListAnnotatedDatasets. - - Attributes: - parent: - Required. Name of the dataset to list annotated datasets, - format: projects/{project_id}/datasets/{dataset_id} - filter: - Optional. Filter is not supported at this moment. - page_size: - Optional. Requested page size. 
Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by [ListAnnotatedDatasetsRespons - e.next_page_token][google.cloud.datalabeling.v1beta1.ListAnnot - atedDatasetsResponse.next_page_token] of the previous - [DataLabelingService.ListAnnotatedDatasets] call. Return first - page if empty. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest) - }, -) -_sym_db.RegisterMessage(ListAnnotatedDatasetsRequest) - -ListAnnotatedDatasetsResponse = _reflection.GeneratedProtocolMessageType( - "ListAnnotatedDatasetsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTANNOTATEDDATASETSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of listing annotated datasets for a dataset. - - Attributes: - annotated_datasets: - The list of annotated datasets to return. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse) - }, -) -_sym_db.RegisterMessage(ListAnnotatedDatasetsResponse) - -DeleteAnnotatedDatasetRequest = _reflection.GeneratedProtocolMessageType( - "DeleteAnnotatedDatasetRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEANNOTATEDDATASETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for DeleteAnnotatedDataset. - - Attributes: - name: - Required. 
Name of the annotated dataset to delete, format: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.DeleteAnnotatedDatasetRequest) - }, -) -_sym_db.RegisterMessage(DeleteAnnotatedDatasetRequest) - -LabelImageRequest = _reflection.GeneratedProtocolMessageType( - "LabelImageRequest", - (_message.Message,), - { - "DESCRIPTOR": _LABELIMAGEREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for starting an image labeling task. - - Attributes: - request_config: - Required. Config for labeling tasks. The type of request - config must match the selected feature. - image_classification_config: - Configuration for image classification task. One of - image_classification_config, bounding_poly_config, - polyline_config and segmentation_config are required. - bounding_poly_config: - Configuration for bounding box and bounding poly task. One of - image_classification_config, bounding_poly_config, - polyline_config and segmentation_config are required. - polyline_config: - Configuration for polyline task. One of - image_classification_config, bounding_poly_config, - polyline_config and segmentation_config are required. - segmentation_config: - Configuration for segmentation task. One of - image_classification_config, bounding_poly_config, - polyline_config and segmentation_config are required. - parent: - Required. Name of the dataset to request labeling task, - format: projects/{project_id}/datasets/{dataset_id} - basic_config: - Required. Basic human annotation config. - feature: - Required. The type of image labeling task. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImageRequest) - }, -) -_sym_db.RegisterMessage(LabelImageRequest) - -LabelVideoRequest = _reflection.GeneratedProtocolMessageType( - "LabelVideoRequest", - (_message.Message,), - { - "DESCRIPTOR": _LABELVIDEOREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for LabelVideo. - - Attributes: - request_config: - Required. Config for labeling tasks. The type of request - config must match the selected feature. - video_classification_config: - Configuration for video classification task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. - object_detection_config: - Configuration for video object detection task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. - object_tracking_config: - Configuration for video object tracking task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. - event_config: - Configuration for video event task. One of - video_classification_config, object_detection_config, - object_tracking_config and event_config is required. - parent: - Required. Name of the dataset to request labeling task, - format: projects/{project_id}/datasets/{dataset_id} - basic_config: - Required. Basic human annotation config. - feature: - Required. The type of video labeling task. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelVideoRequest) - }, -) -_sym_db.RegisterMessage(LabelVideoRequest) - -LabelTextRequest = _reflection.GeneratedProtocolMessageType( - "LabelTextRequest", - (_message.Message,), - { - "DESCRIPTOR": _LABELTEXTREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for LabelText. 
- - Attributes: - request_config: - Required. Config for labeling tasks. The type of request - config must match the selected feature. - text_classification_config: - Configuration for text classification task. One of - text_classification_config and text_entity_extraction_config - is required. - text_entity_extraction_config: - Configuration for entity extraction task. One of - text_classification_config and text_entity_extraction_config - is required. - parent: - Required. Name of the data set to request labeling task, - format: projects/{project_id}/datasets/{dataset_id} - basic_config: - Required. Basic human annotation config. - feature: - Required. The type of text labeling task. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelTextRequest) - }, -) -_sym_db.RegisterMessage(LabelTextRequest) - -GetExampleRequest = _reflection.GeneratedProtocolMessageType( - "GetExampleRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETEXAMPLEREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetExample - - Attributes: - name: - Required. Name of example, format: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id}/examples/{example_id} - filter: - Optional. An expression for filtering Examples. Filter by - annotation_spec.display_name is supported. Format - “annotation_spec.display_name = {display_name}” - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetExampleRequest) - }, -) -_sym_db.RegisterMessage(GetExampleRequest) - -ListExamplesRequest = _reflection.GeneratedProtocolMessageType( - "ListExamplesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTEXAMPLESREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ListExamples. - - Attributes: - parent: - Required. Example resource parent. 
- filter: - Optional. An expression for filtering Examples. For annotated - datasets that have annotation spec set, filter by - annotation_spec.display_name is supported. Format - “annotation_spec.display_name = {display_name}” - page_size: - Optional. Requested page size. Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by [ListExamplesResponse.next_pa - ge_token][google.cloud.datalabeling.v1beta1.ListExamplesRespon - se.next_page_token] of the previous - [DataLabelingService.ListExamples] call. Return first page if - empty. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListExamplesRequest) - }, -) -_sym_db.RegisterMessage(ListExamplesRequest) - -ListExamplesResponse = _reflection.GeneratedProtocolMessageType( - "ListExamplesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTEXAMPLESRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of listing Examples in and annotated dataset. - - Attributes: - examples: - The list of examples to return. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListExamplesResponse) - }, -) -_sym_db.RegisterMessage(ListExamplesResponse) - -CreateAnnotationSpecSetRequest = _reflection.GeneratedProtocolMessageType( - "CreateAnnotationSpecSetRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEANNOTATIONSPECSETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for CreateAnnotationSpecSet. - - Attributes: - parent: - Required. AnnotationSpecSet resource parent, format: - projects/{project_id} - annotation_spec_set: - Required. Annotation spec set to create. Annotation specs must - be included. 
Only one annotation spec will be accepted for - annotation specs with same display_name. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.CreateAnnotationSpecSetRequest) - }, -) -_sym_db.RegisterMessage(CreateAnnotationSpecSetRequest) - -GetAnnotationSpecSetRequest = _reflection.GeneratedProtocolMessageType( - "GetAnnotationSpecSetRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETANNOTATIONSPECSETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetAnnotationSpecSet. - - Attributes: - name: - Required. AnnotationSpecSet resource name, format: projects/{p - roject_id}/annotationSpecSets/{annotation_spec_set_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetAnnotationSpecSetRequest) - }, -) -_sym_db.RegisterMessage(GetAnnotationSpecSetRequest) - -ListAnnotationSpecSetsRequest = _reflection.GeneratedProtocolMessageType( - "ListAnnotationSpecSetsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTANNOTATIONSPECSETSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ListAnnotationSpecSets. - - Attributes: - parent: - Required. Parent of AnnotationSpecSet resource, format: - projects/{project_id} - filter: - Optional. Filter is not supported at this moment. - page_size: - Optional. Requested page size. Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by [ListAnnotationSpecSetsRespon - se.next_page_token][google.cloud.datalabeling.v1beta1.ListAnno - tationSpecSetsResponse.next_page_token] of the previous - [DataLabelingService.ListAnnotationSpecSets] call. Return - first page if empty. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest) - }, -) -_sym_db.RegisterMessage(ListAnnotationSpecSetsRequest) - -ListAnnotationSpecSetsResponse = _reflection.GeneratedProtocolMessageType( - "ListAnnotationSpecSetsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTANNOTATIONSPECSETSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of listing annotation spec set under a project. - - Attributes: - annotation_spec_sets: - The list of annotation spec sets. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse) - }, -) -_sym_db.RegisterMessage(ListAnnotationSpecSetsResponse) - -DeleteAnnotationSpecSetRequest = _reflection.GeneratedProtocolMessageType( - "DeleteAnnotationSpecSetRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEANNOTATIONSPECSETREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for DeleteAnnotationSpecSet. - - Attributes: - name: - Required. AnnotationSpec resource name, format: ``projects/{pr - oject_id}/annotationSpecSets/{annotation_spec_set_id}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.DeleteAnnotationSpecSetRequest) - }, -) -_sym_db.RegisterMessage(DeleteAnnotationSpecSetRequest) - -CreateInstructionRequest = _reflection.GeneratedProtocolMessageType( - "CreateInstructionRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEINSTRUCTIONREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for CreateInstruction. - - Attributes: - parent: - Required. Instruction resource parent, format: - projects/{project_id} - instruction: - Required. Instruction of how to perform the labeling task. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.CreateInstructionRequest) - }, -) -_sym_db.RegisterMessage(CreateInstructionRequest) - -GetInstructionRequest = _reflection.GeneratedProtocolMessageType( - "GetInstructionRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETINSTRUCTIONREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetInstruction. - - Attributes: - name: - Required. Instruction resource name, format: - projects/{project_id}/instructions/{instruction_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetInstructionRequest) - }, -) -_sym_db.RegisterMessage(GetInstructionRequest) - -DeleteInstructionRequest = _reflection.GeneratedProtocolMessageType( - "DeleteInstructionRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEINSTRUCTIONREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for DeleteInstruction. - - Attributes: - name: - Required. Instruction resource name, format: - projects/{project_id}/instructions/{instruction_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.DeleteInstructionRequest) - }, -) -_sym_db.RegisterMessage(DeleteInstructionRequest) - -ListInstructionsRequest = _reflection.GeneratedProtocolMessageType( - "ListInstructionsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTINSTRUCTIONSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ListInstructions. - - Attributes: - parent: - Required. Instruction resource parent, format: - projects/{project_id} - filter: - Optional. Filter is not supported at this moment. - page_size: - Optional. Requested page size. Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. 
A token identifying a page of results for the server - to return. Typically obtained by [ListInstructionsResponse.nex - t_page_token][google.cloud.datalabeling.v1beta1.ListInstructio - nsResponse.next_page_token] of the previous - [DataLabelingService.ListInstructions] call. Return first page - if empty. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListInstructionsRequest) - }, -) -_sym_db.RegisterMessage(ListInstructionsRequest) - -ListInstructionsResponse = _reflection.GeneratedProtocolMessageType( - "ListInstructionsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTINSTRUCTIONSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of listing instructions under a project. - - Attributes: - instructions: - The list of Instructions to return. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListInstructionsResponse) - }, -) -_sym_db.RegisterMessage(ListInstructionsResponse) - -GetEvaluationRequest = _reflection.GeneratedProtocolMessageType( - "GetEvaluationRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETEVALUATIONREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetEvaluation. - - Attributes: - name: - Required. Name of the evaluation. 
Format: "projects/{project\_ - id}/datasets/{dataset_id}/evaluations/{evaluation_id}’ - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetEvaluationRequest) - }, -) -_sym_db.RegisterMessage(GetEvaluationRequest) - -SearchEvaluationsRequest = _reflection.GeneratedProtocolMessageType( - "SearchEvaluationsRequest", - (_message.Message,), - { - "DESCRIPTOR": _SEARCHEVALUATIONSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for SearchEvaluation. - - Attributes: - parent: - Required. Evaluation search parent (project ID). Format: - “projects/{project_id}” - filter: - Optional. To search evaluations, you can filter by the - following: - evaluation\_job.evaluation_job_id (the last - part of [EvaluationJob.name][google.cloud.datalabeling.v1be - ta1.EvaluationJob.name]) - evaluation\_job.model_id (the - {model_name} portion of [EvaluationJob.modelVersion][google - .cloud.datalabeling.v1beta1.EvaluationJob.model_version]) - - evaluation\_job.evaluation_job_run_time_start (Minimum - threshold for the [evaluationJobRunTime][google.cloud.da - talabeling.v1beta1.Evaluation.evaluation_job_run_time] that - created the evaluation) - - evaluation\_job.evaluation_job_run_time_end (Maximum threshold - for the [evaluationJobRunTime][google.cloud.datalabeling - .v1beta1.Evaluation.evaluation_job_run_time] that created - the evaluation) - evaluation\_job.job_state ([EvaluationJo - b.state][google.cloud.datalabeling.v1beta1.EvaluationJob.state - ]) - annotation\_spec.display_name (the Evaluation contains a - metric for the annotation spec with this [displayName][g - oogle.cloud.datalabeling.v1beta1.AnnotationSpec.display_name]) - To filter by multiple critiera, use the ``AND`` operator or - the ``OR`` operator. 
The following examples shows a string - that filters by several critiera: - "evaluation\_job.evaluation_job_id = {evaluation_job_id} AND - evaluationjob.model_id = {model_name} AND - evaluationjob.evaluation_job_run_time_start = {timestamp_1} - AND evaluationjob.evaluation_job_run_time_end = {timestamp_2} - AND annotationspec.display_name = {display_name}" - page_size: - Optional. Requested page size. Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by the [nextPageToken][google.cl - oud.datalabeling.v1beta1.SearchEvaluationsResponse.next_page_t - oken] of the response to a previous search request. If you - don’t specify this field, the API call requests the first page - of the search. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SearchEvaluationsRequest) - }, -) -_sym_db.RegisterMessage(SearchEvaluationsRequest) - -SearchEvaluationsResponse = _reflection.GeneratedProtocolMessageType( - "SearchEvaluationsResponse", - (_message.Message,), - { - "DESCRIPTOR": _SEARCHEVALUATIONSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of searching evaluations. - - Attributes: - evaluations: - The list of evaluations matching the search. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SearchEvaluationsResponse) - }, -) -_sym_db.RegisterMessage(SearchEvaluationsResponse) - -SearchExampleComparisonsRequest = _reflection.GeneratedProtocolMessageType( - "SearchExampleComparisonsRequest", - (_message.Message,), - { - "DESCRIPTOR": _SEARCHEXAMPLECOMPARISONSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message of SearchExampleComparisons. - - Attributes: - parent: - Required. 
Name of the - [Evaluation][google.cloud.datalabeling.v1beta1.Evaluation] - resource to search for example comparisons from. Format: “pro - jects/{project_id}/datasets/{dataset_id}/evaluations/{evaluati - on_id}” - page_size: - Optional. Requested page size. Server may return fewer results - than requested. Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by the - [nextPageToken][SearchExampleComparisons.next_page_token] of - the response to a previous search rquest. If you don’t - specify this field, the API call requests the first page of - the search. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SearchExampleComparisonsRequest) - }, -) -_sym_db.RegisterMessage(SearchExampleComparisonsRequest) - -SearchExampleComparisonsResponse = _reflection.GeneratedProtocolMessageType( - "SearchExampleComparisonsResponse", - (_message.Message,), - { - "ExampleComparison": _reflection.GeneratedProtocolMessageType( - "ExampleComparison", - (_message.Message,), - { - "DESCRIPTOR": _SEARCHEXAMPLECOMPARISONSRESPONSE_EXAMPLECOMPARISON, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Example comparisons comparing ground truth output and predictions for - a specific input. - - Attributes: - ground_truth_example: - The ground truth output for the input. - model_created_examples: - Predictions by the model for the input. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse.ExampleComparison) - }, - ), - "DESCRIPTOR": _SEARCHEXAMPLECOMPARISONSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results of searching example comparisons. - - Attributes: - example_comparisons: - A list of example comparisons matching the search criteria. - next_page_token: - A token to retrieve next page of results. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SearchExampleComparisonsResponse) - }, -) -_sym_db.RegisterMessage(SearchExampleComparisonsResponse) -_sym_db.RegisterMessage(SearchExampleComparisonsResponse.ExampleComparison) - -CreateEvaluationJobRequest = _reflection.GeneratedProtocolMessageType( - "CreateEvaluationJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEEVALUATIONJOBREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for CreateEvaluationJob. - - Attributes: - parent: - Required. Evaluation job resource parent. Format: - “projects/{project_id}” - job: - Required. The evaluation job to create. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.CreateEvaluationJobRequest) - }, -) -_sym_db.RegisterMessage(CreateEvaluationJobRequest) - -UpdateEvaluationJobRequest = _reflection.GeneratedProtocolMessageType( - "UpdateEvaluationJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEEVALUATIONJOBREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for UpdateEvaluationJob. - - Attributes: - evaluation_job: - Required. Evaluation job that is going to be updated. - update_mask: - Optional. Mask for which fields to update. You can only - provide the following fields: - - ``evaluationJobConfig.humanAnnotationConfig.instruction`` - - ``evaluationJobConfig.exampleCount`` - - ``evaluationJobConfig.exampleSamplePercentage`` You can - provide more than one of these fields by separating them with - commas. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.UpdateEvaluationJobRequest) - }, -) -_sym_db.RegisterMessage(UpdateEvaluationJobRequest) - -GetEvaluationJobRequest = _reflection.GeneratedProtocolMessageType( - "GetEvaluationJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETEVALUATIONJOBREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for GetEvaluationJob. - - Attributes: - name: - Required. Name of the evaluation job. Format: - “projects/{project_id}/evaluationJobs/{evaluation_job_id}” - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GetEvaluationJobRequest) - }, -) -_sym_db.RegisterMessage(GetEvaluationJobRequest) - -PauseEvaluationJobRequest = _reflection.GeneratedProtocolMessageType( - "PauseEvaluationJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _PAUSEEVALUATIONJOBREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for PauseEvaluationJob. - - Attributes: - name: - Required. Name of the evaluation job that is going to be - paused. Format: - “projects/{project_id}/evaluationJobs/{evaluation_job_id}” - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.PauseEvaluationJobRequest) - }, -) -_sym_db.RegisterMessage(PauseEvaluationJobRequest) - -ResumeEvaluationJobRequest = _reflection.GeneratedProtocolMessageType( - "ResumeEvaluationJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _RESUMEEVALUATIONJOBREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message ResumeEvaluationJob. - - Attributes: - name: - Required. Name of the evaluation job that is going to be - resumed. 
Format: - “projects/{project_id}/evaluationJobs/{evaluation_job_id}” - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ResumeEvaluationJobRequest) - }, -) -_sym_db.RegisterMessage(ResumeEvaluationJobRequest) - -DeleteEvaluationJobRequest = _reflection.GeneratedProtocolMessageType( - "DeleteEvaluationJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEEVALUATIONJOBREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message DeleteEvaluationJob. - - Attributes: - name: - Required. Name of the evaluation job that is going to be - deleted. Format: - “projects/{project_id}/evaluationJobs/{evaluation_job_id}” - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.DeleteEvaluationJobRequest) - }, -) -_sym_db.RegisterMessage(DeleteEvaluationJobRequest) - -ListEvaluationJobsRequest = _reflection.GeneratedProtocolMessageType( - "ListEvaluationJobsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTEVALUATIONJOBSREQUEST, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Request message for ListEvaluationJobs. - - Attributes: - parent: - Required. Evaluation job resource parent. Format: - “projects/{project_id}” - filter: - Optional. You can filter the jobs to list by model_id (also - known as model_name, as described in [EvaluationJob.modelVersi - on][google.cloud.datalabeling.v1beta1.EvaluationJob.model_vers - ion]) or by evaluation job state (as described in [EvaluationJ - ob.state][google.cloud.datalabeling.v1beta1.EvaluationJob.stat - e]). To filter by both criteria, use the ``AND`` operator or - the ``OR`` operator. For example, you can use the following - string for your filter: “evaluationjob.model_id = {model_name} - AND evaluationjob.state = {evaluation_job_state}” - page_size: - Optional. Requested page size. Server may return fewer results - than requested. 
Default value is 100. - page_token: - Optional. A token identifying a page of results for the server - to return. Typically obtained by the [nextPageToken][google.cl - oud.datalabeling.v1beta1.ListEvaluationJobsResponse.next_page\_ - token] in the response to the previous request. The request - returns the first page if this is empty. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListEvaluationJobsRequest) - }, -) -_sym_db.RegisterMessage(ListEvaluationJobsRequest) - -ListEvaluationJobsResponse = _reflection.GeneratedProtocolMessageType( - "ListEvaluationJobsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTEVALUATIONJOBSRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_labeling_service_pb2", - "__doc__": """Results for listing evaluation jobs. - - Attributes: - evaluation_jobs: - The list of evaluation jobs to return. - next_page_token: - A token to retrieve next page of results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse) - }, -) -_sym_db.RegisterMessage(ListEvaluationJobsResponse) - - -DESCRIPTOR._options = None -_CREATEDATASETREQUEST.fields_by_name["parent"]._options = None -_CREATEDATASETREQUEST.fields_by_name["dataset"]._options = None -_GETDATASETREQUEST.fields_by_name["name"]._options = None -_LISTDATASETSREQUEST.fields_by_name["parent"]._options = None -_LISTDATASETSREQUEST.fields_by_name["filter"]._options = None -_LISTDATASETSREQUEST.fields_by_name["page_size"]._options = None -_LISTDATASETSREQUEST.fields_by_name["page_token"]._options = None -_DELETEDATASETREQUEST.fields_by_name["name"]._options = None -_IMPORTDATAREQUEST.fields_by_name["name"]._options = None -_IMPORTDATAREQUEST.fields_by_name["input_config"]._options = None -_EXPORTDATAREQUEST.fields_by_name["name"]._options = None -_EXPORTDATAREQUEST.fields_by_name["annotated_dataset"]._options = None -_EXPORTDATAREQUEST.fields_by_name["filter"]._options = None 
-_EXPORTDATAREQUEST.fields_by_name["output_config"]._options = None -_GETDATAITEMREQUEST.fields_by_name["name"]._options = None -_LISTDATAITEMSREQUEST.fields_by_name["parent"]._options = None -_LISTDATAITEMSREQUEST.fields_by_name["filter"]._options = None -_LISTDATAITEMSREQUEST.fields_by_name["page_size"]._options = None -_LISTDATAITEMSREQUEST.fields_by_name["page_token"]._options = None -_GETANNOTATEDDATASETREQUEST.fields_by_name["name"]._options = None -_LISTANNOTATEDDATASETSREQUEST.fields_by_name["parent"]._options = None -_LISTANNOTATEDDATASETSREQUEST.fields_by_name["filter"]._options = None -_LISTANNOTATEDDATASETSREQUEST.fields_by_name["page_size"]._options = None -_LISTANNOTATEDDATASETSREQUEST.fields_by_name["page_token"]._options = None -_DELETEANNOTATEDDATASETREQUEST.fields_by_name["name"]._options = None -_LABELIMAGEREQUEST.fields_by_name["parent"]._options = None -_LABELIMAGEREQUEST.fields_by_name["basic_config"]._options = None -_LABELIMAGEREQUEST.fields_by_name["feature"]._options = None -_LABELVIDEOREQUEST.fields_by_name["parent"]._options = None -_LABELVIDEOREQUEST.fields_by_name["basic_config"]._options = None -_LABELVIDEOREQUEST.fields_by_name["feature"]._options = None -_LABELTEXTREQUEST.fields_by_name["parent"]._options = None -_LABELTEXTREQUEST.fields_by_name["basic_config"]._options = None -_LABELTEXTREQUEST.fields_by_name["feature"]._options = None -_GETEXAMPLEREQUEST.fields_by_name["name"]._options = None -_GETEXAMPLEREQUEST.fields_by_name["filter"]._options = None -_LISTEXAMPLESREQUEST.fields_by_name["parent"]._options = None -_LISTEXAMPLESREQUEST.fields_by_name["filter"]._options = None -_LISTEXAMPLESREQUEST.fields_by_name["page_size"]._options = None -_LISTEXAMPLESREQUEST.fields_by_name["page_token"]._options = None -_CREATEANNOTATIONSPECSETREQUEST.fields_by_name["parent"]._options = None -_CREATEANNOTATIONSPECSETREQUEST.fields_by_name["annotation_spec_set"]._options = None -_GETANNOTATIONSPECSETREQUEST.fields_by_name["name"]._options = 
None -_LISTANNOTATIONSPECSETSREQUEST.fields_by_name["parent"]._options = None -_LISTANNOTATIONSPECSETSREQUEST.fields_by_name["filter"]._options = None -_LISTANNOTATIONSPECSETSREQUEST.fields_by_name["page_size"]._options = None -_LISTANNOTATIONSPECSETSREQUEST.fields_by_name["page_token"]._options = None -_DELETEANNOTATIONSPECSETREQUEST.fields_by_name["name"]._options = None -_CREATEINSTRUCTIONREQUEST.fields_by_name["parent"]._options = None -_CREATEINSTRUCTIONREQUEST.fields_by_name["instruction"]._options = None -_GETINSTRUCTIONREQUEST.fields_by_name["name"]._options = None -_DELETEINSTRUCTIONREQUEST.fields_by_name["name"]._options = None -_LISTINSTRUCTIONSREQUEST.fields_by_name["parent"]._options = None -_LISTINSTRUCTIONSREQUEST.fields_by_name["filter"]._options = None -_LISTINSTRUCTIONSREQUEST.fields_by_name["page_size"]._options = None -_LISTINSTRUCTIONSREQUEST.fields_by_name["page_token"]._options = None -_GETEVALUATIONREQUEST.fields_by_name["name"]._options = None -_SEARCHEVALUATIONSREQUEST.fields_by_name["parent"]._options = None -_SEARCHEVALUATIONSREQUEST.fields_by_name["filter"]._options = None -_SEARCHEVALUATIONSREQUEST.fields_by_name["page_size"]._options = None -_SEARCHEVALUATIONSREQUEST.fields_by_name["page_token"]._options = None -_SEARCHEXAMPLECOMPARISONSREQUEST.fields_by_name["parent"]._options = None -_SEARCHEXAMPLECOMPARISONSREQUEST.fields_by_name["page_size"]._options = None -_SEARCHEXAMPLECOMPARISONSREQUEST.fields_by_name["page_token"]._options = None -_CREATEEVALUATIONJOBREQUEST.fields_by_name["parent"]._options = None -_CREATEEVALUATIONJOBREQUEST.fields_by_name["job"]._options = None -_UPDATEEVALUATIONJOBREQUEST.fields_by_name["evaluation_job"]._options = None -_UPDATEEVALUATIONJOBREQUEST.fields_by_name["update_mask"]._options = None -_GETEVALUATIONJOBREQUEST.fields_by_name["name"]._options = None -_PAUSEEVALUATIONJOBREQUEST.fields_by_name["name"]._options = None -_RESUMEEVALUATIONJOBREQUEST.fields_by_name["name"]._options = None 
-_DELETEEVALUATIONJOBREQUEST.fields_by_name["name"]._options = None -_LISTEVALUATIONJOBSREQUEST.fields_by_name["parent"]._options = None -_LISTEVALUATIONJOBSREQUEST.fields_by_name["filter"]._options = None -_LISTEVALUATIONJOBSREQUEST.fields_by_name["page_size"]._options = None -_LISTEVALUATIONJOBSREQUEST.fields_by_name["page_token"]._options = None - -_DATALABELINGSERVICE = _descriptor.ServiceDescriptor( - name="DataLabelingService", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\033datalabeling.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform", - create_key=_descriptor._internal_create_key, - serialized_start=8298, - serialized_end=15504, - methods=[ - _descriptor.MethodDescriptor( - name="CreateDataset", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.CreateDataset", - index=0, - containing_service=None, - input_type=_CREATEDATASETREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._DATASET, - serialized_options=b'\202\323\344\223\002*"%/v1beta1/{parent=projects/*}/datasets:\001*\332A\016parent,dataset', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetDataset", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetDataset", - index=1, - containing_service=None, - input_type=_GETDATASETREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._DATASET, - serialized_options=b"\202\323\344\223\002'\022%/v1beta1/{name=projects/*/datasets/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListDatasets", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ListDatasets", - index=2, - containing_service=None, - input_type=_LISTDATASETSREQUEST, - output_type=_LISTDATASETSRESPONSE, - 
serialized_options=b"\202\323\344\223\002'\022%/v1beta1/{parent=projects/*}/datasets\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteDataset", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.DeleteDataset", - index=3, - containing_service=None, - input_type=_DELETEDATASETREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002'*%/v1beta1/{name=projects/*/datasets/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ImportData", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ImportData", - index=4, - containing_service=None, - input_type=_IMPORTDATAREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\0025"0/v1beta1/{name=projects/*/datasets/*}:importData:\001*\332A\021name,input_config\312A:\n\033ImportDataOperationResponse\022\033ImportDataOperationMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ExportData", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ExportData", - index=5, - containing_service=None, - input_type=_EXPORTDATAREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\0025"0/v1beta1/{name=projects/*/datasets/*}:exportData:\001*\332A+name,annotated_dataset,filter,output_config\312A:\n\033ExportDataOperationResponse\022\033ExportDataOperationMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetDataItem", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetDataItem", - index=6, - containing_service=None, - input_type=_GETDATAITEMREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._DATAITEM, - 
serialized_options=b"\202\323\344\223\0023\0221/v1beta1/{name=projects/*/datasets/*/dataItems/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListDataItems", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ListDataItems", - index=7, - containing_service=None, - input_type=_LISTDATAITEMSREQUEST, - output_type=_LISTDATAITEMSRESPONSE, - serialized_options=b"\202\323\344\223\0023\0221/v1beta1/{parent=projects/*/datasets/*}/dataItems\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetAnnotatedDataset", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetAnnotatedDataset", - index=8, - containing_service=None, - input_type=_GETANNOTATEDDATASETREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._ANNOTATEDDATASET, - serialized_options=b"\202\323\344\223\002;\0229/v1beta1/{name=projects/*/datasets/*/annotatedDatasets/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListAnnotatedDatasets", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ListAnnotatedDatasets", - index=9, - containing_service=None, - input_type=_LISTANNOTATEDDATASETSREQUEST, - output_type=_LISTANNOTATEDDATASETSRESPONSE, - serialized_options=b"\202\323\344\223\002;\0229/v1beta1/{parent=projects/*/datasets/*}/annotatedDatasets\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteAnnotatedDataset", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.DeleteAnnotatedDataset", - index=10, - containing_service=None, - input_type=_DELETEANNOTATEDDATASETREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002;*9/v1beta1/{name=projects/*/datasets/*/annotatedDatasets/*}", - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="LabelImage", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.LabelImage", - index=11, - containing_service=None, - input_type=_LABELIMAGEREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\0028"3/v1beta1/{parent=projects/*/datasets/*}/image:label:\001*\332A\033parent,basic_config,feature\312A*\n\020AnnotatedDataset\022\026LabelOperationMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="LabelVideo", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.LabelVideo", - index=12, - containing_service=None, - input_type=_LABELVIDEOREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\0028"3/v1beta1/{parent=projects/*/datasets/*}/video:label:\001*\332A\033parent,basic_config,feature\312A*\n\020AnnotatedDataset\022\026LabelOperationMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="LabelText", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.LabelText", - index=13, - containing_service=None, - input_type=_LABELTEXTREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\0027"2/v1beta1/{parent=projects/*/datasets/*}/text:label:\001*\332A\033parent,basic_config,feature\312A*\n\020AnnotatedDataset\022\026LabelOperationMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetExample", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetExample", - index=14, - containing_service=None, - input_type=_GETEXAMPLEREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._EXAMPLE, - 
serialized_options=b"\202\323\344\223\002F\022D/v1beta1/{name=projects/*/datasets/*/annotatedDatasets/*/examples/*}\332A\013name,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListExamples", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ListExamples", - index=15, - containing_service=None, - input_type=_LISTEXAMPLESREQUEST, - output_type=_LISTEXAMPLESRESPONSE, - serialized_options=b"\202\323\344\223\002F\022D/v1beta1/{parent=projects/*/datasets/*/annotatedDatasets/*}/examples\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateAnnotationSpecSet", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.CreateAnnotationSpecSet", - index=16, - containing_service=None, - input_type=_CREATEANNOTATIONSPECSETREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPECSET, - serialized_options=b'\202\323\344\223\0024"//v1beta1/{parent=projects/*}/annotationSpecSets:\001*\332A\032parent,annotation_spec_set', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetAnnotationSpecSet", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetAnnotationSpecSet", - index=17, - containing_service=None, - input_type=_GETANNOTATIONSPECSETREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPECSET, - serialized_options=b"\202\323\344\223\0021\022//v1beta1/{name=projects/*/annotationSpecSets/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListAnnotationSpecSets", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ListAnnotationSpecSets", - index=18, - containing_service=None, - input_type=_LISTANNOTATIONSPECSETSREQUEST, - output_type=_LISTANNOTATIONSPECSETSRESPONSE, - 
serialized_options=b"\202\323\344\223\0021\022//v1beta1/{parent=projects/*}/annotationSpecSets\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteAnnotationSpecSet", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.DeleteAnnotationSpecSet", - index=19, - containing_service=None, - input_type=_DELETEANNOTATIONSPECSETREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\0021*//v1beta1/{name=projects/*/annotationSpecSets/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateInstruction", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.CreateInstruction", - index=20, - containing_service=None, - input_type=_CREATEINSTRUCTIONREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=b'\202\323\344\223\002.")/v1beta1/{parent=projects/*}/instructions:\001*\332A\022parent,instruction\312A(\n\013Instruction\022\031CreateInstructionMetadata', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetInstruction", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetInstruction", - index=21, - containing_service=None, - input_type=_GETINSTRUCTIONREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2._INSTRUCTION, - serialized_options=b"\202\323\344\223\002+\022)/v1beta1/{name=projects/*/instructions/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListInstructions", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ListInstructions", - index=22, - containing_service=None, - input_type=_LISTINSTRUCTIONSREQUEST, - output_type=_LISTINSTRUCTIONSRESPONSE, - 
serialized_options=b"\202\323\344\223\002+\022)/v1beta1/{parent=projects/*}/instructions\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteInstruction", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.DeleteInstruction", - index=23, - containing_service=None, - input_type=_DELETEINSTRUCTIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002+*)/v1beta1/{name=projects/*/instructions/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetEvaluation", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetEvaluation", - index=24, - containing_service=None, - input_type=_GETEVALUATIONREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2._EVALUATION, - serialized_options=b"\202\323\344\223\0025\0223/v1beta1/{name=projects/*/datasets/*/evaluations/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="SearchEvaluations", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.SearchEvaluations", - index=25, - containing_service=None, - input_type=_SEARCHEVALUATIONSREQUEST, - output_type=_SEARCHEVALUATIONSRESPONSE, - serialized_options=b"\202\323\344\223\0021\022//v1beta1/{parent=projects/*}/evaluations:search\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="SearchExampleComparisons", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.SearchExampleComparisons", - index=26, - containing_service=None, - input_type=_SEARCHEXAMPLECOMPARISONSREQUEST, - output_type=_SEARCHEXAMPLECOMPARISONSRESPONSE, - serialized_options=b'\202\323\344\223\002T"O/v1beta1/{parent=projects/*/datasets/*/evaluations/*}/exampleComparisons:search:\001*\332A\006parent', - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateEvaluationJob", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.CreateEvaluationJob", - index=27, - containing_service=None, - input_type=_CREATEEVALUATIONJOBREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2._EVALUATIONJOB, - serialized_options=b'\202\323\344\223\0020"+/v1beta1/{parent=projects/*}/evaluationJobs:\001*\332A\nparent,job', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateEvaluationJob", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.UpdateEvaluationJob", - index=28, - containing_service=None, - input_type=_UPDATEEVALUATIONJOBREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2._EVALUATIONJOB, - serialized_options=b"\202\323\344\223\002L2:/v1beta1/{evaluation_job.name=projects/*/evaluationJobs/*}:\016evaluation_job\332A\032evaluation_job,update_mask", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetEvaluationJob", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.GetEvaluationJob", - index=29, - containing_service=None, - input_type=_GETEVALUATIONJOBREQUEST, - output_type=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2._EVALUATIONJOB, - serialized_options=b"\202\323\344\223\002-\022+/v1beta1/{name=projects/*/evaluationJobs/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="PauseEvaluationJob", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.PauseEvaluationJob", - index=30, - containing_service=None, - input_type=_PAUSEEVALUATIONJOBREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - 
serialized_options=b'\202\323\344\223\0026"1/v1beta1/{name=projects/*/evaluationJobs/*}:pause:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ResumeEvaluationJob", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ResumeEvaluationJob", - index=31, - containing_service=None, - input_type=_RESUMEEVALUATIONJOBREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\0027"2/v1beta1/{name=projects/*/evaluationJobs/*}:resume:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteEvaluationJob", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.DeleteEvaluationJob", - index=32, - containing_service=None, - input_type=_DELETEEVALUATIONJOBREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002-*+/v1beta1/{name=projects/*/evaluationJobs/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListEvaluationJobs", - full_name="google.cloud.datalabeling.v1beta1.DataLabelingService.ListEvaluationJobs", - index=33, - containing_service=None, - input_type=_LISTEVALUATIONJOBSREQUEST, - output_type=_LISTEVALUATIONJOBSRESPONSE, - serialized_options=b"\202\323\344\223\002-\022+/v1beta1/{parent=projects/*}/evaluationJobs\332A\rparent,filter", - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DATALABELINGSERVICE) - -DESCRIPTOR.services_by_name["DataLabelingService"] = _DATALABELINGSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2_grpc.py deleted file mode 100644 index ad9c37d..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/data_labeling_service_pb2_grpc.py +++ 
/dev/null @@ -1,1571 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.datalabeling_v1beta1.proto import ( - annotation_spec_set_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - data_labeling_service_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - dataset_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - evaluation_job_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - evaluation_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - instruction_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class DataLabelingServiceStub(object): - """Missing associated documentation comment in .proto file.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.CreateDataset = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateDataset", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateDatasetRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Dataset.FromString, - ) - self.GetDataset = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataset", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetDatasetRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Dataset.FromString, - ) - self.ListDatasets = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDatasets", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDatasetsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDatasetsResponse.FromString, - ) - self.DeleteDataset = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteDataset", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteDatasetRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ImportData = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ImportData", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ImportDataRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.ExportData = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ExportData", - 
request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ExportDataRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.GetDataItem = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataItem", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetDataItemRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.DataItem.FromString, - ) - self.ListDataItems = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDataItems", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDataItemsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDataItemsResponse.FromString, - ) - self.GetAnnotatedDataset = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotatedDataset", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetAnnotatedDatasetRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.AnnotatedDataset.FromString, - ) - self.ListAnnotatedDatasets = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotatedDatasets", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotatedDatasetsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotatedDatasetsResponse.FromString, - ) - self.DeleteAnnotatedDataset = channel.unary_unary( - 
"/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotatedDataset", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteAnnotatedDatasetRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.LabelImage = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelImage", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelImageRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.LabelVideo = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelVideo", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelVideoRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.LabelText = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelText", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelTextRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.GetExample = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetExample", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetExampleRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Example.FromString, - ) - self.ListExamples = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListExamples", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListExamplesRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListExamplesResponse.FromString, - ) - self.CreateAnnotationSpecSet = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateAnnotationSpecSet", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateAnnotationSpecSetRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.AnnotationSpecSet.FromString, - ) - self.GetAnnotationSpecSet = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotationSpecSet", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetAnnotationSpecSetRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.AnnotationSpecSet.FromString, - ) - self.ListAnnotationSpecSets = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotationSpecSets", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotationSpecSetsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotationSpecSetsResponse.FromString, - ) - self.DeleteAnnotationSpecSet = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotationSpecSet", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteAnnotationSpecSetRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateInstruction = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateInstruction", - 
request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateInstructionRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.GetInstruction = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetInstruction", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetInstructionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2.Instruction.FromString, - ) - self.ListInstructions = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListInstructions", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListInstructionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListInstructionsResponse.FromString, - ) - self.DeleteInstruction = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteInstruction", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteInstructionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetEvaluation = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluation", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetEvaluationRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2.Evaluation.FromString, - ) - self.SearchEvaluations = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchEvaluations", - 
request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchEvaluationsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchEvaluationsResponse.FromString, - ) - self.SearchExampleComparisons = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchExampleComparisons", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchExampleComparisonsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchExampleComparisonsResponse.FromString, - ) - self.CreateEvaluationJob = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateEvaluationJob", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateEvaluationJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.FromString, - ) - self.UpdateEvaluationJob = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/UpdateEvaluationJob", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.UpdateEvaluationJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.FromString, - ) - self.GetEvaluationJob = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluationJob", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetEvaluationJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.FromString, - ) - 
self.PauseEvaluationJob = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/PauseEvaluationJob", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.PauseEvaluationJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ResumeEvaluationJob = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ResumeEvaluationJob", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ResumeEvaluationJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.DeleteEvaluationJob = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteEvaluationJob", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteEvaluationJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListEvaluationJobs = channel.unary_unary( - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListEvaluationJobs", - request_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListEvaluationJobsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListEvaluationJobsResponse.FromString, - ) - - -class DataLabelingServiceServicer(object): - """Missing associated documentation comment in .proto file.""" - - def CreateDataset(self, request, context): - """Creates dataset. If success return a Dataset resource. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetDataset(self, request, context): - """Gets dataset by resource name. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDatasets(self, request, context): - """Lists datasets under a project. Pagination is supported. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteDataset(self, request, context): - """Deletes a dataset by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ImportData(self, request, context): - """Imports data into dataset based on source locations defined in request. - It can be called multiple times for the same dataset. Each dataset can - only have one long running operation running on it. For example, no - labeling task (also long running operation) can be started while - importing is still ongoing. Vice versa. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ExportData(self, request, context): - """Exports data and annotations from dataset. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetDataItem(self, request, context): - """Gets a data item in a dataset by resource name. This API can be - called after data are imported into dataset. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDataItems(self, request, context): - """Lists data items in a dataset. This API can be called after data - are imported into dataset. Pagination is supported. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetAnnotatedDataset(self, request, context): - """Gets an annotated dataset by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListAnnotatedDatasets(self, request, context): - """Lists annotated datasets for a dataset. Pagination is supported. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteAnnotatedDataset(self, request, context): - """Deletes an annotated dataset by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def LabelImage(self, request, context): - """Starts a labeling task for image. The type of image labeling task is - configured by feature in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def LabelVideo(self, request, context): - """Starts a labeling task for video. The type of video labeling task is - configured by feature in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def LabelText(self, request, context): - """Starts a labeling task for text. The type of text labeling task is - configured by feature in the request. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetExample(self, request, context): - """Gets an example by resource name, including both data and annotation. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListExamples(self, request, context): - """Lists examples in an annotated dataset. Pagination is supported. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateAnnotationSpecSet(self, request, context): - """Creates an annotation spec set by providing a set of labels. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetAnnotationSpecSet(self, request, context): - """Gets an annotation spec set by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListAnnotationSpecSets(self, request, context): - """Lists annotation spec sets for a project. Pagination is supported. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteAnnotationSpecSet(self, request, context): - """Deletes an annotation spec set by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateInstruction(self, request, context): - """Creates an instruction for how data should be labeled. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetInstruction(self, request, context): - """Gets an instruction by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListInstructions(self, request, context): - """Lists instructions for a project. Pagination is supported. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteInstruction(self, request, context): - """Deletes an instruction object by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetEvaluation(self, request, context): - """Gets an evaluation by resource name (to search, use - [projects.evaluations.search][google.cloud.datalabeling.v1beta1.DataLabelingService.SearchEvaluations]). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def SearchEvaluations(self, request, context): - """Searches [evaluations][google.cloud.datalabeling.v1beta1.Evaluation] within a project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def SearchExampleComparisons(self, request, context): - """Searches example comparisons from an evaluation. The return format is a - list of example comparisons that show ground truth and prediction(s) for - a single input. Search by providing an evaluation ID. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateEvaluationJob(self, request, context): - """Creates an evaluation job. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateEvaluationJob(self, request, context): - """Updates an evaluation job. You can only update certain fields of the job's - [EvaluationJobConfig][google.cloud.datalabeling.v1beta1.EvaluationJobConfig]: `humanAnnotationConfig.instruction`, - `exampleCount`, and `exampleSamplePercentage`. - - If you want to change any other aspect of the evaluation job, you must - delete the job and create a new one. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetEvaluationJob(self, request, context): - """Gets an evaluation job by resource name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def PauseEvaluationJob(self, request, context): - """Pauses an evaluation job. Pausing an evaluation job that is already in a - `PAUSED` state is a no-op. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ResumeEvaluationJob(self, request, context): - """Resumes a paused evaluation job. A deleted evaluation job can't be resumed. - Resuming a running or scheduled evaluation job is a no-op. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteEvaluationJob(self, request, context): - """Stops and deletes an evaluation job. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListEvaluationJobs(self, request, context): - """Lists all evaluation jobs within a project with possible filters. - Pagination is supported. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DataLabelingServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateDataset": grpc.unary_unary_rpc_method_handler( - servicer.CreateDataset, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateDatasetRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, - ), - "GetDataset": grpc.unary_unary_rpc_method_handler( - servicer.GetDataset, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetDatasetRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, - ), - "ListDatasets": grpc.unary_unary_rpc_method_handler( - servicer.ListDatasets, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDatasetsRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDatasetsResponse.SerializeToString, - ), - "DeleteDataset": grpc.unary_unary_rpc_method_handler( - servicer.DeleteDataset, - 
request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteDatasetRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ImportData": grpc.unary_unary_rpc_method_handler( - servicer.ImportData, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ImportDataRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "ExportData": grpc.unary_unary_rpc_method_handler( - servicer.ExportData, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ExportDataRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "GetDataItem": grpc.unary_unary_rpc_method_handler( - servicer.GetDataItem, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetDataItemRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.DataItem.SerializeToString, - ), - "ListDataItems": grpc.unary_unary_rpc_method_handler( - servicer.ListDataItems, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDataItemsRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDataItemsResponse.SerializeToString, - ), - "GetAnnotatedDataset": grpc.unary_unary_rpc_method_handler( - servicer.GetAnnotatedDataset, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetAnnotatedDatasetRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.AnnotatedDataset.SerializeToString, - ), - "ListAnnotatedDatasets": 
grpc.unary_unary_rpc_method_handler( - servicer.ListAnnotatedDatasets, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotatedDatasetsRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotatedDatasetsResponse.SerializeToString, - ), - "DeleteAnnotatedDataset": grpc.unary_unary_rpc_method_handler( - servicer.DeleteAnnotatedDataset, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteAnnotatedDatasetRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "LabelImage": grpc.unary_unary_rpc_method_handler( - servicer.LabelImage, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelImageRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "LabelVideo": grpc.unary_unary_rpc_method_handler( - servicer.LabelVideo, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelVideoRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "LabelText": grpc.unary_unary_rpc_method_handler( - servicer.LabelText, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelTextRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "GetExample": grpc.unary_unary_rpc_method_handler( - servicer.GetExample, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetExampleRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Example.SerializeToString, - ), 
- "ListExamples": grpc.unary_unary_rpc_method_handler( - servicer.ListExamples, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListExamplesRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListExamplesResponse.SerializeToString, - ), - "CreateAnnotationSpecSet": grpc.unary_unary_rpc_method_handler( - servicer.CreateAnnotationSpecSet, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateAnnotationSpecSetRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.AnnotationSpecSet.SerializeToString, - ), - "GetAnnotationSpecSet": grpc.unary_unary_rpc_method_handler( - servicer.GetAnnotationSpecSet, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetAnnotationSpecSetRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.AnnotationSpecSet.SerializeToString, - ), - "ListAnnotationSpecSets": grpc.unary_unary_rpc_method_handler( - servicer.ListAnnotationSpecSets, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotationSpecSetsRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotationSpecSetsResponse.SerializeToString, - ), - "DeleteAnnotationSpecSet": grpc.unary_unary_rpc_method_handler( - servicer.DeleteAnnotationSpecSet, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteAnnotationSpecSetRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateInstruction": grpc.unary_unary_rpc_method_handler( - 
servicer.CreateInstruction, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateInstructionRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "GetInstruction": grpc.unary_unary_rpc_method_handler( - servicer.GetInstruction, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetInstructionRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2.Instruction.SerializeToString, - ), - "ListInstructions": grpc.unary_unary_rpc_method_handler( - servicer.ListInstructions, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListInstructionsRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListInstructionsResponse.SerializeToString, - ), - "DeleteInstruction": grpc.unary_unary_rpc_method_handler( - servicer.DeleteInstruction, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteInstructionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetEvaluation": grpc.unary_unary_rpc_method_handler( - servicer.GetEvaluation, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetEvaluationRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2.Evaluation.SerializeToString, - ), - "SearchEvaluations": grpc.unary_unary_rpc_method_handler( - servicer.SearchEvaluations, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchEvaluationsRequest.FromString, - 
response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchEvaluationsResponse.SerializeToString, - ), - "SearchExampleComparisons": grpc.unary_unary_rpc_method_handler( - servicer.SearchExampleComparisons, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchExampleComparisonsRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchExampleComparisonsResponse.SerializeToString, - ), - "CreateEvaluationJob": grpc.unary_unary_rpc_method_handler( - servicer.CreateEvaluationJob, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateEvaluationJobRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.SerializeToString, - ), - "UpdateEvaluationJob": grpc.unary_unary_rpc_method_handler( - servicer.UpdateEvaluationJob, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.UpdateEvaluationJobRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.SerializeToString, - ), - "GetEvaluationJob": grpc.unary_unary_rpc_method_handler( - servicer.GetEvaluationJob, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetEvaluationJobRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.SerializeToString, - ), - "PauseEvaluationJob": grpc.unary_unary_rpc_method_handler( - servicer.PauseEvaluationJob, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.PauseEvaluationJobRequest.FromString, - 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ResumeEvaluationJob": grpc.unary_unary_rpc_method_handler( - servicer.ResumeEvaluationJob, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ResumeEvaluationJobRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "DeleteEvaluationJob": grpc.unary_unary_rpc_method_handler( - servicer.DeleteEvaluationJob, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteEvaluationJobRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ListEvaluationJobs": grpc.unary_unary_rpc_method_handler( - servicer.ListEvaluationJobs, - request_deserializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListEvaluationJobsRequest.FromString, - response_serializer=google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListEvaluationJobsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.datalabeling.v1beta1.DataLabelingService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. 
-class DataLabelingService(object): - """Missing associated documentation comment in .proto file.""" - - @staticmethod - def CreateDataset( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateDataset", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateDatasetRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Dataset.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetDataset( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataset", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetDatasetRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Dataset.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListDatasets( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDatasets", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDatasetsRequest.SerializeToString, - 
google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDatasetsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteDataset( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteDataset", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteDatasetRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ImportData( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ImportData", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ImportDataRequest.SerializeToString, - google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ExportData( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ExportData", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ExportDataRequest.SerializeToString, - 
google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetDataItem( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataItem", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetDataItemRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.DataItem.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListDataItems( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDataItems", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDataItemsRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListDataItemsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetAnnotatedDataset( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotatedDataset", - 
google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetAnnotatedDatasetRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.AnnotatedDataset.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListAnnotatedDatasets( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotatedDatasets", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotatedDatasetsRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotatedDatasetsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteAnnotatedDataset( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotatedDataset", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteAnnotatedDatasetRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def LabelImage( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - 
request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelImage", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelImageRequest.SerializeToString, - google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def LabelVideo( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelVideo", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelVideoRequest.SerializeToString, - google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def LabelText( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelText", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.LabelTextRequest.SerializeToString, - google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetExample( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - 
"/google.cloud.datalabeling.v1beta1.DataLabelingService/GetExample", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetExampleRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.Example.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListExamples( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListExamples", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListExamplesRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListExamplesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateAnnotationSpecSet( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateAnnotationSpecSet", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateAnnotationSpecSetRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.AnnotationSpecSet.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetAnnotationSpecSet( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - 
wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotationSpecSet", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetAnnotationSpecSetRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.AnnotationSpecSet.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListAnnotationSpecSets( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotationSpecSets", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotationSpecSetsRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListAnnotationSpecSetsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteAnnotationSpecSet( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotationSpecSet", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteAnnotationSpecSetRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - 
def CreateInstruction( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateInstruction", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateInstructionRequest.SerializeToString, - google_dot_longrunning_dot_operations__pb2.Operation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetInstruction( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetInstruction", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetInstructionRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_instruction__pb2.Instruction.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListInstructions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListInstructions", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListInstructionsRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListInstructionsResponse.FromString, - options, - channel_credentials, - call_credentials, - 
compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteInstruction( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteInstruction", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteInstructionRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetEvaluation( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluation", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetEvaluationRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2.Evaluation.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def SearchEvaluations( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchEvaluations", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchEvaluationsRequest.SerializeToString, - 
google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchEvaluationsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def SearchExampleComparisons( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchExampleComparisons", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchExampleComparisonsRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.SearchExampleComparisonsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateEvaluationJob( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateEvaluationJob", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.CreateEvaluationJobRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateEvaluationJob( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - 
"/google.cloud.datalabeling.v1beta1.DataLabelingService/UpdateEvaluationJob", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.UpdateEvaluationJobRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetEvaluationJob( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluationJob", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.GetEvaluationJobRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__job__pb2.EvaluationJob.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def PauseEvaluationJob( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/PauseEvaluationJob", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.PauseEvaluationJobRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ResumeEvaluationJob( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - 
): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ResumeEvaluationJob", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ResumeEvaluationJobRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteEvaluationJob( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteEvaluationJob", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.DeleteEvaluationJobRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListEvaluationJobs( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListEvaluationJobs", - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListEvaluationJobsRequest.SerializeToString, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__labeling__service__pb2.ListEvaluationJobsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2.py b/google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2.py deleted file mode 100644 index 
58f3533..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2.py +++ /dev/null @@ -1,448 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datalabeling_v1beta1/proto/data_payloads.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/data_payloads.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n;google/cloud/datalabeling_v1beta1/proto/data_payloads.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto"a\n\x0cImagePayload\x12\x11\n\tmime_type\x18\x01 \x01(\t\x12\x17\n\x0fimage_thumbnail\x18\x02 \x01(\x0c\x12\x11\n\timage_uri\x18\x03 \x01(\t\x12\x12\n\nsigned_uri\x18\x04 \x01(\t"#\n\x0bTextPayload\x12\x14\n\x0ctext_content\x18\x01 \x01(\t"S\n\x0eVideoThumbnail\x12\x11\n\tthumbnail\x18\x01 \x01(\x0c\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xa9\x01\n\x0cVideoPayload\x12\x11\n\tmime_type\x18\x01 \x01(\t\x12\x11\n\tvideo_uri\x18\x02 \x01(\t\x12K\n\x10video_thumbnails\x18\x03 
\x03(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.VideoThumbnail\x12\x12\n\nframe_rate\x18\x04 \x01(\x02\x12\x12\n\nsigned_uri\x18\x05 \x01(\tBx\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_IMAGEPAYLOAD = _descriptor.Descriptor( - name="ImagePayload", - full_name="google.cloud.datalabeling.v1beta1.ImagePayload", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="mime_type", - full_name="google.cloud.datalabeling.v1beta1.ImagePayload.mime_type", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_thumbnail", - full_name="google.cloud.datalabeling.v1beta1.ImagePayload.image_thumbnail", - index=1, - number=2, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_uri", - full_name="google.cloud.datalabeling.v1beta1.ImagePayload.image_uri", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="signed_uri", - full_name="google.cloud.datalabeling.v1beta1.ImagePayload.signed_uri", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=193, - serialized_end=290, -) - - -_TEXTPAYLOAD = _descriptor.Descriptor( - name="TextPayload", - full_name="google.cloud.datalabeling.v1beta1.TextPayload", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text_content", - full_name="google.cloud.datalabeling.v1beta1.TextPayload.text_content", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=292, - serialized_end=327, -) - - -_VIDEOTHUMBNAIL = _descriptor.Descriptor( - name="VideoThumbnail", - full_name="google.cloud.datalabeling.v1beta1.VideoThumbnail", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="thumbnail", - 
full_name="google.cloud.datalabeling.v1beta1.VideoThumbnail.thumbnail", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time_offset", - full_name="google.cloud.datalabeling.v1beta1.VideoThumbnail.time_offset", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=329, - serialized_end=412, -) - - -_VIDEOPAYLOAD = _descriptor.Descriptor( - name="VideoPayload", - full_name="google.cloud.datalabeling.v1beta1.VideoPayload", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="mime_type", - full_name="google.cloud.datalabeling.v1beta1.VideoPayload.mime_type", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_uri", - full_name="google.cloud.datalabeling.v1beta1.VideoPayload.video_uri", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_thumbnails", - full_name="google.cloud.datalabeling.v1beta1.VideoPayload.video_thumbnails", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="frame_rate", - full_name="google.cloud.datalabeling.v1beta1.VideoPayload.frame_rate", - index=3, - number=4, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="signed_uri", - full_name="google.cloud.datalabeling.v1beta1.VideoPayload.signed_uri", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=415, - serialized_end=584, -) - -_VIDEOTHUMBNAIL.fields_by_name[ - "time_offset" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_VIDEOPAYLOAD.fields_by_name["video_thumbnails"].message_type = _VIDEOTHUMBNAIL 
-DESCRIPTOR.message_types_by_name["ImagePayload"] = _IMAGEPAYLOAD -DESCRIPTOR.message_types_by_name["TextPayload"] = _TEXTPAYLOAD -DESCRIPTOR.message_types_by_name["VideoThumbnail"] = _VIDEOTHUMBNAIL -DESCRIPTOR.message_types_by_name["VideoPayload"] = _VIDEOPAYLOAD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ImagePayload = _reflection.GeneratedProtocolMessageType( - "ImagePayload", - (_message.Message,), - { - "DESCRIPTOR": _IMAGEPAYLOAD, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_payloads_pb2", - "__doc__": """Container of information about an image. - - Attributes: - mime_type: - Image format. - image_thumbnail: - A byte string of a thumbnail image. - image_uri: - Image uri from the user bucket. - signed_uri: - Signed uri of the image file in the service bucket. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImagePayload) - }, -) -_sym_db.RegisterMessage(ImagePayload) - -TextPayload = _reflection.GeneratedProtocolMessageType( - "TextPayload", - (_message.Message,), - { - "DESCRIPTOR": _TEXTPAYLOAD, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_payloads_pb2", - "__doc__": """Container of information about a piece of text. - - Attributes: - text_content: - Text content. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TextPayload) - }, -) -_sym_db.RegisterMessage(TextPayload) - -VideoThumbnail = _reflection.GeneratedProtocolMessageType( - "VideoThumbnail", - (_message.Message,), - { - "DESCRIPTOR": _VIDEOTHUMBNAIL, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_payloads_pb2", - "__doc__": """Container of information of a video thumbnail. - - Attributes: - thumbnail: - A byte string of the video frame. - time_offset: - Time offset relative to the beginning of the video, - corresponding to the video frame where the thumbnail has been - extracted from. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.VideoThumbnail) - }, -) -_sym_db.RegisterMessage(VideoThumbnail) - -VideoPayload = _reflection.GeneratedProtocolMessageType( - "VideoPayload", - (_message.Message,), - { - "DESCRIPTOR": _VIDEOPAYLOAD, - "__module__": "google.cloud.datalabeling_v1beta1.proto.data_payloads_pb2", - "__doc__": """Container of information of a video. - - Attributes: - mime_type: - Video format. - video_uri: - Video uri from the user bucket. - video_thumbnails: - The list of video thumbnails. - frame_rate: - FPS of the video. - signed_uri: - Signed uri of the video file in the service bucket. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.VideoPayload) - }, -) -_sym_db.RegisterMessage(VideoPayload) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/data_payloads_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/dataset_pb2.py b/google/cloud/datalabeling_v1beta1/proto/dataset_pb2.py deleted file mode 100644 index 684da51..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/dataset_pb2.py +++ /dev/null @@ -1,2278 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/datalabeling_v1beta1/proto/dataset.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - annotation_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - annotation_spec_set_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - data_payloads_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - human_annotation_config_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/dataset.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - 
serialized_pb=b'\n5google/cloud/datalabeling_v1beta1/proto/dataset.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x38google/cloud/datalabeling_v1beta1/proto/annotation.proto\x1a\x41google/cloud/datalabeling_v1beta1/proto/annotation_spec_set.proto\x1a;google/cloud/datalabeling_v1beta1/proto/data_payloads.proto\x1a\x45google/cloud/datalabeling_v1beta1/proto/human_annotation_config.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc0\x02\n\x07\x44\x61taset\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n\rinput_configs\x18\x05 \x03(\x0b\x32..google.cloud.datalabeling.v1beta1.InputConfig\x12\x1a\n\x12\x62locking_resources\x18\x06 \x03(\t\x12\x17\n\x0f\x64\x61ta_item_count\x18\x07 \x01(\x03:O\xea\x41L\n#datalabeling.googleapis.com/Dataset\x12%projects/{project}/datasets/{dataset}"\xf1\x03\n\x0bInputConfig\x12H\n\rtext_metadata\x18\x06 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.TextMetadataH\x00\x12\x42\n\ngcs_source\x18\x02 \x01(\x0b\x32,.google.cloud.datalabeling.v1beta1.GcsSourceH\x01\x12L\n\x0f\x62igquery_source\x18\x05 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.BigQuerySourceH\x01\x12>\n\tdata_type\x18\x01 \x01(\x0e\x32+.google.cloud.datalabeling.v1beta1.DataType\x12J\n\x0f\x61nnotation_type\x18\x03 \x01(\x0e\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationType\x12Z\n\x17\x63lassification_metadata\x18\x04 \x01(\x0b\x32\x39.google.cloud.datalabeling.v1beta1.ClassificationMetadataB\x14\n\x12\x64\x61ta_type_metadataB\x08\n\x06source"%\n\x0cTextMetadata\x12\x15\n\rlanguage_code\x18\x01 \x01(\t"0\n\x16\x43lassificationMetadata\x12\x16\n\x0eis_multi_label\x18\x01 \x01(\x08"1\n\tGcsSource\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x11\n\tmime_type\x18\x02 \x01(\t"#\n\x0e\x42igQuerySource\x12\x11\n\tinput_uri\x18\x01 
\x01(\t"\xc6\x01\n\x0cOutputConfig\x12L\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.GcsDestinationH\x00\x12Y\n\x16gcs_folder_destination\x18\x02 \x01(\x0b\x32\x37.google.cloud.datalabeling.v1beta1.GcsFolderDestinationH\x00\x42\r\n\x0b\x64\x65stination"7\n\x0eGcsDestination\x12\x12\n\noutput_uri\x18\x01 \x01(\t\x12\x11\n\tmime_type\x18\x02 \x01(\t"1\n\x14GcsFolderDestination\x12\x19\n\x11output_folder_uri\x18\x01 \x01(\t"\xe7\x02\n\x08\x44\x61taItem\x12H\n\rimage_payload\x18\x02 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.ImagePayloadH\x00\x12\x46\n\x0ctext_payload\x18\x03 \x01(\x0b\x32..google.cloud.datalabeling.v1beta1.TextPayloadH\x00\x12H\n\rvideo_payload\x18\x04 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.VideoPayloadH\x00\x12\x0c\n\x04name\x18\x01 \x01(\t:f\xea\x41\x63\n$datalabeling.googleapis.com/DataItem\x12;projects/{project}/datasets/{dataset}/dataItems/{data_item}B\t\n\x07payload"\xff\x04\n\x10\x41nnotatedDataset\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\t \x01(\t\x12N\n\x11\x61nnotation_source\x18\x03 \x01(\x0e\x32\x33.google.cloud.datalabeling.v1beta1.AnnotationSource\x12J\n\x0f\x61nnotation_type\x18\x08 \x01(\x0e\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationType\x12\x15\n\rexample_count\x18\x04 \x01(\x03\x12\x1f\n\x17\x63ompleted_example_count\x18\x05 \x01(\x03\x12\x42\n\x0blabel_stats\x18\x06 \x01(\x0b\x32-.google.cloud.datalabeling.v1beta1.LabelStats\x12/\n\x0b\x63reate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12M\n\x08metadata\x18\n \x01(\x0b\x32;.google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata\x12\x1a\n\x12\x62locking_resources\x18\x0b \x03(\t:~\xea\x41{\n,datalabeling.googleapis.com/AnnotatedDataset\x12Kprojects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}"\x99\x01\n\nLabelStats\x12V\n\rexample_count\x18\x01 
\x03(\x0b\x32?.google.cloud.datalabeling.v1beta1.LabelStats.ExampleCountEntry\x1a\x33\n\x11\x45xampleCountEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\xa2\x08\n\x18\x41nnotatedDatasetMetadata\x12\x63\n\x1bimage_classification_config\x18\x02 \x01(\x0b\x32<.google.cloud.datalabeling.v1beta1.ImageClassificationConfigH\x00\x12U\n\x14\x62ounding_poly_config\x18\x03 \x01(\x0b\x32\x35.google.cloud.datalabeling.v1beta1.BoundingPolyConfigH\x00\x12L\n\x0fpolyline_config\x18\x04 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.PolylineConfigH\x00\x12T\n\x13segmentation_config\x18\x05 \x01(\x0b\x32\x35.google.cloud.datalabeling.v1beta1.SegmentationConfigH\x00\x12\x63\n\x1bvideo_classification_config\x18\x06 \x01(\x0b\x32<.google.cloud.datalabeling.v1beta1.VideoClassificationConfigH\x00\x12[\n\x17object_detection_config\x18\x07 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.ObjectDetectionConfigH\x00\x12Y\n\x16object_tracking_config\x18\x08 \x01(\x0b\x32\x37.google.cloud.datalabeling.v1beta1.ObjectTrackingConfigH\x00\x12\x46\n\x0c\x65vent_config\x18\t \x01(\x0b\x32..google.cloud.datalabeling.v1beta1.EventConfigH\x00\x12\x61\n\x1atext_classification_config\x18\n \x01(\x0b\x32;.google.cloud.datalabeling.v1beta1.TextClassificationConfigH\x00\x12\x66\n\x1dtext_entity_extraction_config\x18\x0b \x01(\x0b\x32=.google.cloud.datalabeling.v1beta1.TextEntityExtractionConfigH\x00\x12Y\n\x17human_annotation_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfigB\x1b\n\x19\x61nnotation_request_config"\xce\x03\n\x07\x45xample\x12H\n\rimage_payload\x18\x02 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.ImagePayloadH\x00\x12\x46\n\x0ctext_payload\x18\x06 \x01(\x0b\x32..google.cloud.datalabeling.v1beta1.TextPayloadH\x00\x12H\n\rvideo_payload\x18\x07 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.VideoPayloadH\x00\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x42\n\x0b\x61nnotations\x18\x05 
\x03(\x0b\x32-.google.cloud.datalabeling.v1beta1.Annotation:\x89\x01\xea\x41\x85\x01\n#datalabeling.googleapis.com/Example\x12^projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}/examples/{example}B\t\n\x07payload*W\n\x08\x44\x61taType\x12\x19\n\x15\x44\x41TA_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05IMAGE\x10\x01\x12\t\n\x05VIDEO\x10\x02\x12\x08\n\x04TEXT\x10\x04\x12\x10\n\x0cGENERAL_DATA\x10\x06\x42x\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - -_DATATYPE = _descriptor.EnumDescriptor( - name="DataType", - full_name="google.cloud.datalabeling.v1beta1.DataType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="DATA_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="VIDEO", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TEXT", - index=3, - number=4, - serialized_options=None, - type=None, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GENERAL_DATA", - index=4, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4434, - serialized_end=4521, -) -_sym_db.RegisterEnumDescriptor(_DATATYPE) - -DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) -DATA_TYPE_UNSPECIFIED = 0 -IMAGE = 1 -VIDEO = 2 -TEXT = 4 -GENERAL_DATA = 6 - - -_DATASET = _descriptor.Descriptor( - name="Dataset", - full_name="google.cloud.datalabeling.v1beta1.Dataset", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.Dataset.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.datalabeling.v1beta1.Dataset.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.datalabeling.v1beta1.Dataset.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.Dataset.create_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="input_configs", - full_name="google.cloud.datalabeling.v1beta1.Dataset.input_configs", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="blocking_resources", - full_name="google.cloud.datalabeling.v1beta1.Dataset.blocking_resources", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_item_count", - full_name="google.cloud.datalabeling.v1beta1.Dataset.data_item_count", - index=6, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=b"\352AL\n#datalabeling.googleapis.com/Dataset\022%projects/{project}/datasets/{dataset}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=440, - serialized_end=760, -) - - -_INPUTCONFIG = _descriptor.Descriptor( - name="InputConfig", - full_name="google.cloud.datalabeling.v1beta1.InputConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="text_metadata", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.text_metadata", - index=0, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="gcs_source", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.gcs_source", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="bigquery_source", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.bigquery_source", - index=2, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_type", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.data_type", - index=3, - number=1, - type=14, - cpp_type=8, - label=1, 
- has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_type", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.annotation_type", - index=4, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="classification_metadata", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.classification_metadata", - index=5, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="data_type_metadata", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.data_type_metadata", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - _descriptor.OneofDescriptor( - name="source", - full_name="google.cloud.datalabeling.v1beta1.InputConfig.source", - index=1, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=763, - serialized_end=1260, -) - - -_TEXTMETADATA = _descriptor.Descriptor( - name="TextMetadata", - full_name="google.cloud.datalabeling.v1beta1.TextMetadata", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="language_code", - full_name="google.cloud.datalabeling.v1beta1.TextMetadata.language_code", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1262, - serialized_end=1299, -) - - -_CLASSIFICATIONMETADATA = _descriptor.Descriptor( - name="ClassificationMetadata", - full_name="google.cloud.datalabeling.v1beta1.ClassificationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="is_multi_label", - full_name="google.cloud.datalabeling.v1beta1.ClassificationMetadata.is_multi_label", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1301, - serialized_end=1349, -) - - -_GCSSOURCE = _descriptor.Descriptor( - name="GcsSource", - full_name="google.cloud.datalabeling.v1beta1.GcsSource", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - 
name="input_uri", - full_name="google.cloud.datalabeling.v1beta1.GcsSource.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="mime_type", - full_name="google.cloud.datalabeling.v1beta1.GcsSource.mime_type", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1351, - serialized_end=1400, -) - - -_BIGQUERYSOURCE = _descriptor.Descriptor( - name="BigQuerySource", - full_name="google.cloud.datalabeling.v1beta1.BigQuerySource", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="input_uri", - full_name="google.cloud.datalabeling.v1beta1.BigQuerySource.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1402, - serialized_end=1437, 
-) - - -_OUTPUTCONFIG = _descriptor.Descriptor( - name="OutputConfig", - full_name="google.cloud.datalabeling.v1beta1.OutputConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="gcs_destination", - full_name="google.cloud.datalabeling.v1beta1.OutputConfig.gcs_destination", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="gcs_folder_destination", - full_name="google.cloud.datalabeling.v1beta1.OutputConfig.gcs_folder_destination", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="destination", - full_name="google.cloud.datalabeling.v1beta1.OutputConfig.destination", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=1440, - serialized_end=1638, -) - - -_GCSDESTINATION = _descriptor.Descriptor( - name="GcsDestination", - full_name="google.cloud.datalabeling.v1beta1.GcsDestination", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="output_uri", - full_name="google.cloud.datalabeling.v1beta1.GcsDestination.output_uri", - 
index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="mime_type", - full_name="google.cloud.datalabeling.v1beta1.GcsDestination.mime_type", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1640, - serialized_end=1695, -) - - -_GCSFOLDERDESTINATION = _descriptor.Descriptor( - name="GcsFolderDestination", - full_name="google.cloud.datalabeling.v1beta1.GcsFolderDestination", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="output_folder_uri", - full_name="google.cloud.datalabeling.v1beta1.GcsFolderDestination.output_folder_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1697, - serialized_end=1746, -) - - -_DATAITEM = _descriptor.Descriptor( - 
name="DataItem", - full_name="google.cloud.datalabeling.v1beta1.DataItem", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="image_payload", - full_name="google.cloud.datalabeling.v1beta1.DataItem.image_payload", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_payload", - full_name="google.cloud.datalabeling.v1beta1.DataItem.text_payload", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_payload", - full_name="google.cloud.datalabeling.v1beta1.DataItem.video_payload", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.DataItem.name", - index=3, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], 
- enum_types=[], - serialized_options=b"\352Ac\n$datalabeling.googleapis.com/DataItem\022;projects/{project}/datasets/{dataset}/dataItems/{data_item}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload", - full_name="google.cloud.datalabeling.v1beta1.DataItem.payload", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=1749, - serialized_end=2108, -) - - -_ANNOTATEDDATASET = _descriptor.Descriptor( - name="AnnotatedDataset", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.description", - index=2, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_source", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.annotation_source", - index=3, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_type", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.annotation_type", - index=4, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="example_count", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.example_count", - index=5, - number=4, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="completed_example_count", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.completed_example_count", - index=6, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="label_stats", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.label_stats", - index=7, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.create_time", - index=8, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.metadata", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="blocking_resources", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDataset.blocking_resources", - index=10, - number=11, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352A{\n,datalabeling.googleapis.com/AnnotatedDataset\022Kprojects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}", - 
is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2111, - serialized_end=2750, -) - - -_LABELSTATS_EXAMPLECOUNTENTRY = _descriptor.Descriptor( - name="ExampleCountEntry", - full_name="google.cloud.datalabeling.v1beta1.LabelStats.ExampleCountEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.datalabeling.v1beta1.LabelStats.ExampleCountEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.datalabeling.v1beta1.LabelStats.ExampleCountEntry.value", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2855, - serialized_end=2906, -) - -_LABELSTATS = _descriptor.Descriptor( - name="LabelStats", - full_name="google.cloud.datalabeling.v1beta1.LabelStats", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="example_count", - full_name="google.cloud.datalabeling.v1beta1.LabelStats.example_count", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - 
default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_LABELSTATS_EXAMPLECOUNTENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2753, - serialized_end=2906, -) - - -_ANNOTATEDDATASETMETADATA = _descriptor.Descriptor( - name="AnnotatedDatasetMetadata", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="image_classification_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.image_classification_config", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="bounding_poly_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.bounding_poly_config", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="polyline_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.polyline_config", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="segmentation_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.segmentation_config", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_classification_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.video_classification_config", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="object_detection_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.object_detection_config", - index=5, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="object_tracking_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.object_tracking_config", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, 
- serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="event_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.event_config", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_classification_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.text_classification_config", - index=8, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_entity_extraction_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.text_entity_extraction_config", - index=9, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="human_annotation_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.human_annotation_config", - index=10, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="annotation_request_config", - full_name="google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata.annotation_request_config", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=2909, - serialized_end=3967, -) - - -_EXAMPLE = _descriptor.Descriptor( - name="Example", - full_name="google.cloud.datalabeling.v1beta1.Example", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="image_payload", - full_name="google.cloud.datalabeling.v1beta1.Example.image_payload", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_payload", - full_name="google.cloud.datalabeling.v1beta1.Example.text_payload", - index=1, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_payload", - full_name="google.cloud.datalabeling.v1beta1.Example.video_payload", - index=2, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.Example.name", - index=3, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotations", - full_name="google.cloud.datalabeling.v1beta1.Example.annotations", - index=4, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352A\205\001\n#datalabeling.googleapis.com/Example\022^projects/{project}/datasets/{dataset}/annotatedDatasets/{annotated_dataset}/examples/{example}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload", - full_name="google.cloud.datalabeling.v1beta1.Example.payload", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=3970, - serialized_end=4432, -) - -_DATASET.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_DATASET.fields_by_name["input_configs"].message_type = _INPUTCONFIG -_INPUTCONFIG.fields_by_name["text_metadata"].message_type = _TEXTMETADATA -_INPUTCONFIG.fields_by_name["gcs_source"].message_type = _GCSSOURCE -_INPUTCONFIG.fields_by_name["bigquery_source"].message_type = _BIGQUERYSOURCE 
-_INPUTCONFIG.fields_by_name["data_type"].enum_type = _DATATYPE -_INPUTCONFIG.fields_by_name[ - "annotation_type" -].enum_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2._ANNOTATIONTYPE -) -_INPUTCONFIG.fields_by_name[ - "classification_metadata" -].message_type = _CLASSIFICATIONMETADATA -_INPUTCONFIG.oneofs_by_name["data_type_metadata"].fields.append( - _INPUTCONFIG.fields_by_name["text_metadata"] -) -_INPUTCONFIG.fields_by_name[ - "text_metadata" -].containing_oneof = _INPUTCONFIG.oneofs_by_name["data_type_metadata"] -_INPUTCONFIG.oneofs_by_name["source"].fields.append( - _INPUTCONFIG.fields_by_name["gcs_source"] -) -_INPUTCONFIG.fields_by_name[ - "gcs_source" -].containing_oneof = _INPUTCONFIG.oneofs_by_name["source"] -_INPUTCONFIG.oneofs_by_name["source"].fields.append( - _INPUTCONFIG.fields_by_name["bigquery_source"] -) -_INPUTCONFIG.fields_by_name[ - "bigquery_source" -].containing_oneof = _INPUTCONFIG.oneofs_by_name["source"] -_OUTPUTCONFIG.fields_by_name["gcs_destination"].message_type = _GCSDESTINATION -_OUTPUTCONFIG.fields_by_name[ - "gcs_folder_destination" -].message_type = _GCSFOLDERDESTINATION -_OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( - _OUTPUTCONFIG.fields_by_name["gcs_destination"] -) -_OUTPUTCONFIG.fields_by_name[ - "gcs_destination" -].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] -_OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( - _OUTPUTCONFIG.fields_by_name["gcs_folder_destination"] -) -_OUTPUTCONFIG.fields_by_name[ - "gcs_folder_destination" -].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] -_DATAITEM.fields_by_name[ - "image_payload" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2._IMAGEPAYLOAD -) -_DATAITEM.fields_by_name[ - "text_payload" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2._TEXTPAYLOAD -) -_DATAITEM.fields_by_name[ - "video_payload" 
-].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2._VIDEOPAYLOAD -) -_DATAITEM.oneofs_by_name["payload"].fields.append( - _DATAITEM.fields_by_name["image_payload"] -) -_DATAITEM.fields_by_name["image_payload"].containing_oneof = _DATAITEM.oneofs_by_name[ - "payload" -] -_DATAITEM.oneofs_by_name["payload"].fields.append( - _DATAITEM.fields_by_name["text_payload"] -) -_DATAITEM.fields_by_name["text_payload"].containing_oneof = _DATAITEM.oneofs_by_name[ - "payload" -] -_DATAITEM.oneofs_by_name["payload"].fields.append( - _DATAITEM.fields_by_name["video_payload"] -) -_DATAITEM.fields_by_name["video_payload"].containing_oneof = _DATAITEM.oneofs_by_name[ - "payload" -] -_ANNOTATEDDATASET.fields_by_name[ - "annotation_source" -].enum_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2._ANNOTATIONSOURCE -) -_ANNOTATEDDATASET.fields_by_name[ - "annotation_type" -].enum_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2._ANNOTATIONTYPE -) -_ANNOTATEDDATASET.fields_by_name["label_stats"].message_type = _LABELSTATS -_ANNOTATEDDATASET.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ANNOTATEDDATASET.fields_by_name["metadata"].message_type = _ANNOTATEDDATASETMETADATA -_LABELSTATS_EXAMPLECOUNTENTRY.containing_type = _LABELSTATS -_LABELSTATS.fields_by_name["example_count"].message_type = _LABELSTATS_EXAMPLECOUNTENTRY -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "image_classification_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._IMAGECLASSIFICATIONCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "bounding_poly_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._BOUNDINGPOLYCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "polyline_config" -].message_type = ( - 
google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._POLYLINECONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "segmentation_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._SEGMENTATIONCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "video_classification_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._VIDEOCLASSIFICATIONCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "object_detection_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._OBJECTDETECTIONCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "object_tracking_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._OBJECTTRACKINGCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "event_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._EVENTCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "text_classification_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._TEXTCLASSIFICATIONCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "text_entity_extraction_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._TEXTENTITYEXTRACTIONCONFIG -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "human_annotation_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["image_classification_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "image_classification_config" 
-].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["bounding_poly_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "bounding_poly_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["polyline_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "polyline_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["segmentation_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "segmentation_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["video_classification_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "video_classification_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["object_detection_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "object_detection_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["object_tracking_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "object_tracking_config" -].containing_oneof = 
_ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["event_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "event_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["text_classification_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "text_classification_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_ANNOTATEDDATASETMETADATA.oneofs_by_name["annotation_request_config"].fields.append( - _ANNOTATEDDATASETMETADATA.fields_by_name["text_entity_extraction_config"] -) -_ANNOTATEDDATASETMETADATA.fields_by_name[ - "text_entity_extraction_config" -].containing_oneof = _ANNOTATEDDATASETMETADATA.oneofs_by_name[ - "annotation_request_config" -] -_EXAMPLE.fields_by_name[ - "image_payload" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2._IMAGEPAYLOAD -) -_EXAMPLE.fields_by_name[ - "text_payload" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2._TEXTPAYLOAD -) -_EXAMPLE.fields_by_name[ - "video_payload" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_data__payloads__pb2._VIDEOPAYLOAD -) -_EXAMPLE.fields_by_name[ - "annotations" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2._ANNOTATION -) -_EXAMPLE.oneofs_by_name["payload"].fields.append( - _EXAMPLE.fields_by_name["image_payload"] -) -_EXAMPLE.fields_by_name["image_payload"].containing_oneof = _EXAMPLE.oneofs_by_name[ - "payload" -] -_EXAMPLE.oneofs_by_name["payload"].fields.append( - _EXAMPLE.fields_by_name["text_payload"] -) 
-_EXAMPLE.fields_by_name["text_payload"].containing_oneof = _EXAMPLE.oneofs_by_name[ - "payload" -] -_EXAMPLE.oneofs_by_name["payload"].fields.append( - _EXAMPLE.fields_by_name["video_payload"] -) -_EXAMPLE.fields_by_name["video_payload"].containing_oneof = _EXAMPLE.oneofs_by_name[ - "payload" -] -DESCRIPTOR.message_types_by_name["Dataset"] = _DATASET -DESCRIPTOR.message_types_by_name["InputConfig"] = _INPUTCONFIG -DESCRIPTOR.message_types_by_name["TextMetadata"] = _TEXTMETADATA -DESCRIPTOR.message_types_by_name["ClassificationMetadata"] = _CLASSIFICATIONMETADATA -DESCRIPTOR.message_types_by_name["GcsSource"] = _GCSSOURCE -DESCRIPTOR.message_types_by_name["BigQuerySource"] = _BIGQUERYSOURCE -DESCRIPTOR.message_types_by_name["OutputConfig"] = _OUTPUTCONFIG -DESCRIPTOR.message_types_by_name["GcsDestination"] = _GCSDESTINATION -DESCRIPTOR.message_types_by_name["GcsFolderDestination"] = _GCSFOLDERDESTINATION -DESCRIPTOR.message_types_by_name["DataItem"] = _DATAITEM -DESCRIPTOR.message_types_by_name["AnnotatedDataset"] = _ANNOTATEDDATASET -DESCRIPTOR.message_types_by_name["LabelStats"] = _LABELSTATS -DESCRIPTOR.message_types_by_name["AnnotatedDatasetMetadata"] = _ANNOTATEDDATASETMETADATA -DESCRIPTOR.message_types_by_name["Example"] = _EXAMPLE -DESCRIPTOR.enum_types_by_name["DataType"] = _DATATYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Dataset = _reflection.GeneratedProtocolMessageType( - "Dataset", - (_message.Message,), - { - "DESCRIPTOR": _DATASET, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Dataset is the resource to hold your data. You can request multiple - labeling tasks for a dataset while each one will generate an - AnnotatedDataset. - - Attributes: - name: - Output only. Dataset resource name, format is: - projects/{project_id}/datasets/{dataset_id} - display_name: - Required. The display name of the dataset. Maximum of 64 - characters. - description: - Optional. 
User-provided description of the annotation - specification set. The description can be up to 10000 - characters long. - create_time: - Output only. Time the dataset is created. - input_configs: - Output only. This is populated with the original input configs - where ImportData is called. It is available only after the - clients import data to this dataset. - blocking_resources: - Output only. The names of any related resources that are - blocking changes to the dataset. - data_item_count: - Output only. The number of data items in the dataset. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Dataset) - }, -) -_sym_db.RegisterMessage(Dataset) - -InputConfig = _reflection.GeneratedProtocolMessageType( - "InputConfig", - (_message.Message,), - { - "DESCRIPTOR": _INPUTCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """The configuration of input data, including data type, location, etc. - - Attributes: - data_type_metadata: - Optional. The metadata associated with each data type. - text_metadata: - Required for text import, as language code must be specified. - source: - Required. Where the data is from. - gcs_source: - Source located in Cloud Storage. - bigquery_source: - Source located in BigQuery. You must specify this field if you - are using this InputConfig in an [EvaluationJob][google.cloud. - datalabeling.v1beta1.EvaluationJob]. - data_type: - Required. Data type must be specifed when user tries to import - data. - annotation_type: - Optional. The type of annotation to be performed on this data. - You must specify this field if you are using this InputConfig - in an [EvaluationJob][google.cloud.datalabeling.v1beta1.Evalua - tionJob]. - classification_metadata: - Optional. Metadata about annotations for the input. 
You must - specify this field if you are using this InputConfig in an [Ev - aluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob] - for a model version that performs classification. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.InputConfig) - }, -) -_sym_db.RegisterMessage(InputConfig) - -TextMetadata = _reflection.GeneratedProtocolMessageType( - "TextMetadata", - (_message.Message,), - { - "DESCRIPTOR": _TEXTMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Metadata for the text. - - Attributes: - language_code: - The language of this text, as a `BCP-47 `__. Default value is en-US. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TextMetadata) - }, -) -_sym_db.RegisterMessage(TextMetadata) - -ClassificationMetadata = _reflection.GeneratedProtocolMessageType( - "ClassificationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFICATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Metadata for classification annotations. - - Attributes: - is_multi_label: - Whether the classification task is multi-label or not. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ClassificationMetadata) - }, -) -_sym_db.RegisterMessage(ClassificationMetadata) - -GcsSource = _reflection.GeneratedProtocolMessageType( - "GcsSource", - (_message.Message,), - { - "DESCRIPTOR": _GCSSOURCE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Source of the Cloud Storage file to be imported. - - Attributes: - input_uri: - Required. The input URI of source file. This must be a Cloud - Storage path (``gs://...``). - mime_type: - Required. The format of the source file. Only “text/csv” is - supported. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GcsSource) - }, -) -_sym_db.RegisterMessage(GcsSource) - -BigQuerySource = _reflection.GeneratedProtocolMessageType( - "BigQuerySource", - (_message.Message,), - { - "DESCRIPTOR": _BIGQUERYSOURCE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """The BigQuery location for input data. If used in an - [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob], this - is where the service saves the prediction input and output sampled - from the model version. - - Attributes: - input_uri: - Required. BigQuery URI to a table, up to 2,000 characters - long. If you specify the URI of a table that does not exist, - Data Labeling Service creates a table at the URI with the - correct schema when you create your [EvaluationJob][google.clo - ud.datalabeling.v1beta1.EvaluationJob]. If you specify the URI - of a table that already exists, it must have the `correct - schema `__. Provide the table URI in the following - format: - “bq://{your_project_id}/{your_dataset_name}/{your_table_name}” - `Learn more `__. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.BigQuerySource) - }, -) -_sym_db.RegisterMessage(BigQuerySource) - -OutputConfig = _reflection.GeneratedProtocolMessageType( - "OutputConfig", - (_message.Message,), - { - "DESCRIPTOR": _OUTPUTCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """The configuration of output data. - - Attributes: - destination: - Required. Location to output data to. - gcs_destination: - Output to a file in Cloud Storage. Should be used for labeling - output other than image segmentation. - gcs_folder_destination: - Output to a folder in Cloud Storage. Should be used for image - segmentation labeling output. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.OutputConfig) - }, -) -_sym_db.RegisterMessage(OutputConfig) - -GcsDestination = _reflection.GeneratedProtocolMessageType( - "GcsDestination", - (_message.Message,), - { - "DESCRIPTOR": _GCSDESTINATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Export destination of the data.Only gcs path is allowed in output_uri. - - Attributes: - output_uri: - Required. The output uri of destination file. - mime_type: - Required. The format of the gcs destination. Only “text/csv” - and “application/json” are supported. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GcsDestination) - }, -) -_sym_db.RegisterMessage(GcsDestination) - -GcsFolderDestination = _reflection.GeneratedProtocolMessageType( - "GcsFolderDestination", - (_message.Message,), - { - "DESCRIPTOR": _GCSFOLDERDESTINATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Export folder destination of the data. - - Attributes: - output_folder_uri: - Required. Cloud Storage directory to export data to. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.GcsFolderDestination) - }, -) -_sym_db.RegisterMessage(GcsFolderDestination) - -DataItem = _reflection.GeneratedProtocolMessageType( - "DataItem", - (_message.Message,), - { - "DESCRIPTOR": _DATAITEM, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """DataItem is a piece of data, without annotation. For example, an - image. - - Attributes: - payload: - Output only. - image_payload: - The image payload, a container of the image bytes/uri. - text_payload: - The text payload, a container of text content. - video_payload: - The video payload, a container of the video uri. - name: - Output only. 
Name of the data item, in format of: projects/{pr - oject_id}/datasets/{dataset_id}/dataItems/{data_item_id} - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.DataItem) - }, -) -_sym_db.RegisterMessage(DataItem) - -AnnotatedDataset = _reflection.GeneratedProtocolMessageType( - "AnnotatedDataset", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATEDDATASET, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """AnnotatedDataset is a set holding annotations for data in a Dataset. - Each labeling task will generate an AnnotatedDataset under the Dataset - that the task is requested for. - - Attributes: - name: - Output only. AnnotatedDataset resource name in format of: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id} - display_name: - Output only. The display name of the AnnotatedDataset. It is - specified in HumanAnnotationConfig when user starts a labeling - task. Maximum of 64 characters. - description: - Output only. The description of the AnnotatedDataset. It is - specified in HumanAnnotationConfig when user starts a labeling - task. Maximum of 10000 characters. - annotation_source: - Output only. Source of the annotation. - annotation_type: - Output only. Type of the annotation. It is specified when - starting labeling task. - example_count: - Output only. Number of examples in the annotated dataset. - completed_example_count: - Output only. Number of examples that have annotation in the - annotated dataset. - label_stats: - Output only. Per label statistics. - create_time: - Output only. Time the AnnotatedDataset was created. - metadata: - Output only. Additional information about AnnotatedDataset. - blocking_resources: - Output only. The names of any related resources that are - blocking changes to the annotated dataset. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.AnnotatedDataset) - }, -) -_sym_db.RegisterMessage(AnnotatedDataset) - -LabelStats = _reflection.GeneratedProtocolMessageType( - "LabelStats", - (_message.Message,), - { - "ExampleCountEntry": _reflection.GeneratedProtocolMessageType( - "ExampleCountEntry", - (_message.Message,), - { - "DESCRIPTOR": _LABELSTATS_EXAMPLECOUNTENTRY, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelStats.ExampleCountEntry) - }, - ), - "DESCRIPTOR": _LABELSTATS, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Statistics about annotation specs. - - Attributes: - example_count: - Map of each annotation spec’s example count. Key is the - annotation spec name and value is the number of examples for - that annotation spec. If the annotated dataset does not have - annotation spec, the map will return a pair where the key is - empty string and value is the total number of annotations. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelStats) - }, -) -_sym_db.RegisterMessage(LabelStats) -_sym_db.RegisterMessage(LabelStats.ExampleCountEntry) - -AnnotatedDatasetMetadata = _reflection.GeneratedProtocolMessageType( - "AnnotatedDatasetMetadata", - (_message.Message,), - { - "DESCRIPTOR": _ANNOTATEDDATASETMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """Metadata on AnnotatedDataset. - - Attributes: - annotation_request_config: - Specific request configuration used when requesting the - labeling task. - image_classification_config: - Configuration for image classification task. - bounding_poly_config: - Configuration for image bounding box and bounding poly task. - polyline_config: - Configuration for image polyline task. - segmentation_config: - Configuration for image segmentation task. 
- video_classification_config: - Configuration for video classification task. - object_detection_config: - Configuration for video object detection task. - object_tracking_config: - Configuration for video object tracking task. - event_config: - Configuration for video event labeling task. - text_classification_config: - Configuration for text classification task. - text_entity_extraction_config: - Configuration for text entity extraction task. - human_annotation_config: - HumanAnnotationConfig used when requesting the human labeling - task for this AnnotatedDataset. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata) - }, -) -_sym_db.RegisterMessage(AnnotatedDatasetMetadata) - -Example = _reflection.GeneratedProtocolMessageType( - "Example", - (_message.Message,), - { - "DESCRIPTOR": _EXAMPLE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.dataset_pb2", - "__doc__": """An Example is a piece of data and its annotation. For example, an - image with label “house”. - - Attributes: - payload: - Output only. The data part of Example. - image_payload: - The image payload, a container of the image bytes/uri. - text_payload: - The text payload, a container of the text content. - video_payload: - The video payload, a container of the video uri. - name: - Output only. Name of the example, in format of: - projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ - {annotated_dataset_id}/examples/{example_id} - annotations: - Output only. Annotations for the piece of data in Example. One - piece of data can have multiple annotations. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Example) - }, -) -_sym_db.RegisterMessage(Example) - - -DESCRIPTOR._options = None -_DATASET._options = None -_DATAITEM._options = None -_ANNOTATEDDATASET._options = None -_LABELSTATS_EXAMPLECOUNTENTRY._options = None -_EXAMPLE._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/dataset_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/dataset_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/dataset_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2.py b/google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2.py deleted file mode 100644 index 654cbb9..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2.py +++ /dev/null @@ -1,1036 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/datalabeling_v1beta1/proto/evaluation_job.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - dataset_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - evaluation_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_evaluation__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - human_annotation_config_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/evaluation_job.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n`__ is the starting point for using continuous evaluation. - - Attributes: - name: - Output only. After you create a job, Data Labeling Service - assigns a name to the job with the following format: - “projects/{project_id}/evaluationJobs/{evaluation_job_id}” - description: - Required. Description of the job. The description can be up to - 25,000 characters long. - state: - Output only. 
Describes the current state of the job. - schedule: - Required. Describes the interval at which the job runs. This - interval must be at least 1 day, and it is rounded to the - nearest day. For example, if you specify a 50-hour interval, - the job runs every 2 days. You can provide the schedule in - `crontab format `__ or in an `English-like format `__. Regardless - of what you specify, the job will run at 10:00 AM UTC. Only - the interval from this schedule is used, not the specific time - of day. - model_version: - Required. The `AI Platform Prediction model version `__ to be evaluated. - Prediction input and output is sampled from this model - version. When creating an evaluation job, specify the model - version in the following format: “projects/{project_id}/model - s/{model_name}/versions/{version_name}” There can only be one - evaluation job per model version. - evaluation_job_config: - Required. Configuration details for the evaluation job. - annotation_spec_set: - Required. Name of the [AnnotationSpecSet][google.cloud.datalab - eling.v1beta1.AnnotationSpecSet] describing all the labels - that your machine learning model outputs. You must create this - resource before you create an evaluation job and provide its - name in the following format: “projects/{project_id}/annotati - onSpecSets/{annotation_spec_set_id}” - label_missing_ground_truth: - Required. Whether you want Data Labeling Service to provide - ground truth labels for prediction input. If you want the - service to assign human labelers to annotate your data, set - this to ``true``. If you want to provide your own ground truth - labels in the evaluation job’s BigQuery table, set this to - ``false``. - attempts: - Output only. Every time the evaluation job runs and an error - occurs, the failed attempt is appended to this array. - create_time: - Output only. Timestamp of when this evaluation job was - created. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EvaluationJob) - }, -) -_sym_db.RegisterMessage(EvaluationJob) - -EvaluationJobConfig = _reflection.GeneratedProtocolMessageType( - "EvaluationJobConfig", - (_message.Message,), - { - "BigqueryImportKeysEntry": _reflection.GeneratedProtocolMessageType( - "BigqueryImportKeysEntry", - (_message.Message,), - { - "DESCRIPTOR": _EVALUATIONJOBCONFIG_BIGQUERYIMPORTKEYSENTRY, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_job_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EvaluationJobConfig.BigqueryImportKeysEntry) - }, - ), - "DESCRIPTOR": _EVALUATIONJOBCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_job_pb2", - "__doc__": """Configures specific details of how a continuous evaluation job works. - Provide this configuration when you create an EvaluationJob. - - Attributes: - human_annotation_request_config: - Required. Details for how you want human reviewers to provide - ground truth labels. - image_classification_config: - Specify this field if your model version performs image - classification or general classification. - ``annotationSpecSet`` in this configuration must match [Evalua - tionJob.annotationSpecSet][google.cloud.datalabeling.v1beta1.E - valuationJob.annotation_spec_set]. ``allowMultiLabel`` in this - configuration must match - ``classificationMetadata.isMultiLabel`` in [input_config][goog - le.cloud.datalabeling.v1beta1.EvaluationJobConfig.input_config - ]. - bounding_poly_config: - Specify this field if your model version performs image object - detection (bounding box detection). ``annotationSpecSet`` in - this configuration must match [EvaluationJob.annotationSpecSet - ][google.cloud.datalabeling.v1beta1.EvaluationJob.annotation_s - pec_set]. - text_classification_config: - Specify this field if your model version performs text - classification. 
``annotationSpecSet`` in this configuration - must match [EvaluationJob.annotationSpecSet][google.cloud.data - labeling.v1beta1.EvaluationJob.annotation_spec_set]. - ``allowMultiLabel`` in this configuration must match - ``classificationMetadata.isMultiLabel`` in [input_config][goog - le.cloud.datalabeling.v1beta1.EvaluationJobConfig.input_config - ]. - input_config: - Rquired. Details for the sampled prediction input. Within this - configuration, there are requirements for several fields: - - ``dataType`` must be one of ``IMAGE``, ``TEXT``, or - ``GENERAL_DATA``. - ``annotationType`` must be one of - ``IMAGE_CLASSIFICATION_ANNOTATION``, - ``TEXT_CLASSIFICATION_ANNOTATION``, - ``GENERAL_CLASSIFICATION_ANNOTATION``, or - ``IMAGE_BOUNDING_BOX_ANNOTATION`` (image object detection). - - If your machine learning model performs classification, you - must specify ``classificationMetadata.isMultiLabel``. - - You must specify ``bigquerySource`` (not ``gcsSource``). - evaluation_config: - Required. Details for calculating evaluation metrics and - creating - [Evaulations][google.cloud.datalabeling.v1beta1.Evaluation]. - If your model version performs image object detection, you - must specify the ``boundingBoxEvaluationOptions`` field within - this configuration. Otherwise, provide an empty object for - this configuration. - human_annotation_config: - Optional. Details for human annotation of your data. If you - set [labelMissingGroundTruth][google.cloud.datalabeling.v1beta - 1.EvaluationJob.label_missing_ground_truth] to ``true`` for - this evaluation job, then you must specify this field. If you - plan to provide your own ground truth labels, then omit this - field. Note that you must create an - [Instruction][google.cloud.datalabeling.v1beta1.Instruction] - resource before you can specify this field. Provide the name - of the instruction resource in the ``instruction`` field - within this configuration. - bigquery_import_keys: - Required. 
Prediction keys that tell Data Labeling Service - where to find the data for evaluation in your BigQuery table. - When the service samples prediction input and output from your - model version and saves it to BigQuery, the data gets stored - as JSON strings in the BigQuery table. These keys tell Data - Labeling Service how to parse the JSON. You can provide the - following entries in this field: - ``data_json_key``: the - data key for prediction input. You must provide either this - key or ``reference_json_key``. - ``reference_json_key``: the - data reference key for prediction input. You must provide - either this key or ``data_json_key``. - ``label_json_key``: - the label key for prediction output. Required. - - ``label_score_json_key``: the score key for prediction output. - Required. - ``bounding_box_json_key``: the bounding box key - for prediction output. Required if your model version - perform image object detection. Learn `how to configure - prediction keys `__. - example_count: - Required. The maximum number of predictions to sample and save - to BigQuery during each [evaluation interval][google.cloud.dat - alabeling.v1beta1.EvaluationJob.schedule]. This limit - overrides ``example_sample_percentage``: even if the service - has not sampled enough predictions to fulfill - ``example_sample_perecentage`` during an interval, it stops - sampling predictions when it meets this limit. - example_sample_percentage: - Required. Fraction of predictions to sample and save to - BigQuery during each [evaluation interval][google.cloud.datala - beling.v1beta1.EvaluationJob.schedule]. For example, 0.1 means - 10% of predictions served by your model version get saved to - BigQuery. - evaluation_job_alert_config: - Optional. Configuration details for evaluation job alerts. - Specify this field if you want to receive email alerts if the - evaluation job finds that your predictions have low mean - average precision during a run. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EvaluationJobConfig) - }, -) -_sym_db.RegisterMessage(EvaluationJobConfig) -_sym_db.RegisterMessage(EvaluationJobConfig.BigqueryImportKeysEntry) - -EvaluationJobAlertConfig = _reflection.GeneratedProtocolMessageType( - "EvaluationJobAlertConfig", - (_message.Message,), - { - "DESCRIPTOR": _EVALUATIONJOBALERTCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_job_pb2", - "__doc__": """Provides details for how an evaluation job sends email alerts based on - the results of a run. - - Attributes: - email: - Required. An email address to send alerts to. - min_acceptable_mean_average_precision: - Required. A number between 0 and 1 that describes a minimum - mean average precision threshold. When the evaluation job - runs, if it calculates that your model version’s predictions - from the recent interval have [meanAveragePrecision][google.cl - oud.datalabeling.v1beta1.PrCurve.mean_average_precision] below - this threshold, then it sends an alert to your specified - email. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EvaluationJobAlertConfig) - }, -) -_sym_db.RegisterMessage(EvaluationJobAlertConfig) - -Attempt = _reflection.GeneratedProtocolMessageType( - "Attempt", - (_message.Message,), - { - "DESCRIPTOR": _ATTEMPT, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_job_pb2", - "__doc__": """Records a failed evaluation job run. - - Attributes: - partial_failures: - Details of errors that occurred. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Attempt) - }, -) -_sym_db.RegisterMessage(Attempt) - - -DESCRIPTOR._options = None -_EVALUATIONJOB._options = None -_EVALUATIONJOBCONFIG_BIGQUERYIMPORTKEYSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/evaluation_job_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/evaluation_pb2.py b/google/cloud/datalabeling_v1beta1/proto/evaluation_pb2.py deleted file mode 100644 index 87a809c..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/evaluation_pb2.py +++ /dev/null @@ -1,1280 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/datalabeling_v1beta1/proto/evaluation.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - annotation_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - annotation_spec_set_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/evaluation.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n8google/cloud/datalabeling_v1beta1/proto/evaluation.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x38google/cloud/datalabeling_v1beta1/proto/annotation.proto\x1a\x41google/cloud/datalabeling_v1beta1/proto/annotation_spec_set.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf6\x03\n\nEvaluation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x43\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x33.google.cloud.datalabeling.v1beta1.EvaluationConfig\x12;\n\x17\x65valuation_job_run_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12P\n\x12\x65valuation_metrics\x18\x05 \x01(\x0b\x32\x34.google.cloud.datalabeling.v1beta1.EvaluationMetrics\x12J\n\x0f\x61nnotation_type\x18\x06 \x01(\x0e\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationType\x12\x1c\n\x14\x65valuated_item_count\x18\x07 \x01(\x03:k\xea\x41h\n&datalabeling.googleapis.com/Evaluation\x12>projects/{project}/datasets/{dataset}/evaluations/{evaluation}"\x91\x01\n\x10\x45valuationConfig\x12j\n\x1f\x62ounding_box_evaluation_options\x18\x01 \x01(\x0b\x32?.google.cloud.datalabeling.v1beta1.BoundingBoxEvaluationOptionsH\x00\x42\x11\n\x0fvertical_option"5\n\x1c\x42oundingBoxEvaluationOptions\x12\x15\n\riou_threshold\x18\x01 \x01(\x02"\xd9\x01\n\x11\x45valuationMetrics\x12Z\n\x16\x63lassification_metrics\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.ClassificationMetricsH\x00\x12]\n\x18object_detection_metrics\x18\x02 \x01(\x0b\x32\x39.google.cloud.datalabeling.v1beta1.ObjectDetectionMetricsH\x00\x42\t\n\x07metrics"\xa3\x01\n\x15\x43lassificationMetrics\x12<\n\x08pr_curve\x18\x01 \x01(\x0b\x32*.google.cloud.datalabeling.v1beta1.PrCurve\x12L\n\x10\x63onfusion_matrix\x18\x02 \x01(\x0b\x32\x32.google.cloud.datalabeling.v1beta1.ConfusionMatrix"V\n\x16ObjectDetectionMetrics\x12<\n\x08pr_curve\x18\x01 \x01(\x0b\x32*.google.cloud.datalabeling.v1beta1.PrCurve"\xe6\x03\n\x07PrCurve\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec\x12\x18\n\x10\x61rea_under_curve\x18\x02 \x01(\x02\x12\x65\n\x1a\x63onfidence_metrics_entries\x18\x03 \x03(\x0b\x32\x41.google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry\x12\x1e\n\x16mean_average_precision\x18\x04 \x01(\x02\x1a\xed\x01\n\x16\x43onfidenceMetricsEntry\x12\x1c\n\x14\x63onfidence_threshold\x18\x01 \x01(\x02\x12\x0e\n\x06recall\x18\x02 \x01(\x02\x12\x11\n\tprecision\x18\x03 \x01(\x02\x12\x10\n\x08\x66\x31_score\x18\x04 \x01(\x02\x12\x12\n\nrecall_at1\x18\x05 
\x01(\x02\x12\x15\n\rprecision_at1\x18\x06 \x01(\x02\x12\x14\n\x0c\x66\x31_score_at1\x18\x07 \x01(\x02\x12\x12\n\nrecall_at5\x18\x08 \x01(\x02\x12\x15\n\rprecision_at5\x18\t \x01(\x02\x12\x14\n\x0c\x66\x31_score_at5\x18\n \x01(\x02"\xfc\x02\n\x0f\x43onfusionMatrix\x12\x43\n\x03row\x18\x01 \x03(\x0b\x32\x36.google.cloud.datalabeling.v1beta1.ConfusionMatrix.Row\x1av\n\x14\x43onfusionMatrixEntry\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec\x12\x12\n\nitem_count\x18\x02 \x01(\x05\x1a\xab\x01\n\x03Row\x12J\n\x0f\x61nnotation_spec\x18\x01 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.AnnotationSpec\x12X\n\x07\x65ntries\x18\x02 \x03(\x0b\x32G.google.cloud.datalabeling.v1beta1.ConfusionMatrix.ConfusionMatrixEntryBx\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_EVALUATION = _descriptor.Descriptor( - name="Evaluation", - full_name="google.cloud.datalabeling.v1beta1.Evaluation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.Evaluation.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="config", - full_name="google.cloud.datalabeling.v1beta1.Evaluation.config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="evaluation_job_run_time", - full_name="google.cloud.datalabeling.v1beta1.Evaluation.evaluation_job_run_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.Evaluation.create_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="evaluation_metrics", - full_name="google.cloud.datalabeling.v1beta1.Evaluation.evaluation_metrics", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_type", - full_name="google.cloud.datalabeling.v1beta1.Evaluation.annotation_type", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="evaluated_item_count", - full_name="google.cloud.datalabeling.v1beta1.Evaluation.evaluated_item_count", - index=6, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352Ah\n&datalabeling.googleapis.com/Evaluation\022>projects/{project}/datasets/{dataset}/evaluations/{evaluation}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=311, - serialized_end=813, -) - - -_EVALUATIONCONFIG = _descriptor.Descriptor( - name="EvaluationConfig", - full_name="google.cloud.datalabeling.v1beta1.EvaluationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="bounding_box_evaluation_options", - full_name="google.cloud.datalabeling.v1beta1.EvaluationConfig.bounding_box_evaluation_options", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="vertical_option", - 
full_name="google.cloud.datalabeling.v1beta1.EvaluationConfig.vertical_option", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=816, - serialized_end=961, -) - - -_BOUNDINGBOXEVALUATIONOPTIONS = _descriptor.Descriptor( - name="BoundingBoxEvaluationOptions", - full_name="google.cloud.datalabeling.v1beta1.BoundingBoxEvaluationOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="iou_threshold", - full_name="google.cloud.datalabeling.v1beta1.BoundingBoxEvaluationOptions.iou_threshold", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=963, - serialized_end=1016, -) - - -_EVALUATIONMETRICS = _descriptor.Descriptor( - name="EvaluationMetrics", - full_name="google.cloud.datalabeling.v1beta1.EvaluationMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="classification_metrics", - full_name="google.cloud.datalabeling.v1beta1.EvaluationMetrics.classification_metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="object_detection_metrics", - full_name="google.cloud.datalabeling.v1beta1.EvaluationMetrics.object_detection_metrics", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="metrics", - full_name="google.cloud.datalabeling.v1beta1.EvaluationMetrics.metrics", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=1019, - serialized_end=1236, -) - - -_CLASSIFICATIONMETRICS = _descriptor.Descriptor( - name="ClassificationMetrics", - full_name="google.cloud.datalabeling.v1beta1.ClassificationMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="pr_curve", - full_name="google.cloud.datalabeling.v1beta1.ClassificationMetrics.pr_curve", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="confusion_matrix", - full_name="google.cloud.datalabeling.v1beta1.ClassificationMetrics.confusion_matrix", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1239, - serialized_end=1402, -) - - -_OBJECTDETECTIONMETRICS = _descriptor.Descriptor( - name="ObjectDetectionMetrics", - full_name="google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="pr_curve", - full_name="google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics.pr_curve", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1404, - serialized_end=1490, -) - - -_PRCURVE_CONFIDENCEMETRICSENTRY = _descriptor.Descriptor( - name="ConfidenceMetricsEntry", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="confidence_threshold", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.confidence_threshold", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="recall", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.recall", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="precision", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.precision", - index=2, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="f1_score", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.f1_score", - index=3, - number=4, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="recall_at1", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.recall_at1", - index=4, - number=5, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="precision_at1", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.precision_at1", - index=5, - number=6, 
- type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="f1_score_at1", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.f1_score_at1", - index=6, - number=7, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="recall_at5", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.recall_at5", - index=7, - number=8, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="precision_at5", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.precision_at5", - index=8, - number=9, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="f1_score_at5", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.f1_score_at5", - index=9, - number=10, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1742, - serialized_end=1979, -) - -_PRCURVE = _descriptor.Descriptor( - name="PrCurve", - full_name="google.cloud.datalabeling.v1beta1.PrCurve", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="area_under_curve", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.area_under_curve", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="confidence_metrics_entries", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.confidence_metrics_entries", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="mean_average_precision", - full_name="google.cloud.datalabeling.v1beta1.PrCurve.mean_average_precision", - index=3, - number=4, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_PRCURVE_CONFIDENCEMETRICSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1493, - serialized_end=1979, -) - - -_CONFUSIONMATRIX_CONFUSIONMATRIXENTRY = _descriptor.Descriptor( - name="ConfusionMatrixEntry", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix.ConfusionMatrixEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix.ConfusionMatrixEntry.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="item_count", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix.ConfusionMatrixEntry.item_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - 
enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2070, - serialized_end=2188, -) - -_CONFUSIONMATRIX_ROW = _descriptor.Descriptor( - name="Row", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix.Row", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix.Row.annotation_spec", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="entries", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix.Row.entries", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2191, - serialized_end=2362, -) - -_CONFUSIONMATRIX = _descriptor.Descriptor( - name="ConfusionMatrix", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="row", - full_name="google.cloud.datalabeling.v1beta1.ConfusionMatrix.row", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - 
default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_CONFUSIONMATRIX_CONFUSIONMATRIXENTRY, _CONFUSIONMATRIX_ROW,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1982, - serialized_end=2362, -) - -_EVALUATION.fields_by_name["config"].message_type = _EVALUATIONCONFIG -_EVALUATION.fields_by_name[ - "evaluation_job_run_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_EVALUATION.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_EVALUATION.fields_by_name["evaluation_metrics"].message_type = _EVALUATIONMETRICS -_EVALUATION.fields_by_name[ - "annotation_type" -].enum_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__pb2._ANNOTATIONTYPE -) -_EVALUATIONCONFIG.fields_by_name[ - "bounding_box_evaluation_options" -].message_type = _BOUNDINGBOXEVALUATIONOPTIONS -_EVALUATIONCONFIG.oneofs_by_name["vertical_option"].fields.append( - _EVALUATIONCONFIG.fields_by_name["bounding_box_evaluation_options"] -) -_EVALUATIONCONFIG.fields_by_name[ - "bounding_box_evaluation_options" -].containing_oneof = _EVALUATIONCONFIG.oneofs_by_name["vertical_option"] -_EVALUATIONMETRICS.fields_by_name[ - "classification_metrics" -].message_type = _CLASSIFICATIONMETRICS -_EVALUATIONMETRICS.fields_by_name[ - "object_detection_metrics" -].message_type = _OBJECTDETECTIONMETRICS -_EVALUATIONMETRICS.oneofs_by_name["metrics"].fields.append( - _EVALUATIONMETRICS.fields_by_name["classification_metrics"] -) -_EVALUATIONMETRICS.fields_by_name[ - "classification_metrics" -].containing_oneof = _EVALUATIONMETRICS.oneofs_by_name["metrics"] -_EVALUATIONMETRICS.oneofs_by_name["metrics"].fields.append( - 
_EVALUATIONMETRICS.fields_by_name["object_detection_metrics"] -) -_EVALUATIONMETRICS.fields_by_name[ - "object_detection_metrics" -].containing_oneof = _EVALUATIONMETRICS.oneofs_by_name["metrics"] -_CLASSIFICATIONMETRICS.fields_by_name["pr_curve"].message_type = _PRCURVE -_CLASSIFICATIONMETRICS.fields_by_name[ - "confusion_matrix" -].message_type = _CONFUSIONMATRIX -_OBJECTDETECTIONMETRICS.fields_by_name["pr_curve"].message_type = _PRCURVE -_PRCURVE_CONFIDENCEMETRICSENTRY.containing_type = _PRCURVE -_PRCURVE.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_PRCURVE.fields_by_name[ - "confidence_metrics_entries" -].message_type = _PRCURVE_CONFIDENCEMETRICSENTRY -_CONFUSIONMATRIX_CONFUSIONMATRIXENTRY.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_CONFUSIONMATRIX_CONFUSIONMATRIXENTRY.containing_type = _CONFUSIONMATRIX -_CONFUSIONMATRIX_ROW.fields_by_name[ - "annotation_spec" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_annotation__spec__set__pb2._ANNOTATIONSPEC -) -_CONFUSIONMATRIX_ROW.fields_by_name[ - "entries" -].message_type = _CONFUSIONMATRIX_CONFUSIONMATRIXENTRY -_CONFUSIONMATRIX_ROW.containing_type = _CONFUSIONMATRIX -_CONFUSIONMATRIX.fields_by_name["row"].message_type = _CONFUSIONMATRIX_ROW -DESCRIPTOR.message_types_by_name["Evaluation"] = _EVALUATION -DESCRIPTOR.message_types_by_name["EvaluationConfig"] = _EVALUATIONCONFIG -DESCRIPTOR.message_types_by_name[ - "BoundingBoxEvaluationOptions" -] = _BOUNDINGBOXEVALUATIONOPTIONS -DESCRIPTOR.message_types_by_name["EvaluationMetrics"] = _EVALUATIONMETRICS -DESCRIPTOR.message_types_by_name["ClassificationMetrics"] = _CLASSIFICATIONMETRICS -DESCRIPTOR.message_types_by_name["ObjectDetectionMetrics"] = _OBJECTDETECTIONMETRICS 
-DESCRIPTOR.message_types_by_name["PrCurve"] = _PRCURVE -DESCRIPTOR.message_types_by_name["ConfusionMatrix"] = _CONFUSIONMATRIX -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Evaluation = _reflection.GeneratedProtocolMessageType( - "Evaluation", - (_message.Message,), - { - "DESCRIPTOR": _EVALUATION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """Describes an evaluation between a machine learning model’s predictions - and ground truth labels. Created when an - [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob] runs - successfully. - - Attributes: - name: - Output only. Resource name of an evaluation. The name has the - following format: "projects/{project_id}/datasets/{dataset_id - }/evaluations/{evaluation_id}’ - config: - Output only. Options used in the evaluation job that created - this evaluation. - evaluation_job_run_time: - Output only. Timestamp for when the evaluation job that - created this evaluation ran. - create_time: - Output only. Timestamp for when this evaluation was created. - evaluation_metrics: - Output only. Metrics comparing predictions to ground truth - labels. - annotation_type: - Output only. Type of task that the model version being - evaluated performs, as defined in the [evaluationJobConfig.in - putConfig.annotationType][google.cloud.datalabeling.v1beta1.Ev - aluationJobConfig.input_config] field of the evaluation job - that created this evaluation. - evaluated_item_count: - Output only. The number of items in the ground truth dataset - that were used for this evaluation. Only populated when the - evaulation is for certain AnnotationTypes. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Evaluation) - }, -) -_sym_db.RegisterMessage(Evaluation) - -EvaluationConfig = _reflection.GeneratedProtocolMessageType( - "EvaluationConfig", - (_message.Message,), - { - "DESCRIPTOR": _EVALUATIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """Configuration details used for calculating evaluation metrics and - creating an - [Evaluation][google.cloud.datalabeling.v1beta1.Evaluation]. - - Attributes: - vertical_option: - Vertical specific options for general metrics. - bounding_box_evaluation_options: - Only specify this field if the related model performs image - object detection (``IMAGE_BOUNDING_BOX_ANNOTATION``). - Describes how to evaluate bounding boxes. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EvaluationConfig) - }, -) -_sym_db.RegisterMessage(EvaluationConfig) - -BoundingBoxEvaluationOptions = _reflection.GeneratedProtocolMessageType( - "BoundingBoxEvaluationOptions", - (_message.Message,), - { - "DESCRIPTOR": _BOUNDINGBOXEVALUATIONOPTIONS, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """Options regarding evaluation between bounding boxes. - - Attributes: - iou_threshold: - Minimum [intersection-over-union - (IOU)](/vision/automl/object- - detection/docs/evaluate#intersection-over-union) required for - 2 bounding boxes to be considered a match. This must be a - number between 0 and 1. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.BoundingBoxEvaluationOptions) - }, -) -_sym_db.RegisterMessage(BoundingBoxEvaluationOptions) - -EvaluationMetrics = _reflection.GeneratedProtocolMessageType( - "EvaluationMetrics", - (_message.Message,), - { - "DESCRIPTOR": _EVALUATIONMETRICS, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """ - Attributes: - metrics: - Common metrics covering most general cases. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EvaluationMetrics) - }, -) -_sym_db.RegisterMessage(EvaluationMetrics) - -ClassificationMetrics = _reflection.GeneratedProtocolMessageType( - "ClassificationMetrics", - (_message.Message,), - { - "DESCRIPTOR": _CLASSIFICATIONMETRICS, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """Metrics calculated for a classification model. - - Attributes: - pr_curve: - Precision-recall curve based on ground truth labels, predicted - labels, and scores for the predicted labels. - confusion_matrix: - Confusion matrix of predicted labels vs. ground truth labels. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ClassificationMetrics) - }, -) -_sym_db.RegisterMessage(ClassificationMetrics) - -ObjectDetectionMetrics = _reflection.GeneratedProtocolMessageType( - "ObjectDetectionMetrics", - (_message.Message,), - { - "DESCRIPTOR": _OBJECTDETECTIONMETRICS, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """Metrics calculated for an image object detection (bounding box) model. - - Attributes: - pr_curve: - Precision-recall curve. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ObjectDetectionMetrics) - }, -) -_sym_db.RegisterMessage(ObjectDetectionMetrics) - -PrCurve = _reflection.GeneratedProtocolMessageType( - "PrCurve", - (_message.Message,), - { - "ConfidenceMetricsEntry": _reflection.GeneratedProtocolMessageType( - "ConfidenceMetricsEntry", - (_message.Message,), - { - "DESCRIPTOR": _PRCURVE_CONFIDENCEMETRICSENTRY, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """ - Attributes: - confidence_threshold: - Threshold used for this entry. For classification tasks, this - is a classification threshold: a predicted label is - categorized as positive or negative (in the context of this - point on the PR curve) based on whether the label’s score - meets this threshold. For image object detection (bounding - box) tasks, this is the [intersection-over-union - (IOU)](/vision/automl/object- - detection/docs/evaluate#intersection-over-union) threshold for - the context of this point on the PR curve. - recall: - Recall value. - precision: - Precision value. - f1_score: - Harmonic mean of recall and precision. - recall_at1: - Recall value for entries with label that has highest score. - precision_at1: - Precision value for entries with label that has highest score. - f1_score_at1: - The harmonic mean of [recall_at1][google.cloud.datalabeling.v1 - beta1.PrCurve.ConfidenceMetricsEntry.recall_at1] and [precisio - n_at1][google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMet - ricsEntry.precision_at1]. - recall_at5: - Recall value for entries with label that has highest 5 scores. - precision_at5: - Precision value for entries with label that has highest 5 - scores. - f1_score_at5: - The harmonic mean of [recall_at5][google.cloud.datalabeling.v1 - beta1.PrCurve.ConfidenceMetricsEntry.recall_at5] and [precisio - n_at5][google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMet - ricsEntry.precision_at5]. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry) - }, - ), - "DESCRIPTOR": _PRCURVE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """ - Attributes: - annotation_spec: - The annotation spec of the label for which the precision- - recall curve calculated. If this field is empty, that means - the precision-recall curve is an aggregate curve for all - labels. - area_under_curve: - Area under the precision-recall curve. Not to be confused with - area under a receiver operating characteristic (ROC) curve. - confidence_metrics_entries: - Entries that make up the precision-recall graph. Each entry is - a “point” on the graph drawn for a different - ``confidence_threshold``. - mean_average_precision: - Mean average prcision of this curve. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.PrCurve) - }, -) -_sym_db.RegisterMessage(PrCurve) -_sym_db.RegisterMessage(PrCurve.ConfidenceMetricsEntry) - -ConfusionMatrix = _reflection.GeneratedProtocolMessageType( - "ConfusionMatrix", - (_message.Message,), - { - "ConfusionMatrixEntry": _reflection.GeneratedProtocolMessageType( - "ConfusionMatrixEntry", - (_message.Message,), - { - "DESCRIPTOR": _CONFUSIONMATRIX_CONFUSIONMATRIXENTRY, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """ - Attributes: - annotation_spec: - The annotation spec of a predicted label. - item_count: - Number of items predicted to have this label. (The ground - truth label for these items is the ``Row.annotationSpec`` of - this entry’s parent.) 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ConfusionMatrix.ConfusionMatrixEntry) - }, - ), - "Row": _reflection.GeneratedProtocolMessageType( - "Row", - (_message.Message,), - { - "DESCRIPTOR": _CONFUSIONMATRIX_ROW, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """A row in the confusion matrix. Each entry in this row has the same - ground truth label. - - Attributes: - annotation_spec: - The annotation spec of the ground truth label for this row. - entries: - A list of the confusion matrix entries. One entry for each - possible predicted label. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ConfusionMatrix.Row) - }, - ), - "DESCRIPTOR": _CONFUSIONMATRIX, - "__module__": "google.cloud.datalabeling_v1beta1.proto.evaluation_pb2", - "__doc__": """Confusion matrix of the model running the classification. Only - applicable when the metrics entry aggregates multiple labels. Not - applicable when the entry is for a single label.""", - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ConfusionMatrix) - }, -) -_sym_db.RegisterMessage(ConfusionMatrix) -_sym_db.RegisterMessage(ConfusionMatrix.ConfusionMatrixEntry) -_sym_db.RegisterMessage(ConfusionMatrix.Row) - - -DESCRIPTOR._options = None -_EVALUATION._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/evaluation_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/evaluation_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/evaluation_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2.py b/google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2.py deleted file mode 100644 index 35de231..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2.py +++ /dev/null @@ -1,1326 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/datalabeling_v1beta1/proto/human_annotation_config.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/human_annotation_config.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\nEgoogle/cloud/datalabeling_v1beta1/proto/human_annotation_config.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/protobuf/duration.proto"\xd4\x02\n\x15HumanAnnotationConfig\x12\x18\n\x0binstruction\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12+\n\x1e\x61nnotated_dataset_display_name\x18\x02 
\x01(\tB\x03\xe0\x41\x02\x12*\n\x1d\x61nnotated_dataset_description\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0blabel_group\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rlanguage_code\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rreplica_count\x18\x06 \x01(\x05\x42\x03\xe0\x41\x01\x12\x39\n\x11question_duration\x18\x07 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x1f\n\x12\x63ontributor_emails\x18\t \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\x12user_email_address\x18\n \x01(\t"\xbd\x01\n\x19ImageClassificationConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1e\n\x11\x61llow_multi_label\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01\x12^\n\x17\x61nswer_aggregation_type\x18\x03 \x01(\x0e\x32\x38.google.cloud.datalabeling.v1beta1.StringAggregationTypeB\x03\xe0\x41\x01"X\n\x12\x42oundingPolyConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12 \n\x13instruction_message\x18\x02 \x01(\tB\x03\xe0\x41\x01"T\n\x0ePolylineConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12 \n\x13instruction_message\x18\x02 \x01(\tB\x03\xe0\x41\x01"S\n\x12SegmentationConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1b\n\x13instruction_message\x18\x02 \x01(\t"\x9b\x02\n\x19VideoClassificationConfig\x12~\n\x1b\x61nnotation_spec_set_configs\x18\x01 \x03(\x0b\x32T.google.cloud.datalabeling.v1beta1.VideoClassificationConfig.AnnotationSpecSetConfigB\x03\xe0\x41\x02\x12!\n\x14\x61pply_shot_detection\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01\x1a[\n\x17\x41nnotationSpecSetConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1e\n\x11\x61llow_multi_label\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01"]\n\x15ObjectDetectionConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12"\n\x15\x65xtraction_frame_rate\x18\x03 \x01(\x01\x42\x03\xe0\x41\x02"8\n\x14ObjectTrackingConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 
\x01(\tB\x03\xe0\x41\x02"0\n\x0b\x45ventConfig\x12!\n\x14\x61nnotation_spec_sets\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xaf\x01\n\x18TextClassificationConfig\x12\x1e\n\x11\x61llow_multi_label\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12 \n\x13\x61nnotation_spec_set\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12Q\n\x10sentiment_config\x18\x03 \x01(\x0b\x32\x32.google.cloud.datalabeling.v1beta1.SentimentConfigB\x03\xe0\x41\x01";\n\x0fSentimentConfig\x12(\n enable_label_sentiment_selection\x18\x01 \x01(\x08">\n\x1aTextEntityExtractionConfig\x12 \n\x13\x61nnotation_spec_set\x18\x01 \x01(\tB\x03\xe0\x41\x02*{\n\x15StringAggregationType\x12\'\n#STRING_AGGREGATION_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rMAJORITY_VOTE\x10\x01\x12\x12\n\x0eUNANIMOUS_VOTE\x10\x02\x12\x12\n\x0eNO_AGGREGATION\x10\x03\x42x\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - ], -) - -_STRINGAGGREGATIONTYPE = _descriptor.EnumDescriptor( - name="StringAggregationType", - full_name="google.cloud.datalabeling.v1beta1.StringAggregationType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="STRING_AGGREGATION_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MAJORITY_VOTE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="UNANIMOUS_VOTE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="NO_AGGREGATION", - index=3, - number=3, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1791, - serialized_end=1914, -) -_sym_db.RegisterEnumDescriptor(_STRINGAGGREGATIONTYPE) - -StringAggregationType = enum_type_wrapper.EnumTypeWrapper(_STRINGAGGREGATIONTYPE) -STRING_AGGREGATION_TYPE_UNSPECIFIED = 0 -MAJORITY_VOTE = 1 -UNANIMOUS_VOTE = 2 -NO_AGGREGATION = 3 - - -_HUMANANNOTATIONCONFIG = _descriptor.Descriptor( - name="HumanAnnotationConfig", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="instruction", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.instruction", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotated_dataset_display_name", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.annotated_dataset_display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotated_dataset_description", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.annotated_dataset_description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="label_group", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.label_group", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="language_code", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.language_code", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="replica_count", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.replica_count", - index=5, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="question_duration", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.question_duration", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="contributor_emails", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.contributor_emails", - index=7, - number=9, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="user_email_address", - full_name="google.cloud.datalabeling.v1beta1.HumanAnnotationConfig.user_email_address", - index=8, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=204, - serialized_end=544, -) - - -_IMAGECLASSIFICATIONCONFIG = _descriptor.Descriptor( - name="ImageClassificationConfig", - full_name="google.cloud.datalabeling.v1beta1.ImageClassificationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.ImageClassificationConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="allow_multi_label", - full_name="google.cloud.datalabeling.v1beta1.ImageClassificationConfig.allow_multi_label", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="answer_aggregation_type", - full_name="google.cloud.datalabeling.v1beta1.ImageClassificationConfig.answer_aggregation_type", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=547, - serialized_end=736, -) - - -_BOUNDINGPOLYCONFIG = _descriptor.Descriptor( - name="BoundingPolyConfig", - full_name="google.cloud.datalabeling.v1beta1.BoundingPolyConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.BoundingPolyConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="instruction_message", - full_name="google.cloud.datalabeling.v1beta1.BoundingPolyConfig.instruction_message", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=738, - serialized_end=826, -) - - -_POLYLINECONFIG = _descriptor.Descriptor( - name="PolylineConfig", - full_name="google.cloud.datalabeling.v1beta1.PolylineConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.PolylineConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="instruction_message", - full_name="google.cloud.datalabeling.v1beta1.PolylineConfig.instruction_message", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, 
- is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=828, - serialized_end=912, -) - - -_SEGMENTATIONCONFIG = _descriptor.Descriptor( - name="SegmentationConfig", - full_name="google.cloud.datalabeling.v1beta1.SegmentationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.SegmentationConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="instruction_message", - full_name="google.cloud.datalabeling.v1beta1.SegmentationConfig.instruction_message", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=914, - serialized_end=997, -) - - -_VIDEOCLASSIFICATIONCONFIG_ANNOTATIONSPECSETCONFIG = _descriptor.Descriptor( - name="AnnotationSpecSetConfig", - full_name="google.cloud.datalabeling.v1beta1.VideoClassificationConfig.AnnotationSpecSetConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - 
full_name="google.cloud.datalabeling.v1beta1.VideoClassificationConfig.AnnotationSpecSetConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="allow_multi_label", - full_name="google.cloud.datalabeling.v1beta1.VideoClassificationConfig.AnnotationSpecSetConfig.allow_multi_label", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1192, - serialized_end=1283, -) - -_VIDEOCLASSIFICATIONCONFIG = _descriptor.Descriptor( - name="VideoClassificationConfig", - full_name="google.cloud.datalabeling.v1beta1.VideoClassificationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set_configs", - full_name="google.cloud.datalabeling.v1beta1.VideoClassificationConfig.annotation_spec_set_configs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="apply_shot_detection", - full_name="google.cloud.datalabeling.v1beta1.VideoClassificationConfig.apply_shot_detection", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_VIDEOCLASSIFICATIONCONFIG_ANNOTATIONSPECSETCONFIG,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1000, - serialized_end=1283, -) - - -_OBJECTDETECTIONCONFIG = _descriptor.Descriptor( - name="ObjectDetectionConfig", - full_name="google.cloud.datalabeling.v1beta1.ObjectDetectionConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.ObjectDetectionConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="extraction_frame_rate", - full_name="google.cloud.datalabeling.v1beta1.ObjectDetectionConfig.extraction_frame_rate", - index=1, - number=3, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], 
- enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1285, - serialized_end=1378, -) - - -_OBJECTTRACKINGCONFIG = _descriptor.Descriptor( - name="ObjectTrackingConfig", - full_name="google.cloud.datalabeling.v1beta1.ObjectTrackingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.ObjectTrackingConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1380, - serialized_end=1436, -) - - -_EVENTCONFIG = _descriptor.Descriptor( - name="EventConfig", - full_name="google.cloud.datalabeling.v1beta1.EventConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_sets", - full_name="google.cloud.datalabeling.v1beta1.EventConfig.annotation_spec_sets", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=1438, - serialized_end=1486, -) - - -_TEXTCLASSIFICATIONCONFIG = _descriptor.Descriptor( - name="TextClassificationConfig", - full_name="google.cloud.datalabeling.v1beta1.TextClassificationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="allow_multi_label", - full_name="google.cloud.datalabeling.v1beta1.TextClassificationConfig.allow_multi_label", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.TextClassificationConfig.annotation_spec_set", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="sentiment_config", - full_name="google.cloud.datalabeling.v1beta1.TextClassificationConfig.sentiment_config", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1489, - 
serialized_end=1664, -) - - -_SENTIMENTCONFIG = _descriptor.Descriptor( - name="SentimentConfig", - full_name="google.cloud.datalabeling.v1beta1.SentimentConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="enable_label_sentiment_selection", - full_name="google.cloud.datalabeling.v1beta1.SentimentConfig.enable_label_sentiment_selection", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1666, - serialized_end=1725, -) - - -_TEXTENTITYEXTRACTIONCONFIG = _descriptor.Descriptor( - name="TextEntityExtractionConfig", - full_name="google.cloud.datalabeling.v1beta1.TextEntityExtractionConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_spec_set", - full_name="google.cloud.datalabeling.v1beta1.TextEntityExtractionConfig.annotation_spec_set", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1727, - serialized_end=1789, -) - 
-_HUMANANNOTATIONCONFIG.fields_by_name[ - "question_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_IMAGECLASSIFICATIONCONFIG.fields_by_name[ - "answer_aggregation_type" -].enum_type = _STRINGAGGREGATIONTYPE -_VIDEOCLASSIFICATIONCONFIG_ANNOTATIONSPECSETCONFIG.containing_type = ( - _VIDEOCLASSIFICATIONCONFIG -) -_VIDEOCLASSIFICATIONCONFIG.fields_by_name[ - "annotation_spec_set_configs" -].message_type = _VIDEOCLASSIFICATIONCONFIG_ANNOTATIONSPECSETCONFIG -_TEXTCLASSIFICATIONCONFIG.fields_by_name[ - "sentiment_config" -].message_type = _SENTIMENTCONFIG -DESCRIPTOR.message_types_by_name["HumanAnnotationConfig"] = _HUMANANNOTATIONCONFIG -DESCRIPTOR.message_types_by_name[ - "ImageClassificationConfig" -] = _IMAGECLASSIFICATIONCONFIG -DESCRIPTOR.message_types_by_name["BoundingPolyConfig"] = _BOUNDINGPOLYCONFIG -DESCRIPTOR.message_types_by_name["PolylineConfig"] = _POLYLINECONFIG -DESCRIPTOR.message_types_by_name["SegmentationConfig"] = _SEGMENTATIONCONFIG -DESCRIPTOR.message_types_by_name[ - "VideoClassificationConfig" -] = _VIDEOCLASSIFICATIONCONFIG -DESCRIPTOR.message_types_by_name["ObjectDetectionConfig"] = _OBJECTDETECTIONCONFIG -DESCRIPTOR.message_types_by_name["ObjectTrackingConfig"] = _OBJECTTRACKINGCONFIG -DESCRIPTOR.message_types_by_name["EventConfig"] = _EVENTCONFIG -DESCRIPTOR.message_types_by_name["TextClassificationConfig"] = _TEXTCLASSIFICATIONCONFIG -DESCRIPTOR.message_types_by_name["SentimentConfig"] = _SENTIMENTCONFIG -DESCRIPTOR.message_types_by_name[ - "TextEntityExtractionConfig" -] = _TEXTENTITYEXTRACTIONCONFIG -DESCRIPTOR.enum_types_by_name["StringAggregationType"] = _STRINGAGGREGATIONTYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -HumanAnnotationConfig = _reflection.GeneratedProtocolMessageType( - "HumanAnnotationConfig", - (_message.Message,), - { - "DESCRIPTOR": _HUMANANNOTATIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Configuration for how human 
labeling task should be done. - - Attributes: - instruction: - Required. Instruction resource name. - annotated_dataset_display_name: - Required. A human-readable name for AnnotatedDataset defined - by users. Maximum of 64 characters . - annotated_dataset_description: - Optional. A human-readable description for AnnotatedDataset. - The description can be up to 10000 characters long. - label_group: - Optional. A human-readable label used to logically group - labeling tasks. This string must match the regular expression - ``[a-zA-Z\\d_-]{0,128}``. - language_code: - Optional. The Language of this question, as a `BCP-47 - `__. Default - value is en-US. Only need to set this when task is language - related. For example, French text classification. - replica_count: - Optional. Replication of questions. Each question will be sent - to up to this number of contributors to label. Aggregated - answers will be returned. Default is set to 1. For image - related labeling, valid values are 1, 3, 5. - question_duration: - Optional. Maximum duration for contributors to answer a - question. Maximum is 3600 seconds. Default is 3600 seconds. - contributor_emails: - Optional. If you want your own labeling contributors to manage - and work on this labeling request, you can set these - contributors here. We will give them access to the question - types in crowdcompute. Note that these emails must be - registered in crowdcompute worker UI: https://crowd- - compute.appspot.com/ - user_email_address: - Email of the user who started the labeling task and should be - notified by email. If empty no notification will be sent. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.HumanAnnotationConfig) - }, -) -_sym_db.RegisterMessage(HumanAnnotationConfig) - -ImageClassificationConfig = _reflection.GeneratedProtocolMessageType( - "ImageClassificationConfig", - (_message.Message,), - { - "DESCRIPTOR": _IMAGECLASSIFICATIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for image classification human labeling task. - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. - allow_multi_label: - Optional. If allow_multi_label is true, contributors are able - to choose multiple labels for one image. - answer_aggregation_type: - Optional. The type of how to aggregate answers. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImageClassificationConfig) - }, -) -_sym_db.RegisterMessage(ImageClassificationConfig) - -BoundingPolyConfig = _reflection.GeneratedProtocolMessageType( - "BoundingPolyConfig", - (_message.Message,), - { - "DESCRIPTOR": _BOUNDINGPOLYCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for image bounding poly (and bounding box) human labeling task. - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. - instruction_message: - Optional. Instruction message showed on contributors UI. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.BoundingPolyConfig) - }, -) -_sym_db.RegisterMessage(BoundingPolyConfig) - -PolylineConfig = _reflection.GeneratedProtocolMessageType( - "PolylineConfig", - (_message.Message,), - { - "DESCRIPTOR": _POLYLINECONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for image polyline human labeling task. - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. 
- instruction_message: - Optional. Instruction message showed on contributors UI. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.PolylineConfig) - }, -) -_sym_db.RegisterMessage(PolylineConfig) - -SegmentationConfig = _reflection.GeneratedProtocolMessageType( - "SegmentationConfig", - (_message.Message,), - { - "DESCRIPTOR": _SEGMENTATIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for image segmentation - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. format: projects/ - {project_id}/annotationSpecSets/{annotation_spec_set_id} - instruction_message: - Instruction message showed on labelers UI. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SegmentationConfig) - }, -) -_sym_db.RegisterMessage(SegmentationConfig) - -VideoClassificationConfig = _reflection.GeneratedProtocolMessageType( - "VideoClassificationConfig", - (_message.Message,), - { - "AnnotationSpecSetConfig": _reflection.GeneratedProtocolMessageType( - "AnnotationSpecSetConfig", - (_message.Message,), - { - "DESCRIPTOR": _VIDEOCLASSIFICATIONCONFIG_ANNOTATIONSPECSETCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Annotation spec set with the setting of allowing multi labels or not. - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. - allow_multi_label: - Optional. If allow_multi_label is true, contributors are able - to choose multiple labels from one annotation spec set. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.VideoClassificationConfig.AnnotationSpecSetConfig) - }, - ), - "DESCRIPTOR": _VIDEOCLASSIFICATIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for video classification human labeling task. 
Currently two - types of video classification are supported: 1. Assign labels on the - entire video. 2. Split the video into multiple video clips based on - camera shot, and assign labels on each video clip. - - Attributes: - annotation_spec_set_configs: - Required. The list of annotation spec set configs. Since - watching a video clip takes much longer time than an image, we - support label with multiple AnnotationSpecSet at the same - time. Labels in each AnnotationSpecSet will be shown in a - group to contributors. Contributors can select one or more - (depending on whether to allow multi label) from each group. - apply_shot_detection: - Optional. Option to apply shot detection on the video. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.VideoClassificationConfig) - }, -) -_sym_db.RegisterMessage(VideoClassificationConfig) -_sym_db.RegisterMessage(VideoClassificationConfig.AnnotationSpecSetConfig) - -ObjectDetectionConfig = _reflection.GeneratedProtocolMessageType( - "ObjectDetectionConfig", - (_message.Message,), - { - "DESCRIPTOR": _OBJECTDETECTIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for video object detection human labeling task. Object - detection will be conducted on the images extracted from the video, - and those objects will be labeled with bounding boxes. User need to - specify the number of images to be extracted per second as the - extraction frame rate. - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. - extraction_frame_rate: - Required. Number of frames per second to be extracted from the - video. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ObjectDetectionConfig) - }, -) -_sym_db.RegisterMessage(ObjectDetectionConfig) - -ObjectTrackingConfig = _reflection.GeneratedProtocolMessageType( - "ObjectTrackingConfig", - (_message.Message,), - { - "DESCRIPTOR": _OBJECTTRACKINGCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for video object tracking human labeling task. - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ObjectTrackingConfig) - }, -) -_sym_db.RegisterMessage(ObjectTrackingConfig) - -EventConfig = _reflection.GeneratedProtocolMessageType( - "EventConfig", - (_message.Message,), - { - "DESCRIPTOR": _EVENTCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for video event human labeling task. - - Attributes: - annotation_spec_sets: - Required. The list of annotation spec set resource name. - Similar to video classification, we support selecting event - from multiple AnnotationSpecSet at the same time. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.EventConfig) - }, -) -_sym_db.RegisterMessage(EventConfig) - -TextClassificationConfig = _reflection.GeneratedProtocolMessageType( - "TextClassificationConfig", - (_message.Message,), - { - "DESCRIPTOR": _TEXTCLASSIFICATIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for text classification human labeling task. - - Attributes: - allow_multi_label: - Optional. If allow_multi_label is true, contributors are able - to choose multiple labels for one text segment. - annotation_spec_set: - Required. Annotation spec set resource name. - sentiment_config: - Optional. Configs for sentiment selection. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TextClassificationConfig) - }, -) -_sym_db.RegisterMessage(TextClassificationConfig) - -SentimentConfig = _reflection.GeneratedProtocolMessageType( - "SentimentConfig", - (_message.Message,), - { - "DESCRIPTOR": _SENTIMENTCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for setting up sentiments. - - Attributes: - enable_label_sentiment_selection: - If set to true, contributors will have the option to select - sentiment of the label they selected, to mark it as negative - or positive label. Default is false. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.SentimentConfig) - }, -) -_sym_db.RegisterMessage(SentimentConfig) - -TextEntityExtractionConfig = _reflection.GeneratedProtocolMessageType( - "TextEntityExtractionConfig", - (_message.Message,), - { - "DESCRIPTOR": _TEXTENTITYEXTRACTIONCONFIG, - "__module__": "google.cloud.datalabeling_v1beta1.proto.human_annotation_config_pb2", - "__doc__": """Config for text entity extraction human labeling task. - - Attributes: - annotation_spec_set: - Required. Annotation spec set resource name. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.TextEntityExtractionConfig) - }, -) -_sym_db.RegisterMessage(TextEntityExtractionConfig) - - -DESCRIPTOR._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["instruction"]._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["annotated_dataset_display_name"]._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["annotated_dataset_description"]._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["label_group"]._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["language_code"]._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["replica_count"]._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["question_duration"]._options = None -_HUMANANNOTATIONCONFIG.fields_by_name["contributor_emails"]._options = None -_IMAGECLASSIFICATIONCONFIG.fields_by_name["annotation_spec_set"]._options = None -_IMAGECLASSIFICATIONCONFIG.fields_by_name["allow_multi_label"]._options = None -_IMAGECLASSIFICATIONCONFIG.fields_by_name["answer_aggregation_type"]._options = None -_BOUNDINGPOLYCONFIG.fields_by_name["annotation_spec_set"]._options = None -_BOUNDINGPOLYCONFIG.fields_by_name["instruction_message"]._options = None -_POLYLINECONFIG.fields_by_name["annotation_spec_set"]._options = None -_POLYLINECONFIG.fields_by_name["instruction_message"]._options = None -_SEGMENTATIONCONFIG.fields_by_name["annotation_spec_set"]._options = None -_VIDEOCLASSIFICATIONCONFIG_ANNOTATIONSPECSETCONFIG.fields_by_name[ - "annotation_spec_set" -]._options = None -_VIDEOCLASSIFICATIONCONFIG_ANNOTATIONSPECSETCONFIG.fields_by_name[ - "allow_multi_label" -]._options = None -_VIDEOCLASSIFICATIONCONFIG.fields_by_name["annotation_spec_set_configs"]._options = None -_VIDEOCLASSIFICATIONCONFIG.fields_by_name["apply_shot_detection"]._options = None -_OBJECTDETECTIONCONFIG.fields_by_name["annotation_spec_set"]._options = None -_OBJECTDETECTIONCONFIG.fields_by_name["extraction_frame_rate"]._options = None 
-_OBJECTTRACKINGCONFIG.fields_by_name["annotation_spec_set"]._options = None -_EVENTCONFIG.fields_by_name["annotation_spec_sets"]._options = None -_TEXTCLASSIFICATIONCONFIG.fields_by_name["allow_multi_label"]._options = None -_TEXTCLASSIFICATIONCONFIG.fields_by_name["annotation_spec_set"]._options = None -_TEXTCLASSIFICATIONCONFIG.fields_by_name["sentiment_config"]._options = None -_TEXTENTITYEXTRACTIONCONFIG.fields_by_name["annotation_spec_set"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/human_annotation_config_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/instruction_pb2.py b/google/cloud/datalabeling_v1beta1/proto/instruction_pb2.py deleted file mode 100644 index 15eb13d..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/instruction_pb2.py +++ /dev/null @@ -1,414 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/datalabeling_v1beta1/proto/instruction.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - dataset_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/instruction.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n9google/cloud/datalabeling_v1beta1/proto/instruction.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x35google/cloud/datalabeling_v1beta1/proto/dataset.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xfd\x03\n\x0bInstruction\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12>\n\tdata_type\x18\x06 \x01(\x0e\x32+.google.cloud.datalabeling.v1beta1.DataType\x12N\n\x0f\x63sv_instruction\x18\x07 \x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.CsvInstructionB\x02\x18\x01\x12J\n\x0fpdf_instruction\x18\t 
\x01(\x0b\x32\x31.google.cloud.datalabeling.v1beta1.PdfInstruction\x12\x1a\n\x12\x62locking_resources\x18\n \x03(\t:[\xea\x41X\n\'datalabeling.googleapis.com/Instruction\x12-projects/{project}/instructions/{instruction}"&\n\x0e\x43svInstruction\x12\x14\n\x0cgcs_file_uri\x18\x01 \x01(\t"&\n\x0ePdfInstruction\x12\x14\n\x0cgcs_file_uri\x18\x01 \x01(\tBx\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_INSTRUCTION = _descriptor.Descriptor( - name="Instruction", - full_name="google.cloud.datalabeling.v1beta1.Instruction", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.datalabeling.v1beta1.Instruction.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.datalabeling.v1beta1.Instruction.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="description", - 
full_name="google.cloud.datalabeling.v1beta1.Instruction.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.Instruction.create_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.datalabeling.v1beta1.Instruction.update_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_type", - full_name="google.cloud.datalabeling.v1beta1.Instruction.data_type", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="csv_instruction", - full_name="google.cloud.datalabeling.v1beta1.Instruction.csv_instruction", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, 
- containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="pdf_instruction", - full_name="google.cloud.datalabeling.v1beta1.Instruction.pdf_instruction", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="blocking_resources", - full_name="google.cloud.datalabeling.v1beta1.Instruction.blocking_resources", - index=8, - number=10, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352AX\n'datalabeling.googleapis.com/Instruction\022-projects/{project}/instructions/{instruction}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=242, - serialized_end=751, -) - - -_CSVINSTRUCTION = _descriptor.Descriptor( - name="CsvInstruction", - full_name="google.cloud.datalabeling.v1beta1.CsvInstruction", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="gcs_file_uri", - full_name="google.cloud.datalabeling.v1beta1.CsvInstruction.gcs_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=753, - serialized_end=791, -) - - -_PDFINSTRUCTION = _descriptor.Descriptor( - name="PdfInstruction", - full_name="google.cloud.datalabeling.v1beta1.PdfInstruction", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="gcs_file_uri", - full_name="google.cloud.datalabeling.v1beta1.PdfInstruction.gcs_file_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=793, - serialized_end=831, -) - -_INSTRUCTION.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_INSTRUCTION.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_INSTRUCTION.fields_by_name[ - "data_type" -].enum_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._DATATYPE -) -_INSTRUCTION.fields_by_name["csv_instruction"].message_type = _CSVINSTRUCTION -_INSTRUCTION.fields_by_name["pdf_instruction"].message_type = _PDFINSTRUCTION -DESCRIPTOR.message_types_by_name["Instruction"] = _INSTRUCTION -DESCRIPTOR.message_types_by_name["CsvInstruction"] = _CSVINSTRUCTION -DESCRIPTOR.message_types_by_name["PdfInstruction"] = _PDFINSTRUCTION 
-_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Instruction = _reflection.GeneratedProtocolMessageType( - "Instruction", - (_message.Message,), - { - "DESCRIPTOR": _INSTRUCTION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.instruction_pb2", - "__doc__": """Instruction of how to perform the labeling task for human operators. - Currently only PDF instruction is supported. - - Attributes: - name: - Output only. Instruction resource name, format: - projects/{project_id}/instructions/{instruction_id} - display_name: - Required. The display name of the instruction. Maximum of 64 - characters. - description: - Optional. User-provided description of the instruction. The - description can be up to 10000 characters long. - create_time: - Output only. Creation time of instruction. - update_time: - Output only. Last update time of instruction. - data_type: - Required. The data type of this instruction. - csv_instruction: - Deprecated: this instruction format is not supported any more. - Instruction from a CSV file, such as for classification task. - The CSV file should have exact two columns, in the following - format: - The first column is labeled data, such as an image - reference, text. - The second column is comma separated - labels associated with data. - pdf_instruction: - Instruction from a PDF document. The PDF should be in a Cloud - Storage bucket. - blocking_resources: - Output only. The names of any related resources that are - blocking changes to the instruction. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.Instruction) - }, -) -_sym_db.RegisterMessage(Instruction) - -CsvInstruction = _reflection.GeneratedProtocolMessageType( - "CsvInstruction", - (_message.Message,), - { - "DESCRIPTOR": _CSVINSTRUCTION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.instruction_pb2", - "__doc__": """Deprecated: this instruction format is not supported any more. - Instruction from a CSV file. 
- - Attributes: - gcs_file_uri: - CSV file for the instruction. Only gcs path is allowed. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.CsvInstruction) - }, -) -_sym_db.RegisterMessage(CsvInstruction) - -PdfInstruction = _reflection.GeneratedProtocolMessageType( - "PdfInstruction", - (_message.Message,), - { - "DESCRIPTOR": _PDFINSTRUCTION, - "__module__": "google.cloud.datalabeling_v1beta1.proto.instruction_pb2", - "__doc__": """Instruction from a PDF file. - - Attributes: - gcs_file_uri: - PDF file for the instruction. Only gcs path is allowed. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.PdfInstruction) - }, -) -_sym_db.RegisterMessage(PdfInstruction) - - -DESCRIPTOR._options = None -_INSTRUCTION.fields_by_name["csv_instruction"]._options = None -_INSTRUCTION._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/instruction_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/instruction_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/instruction_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/proto/operations_pb2.py b/google/cloud/datalabeling_v1beta1/proto/operations_pb2.py deleted file mode 100644 index 038207c..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/operations_pb2.py +++ /dev/null @@ -1,1918 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/datalabeling_v1beta1/proto/operations.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - dataset_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2, -) -from google.cloud.datalabeling_v1beta1.proto import ( - human_annotation_config_pb2 as google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/datalabeling_v1beta1/proto/operations.proto", - package="google.cloud.datalabeling.v1beta1", - syntax="proto3", - serialized_options=b"\n%com.google.cloud.datalabeling.v1beta1P\001ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabeling", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n8google/cloud/datalabeling_v1beta1/proto/operations.proto\x12!google.cloud.datalabeling.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x35google/cloud/datalabeling_v1beta1/proto/dataset.proto\x1a\x45google/cloud/datalabeling_v1beta1/proto/human_annotation_config.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"Y\n\x1bImportDataOperationResponse\x12\x0f\n\x07\x64\x61taset\x18\x01 \x01(\t\x12\x13\n\x0btotal_count\x18\x02 \x01(\x05\x12\x14\n\x0cimport_count\x18\x03 
\x01(\x05"\xe5\x01\n\x1b\x45xportDataOperationResponse\x12\x0f\n\x07\x64\x61taset\x18\x01 \x01(\t\x12\x13\n\x0btotal_count\x18\x02 \x01(\x05\x12\x14\n\x0c\x65xport_count\x18\x03 \x01(\x05\x12\x42\n\x0blabel_stats\x18\x04 \x01(\x0b\x32-.google.cloud.datalabeling.v1beta1.LabelStats\x12\x46\n\routput_config\x18\x05 \x01(\x0b\x32/.google.cloud.datalabeling.v1beta1.OutputConfig"\x8d\x01\n\x1bImportDataOperationMetadata\x12\x0f\n\x07\x64\x61taset\x18\x01 \x01(\t\x12,\n\x10partial_failures\x18\x02 \x03(\x0b\x32\x12.google.rpc.Status\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x8d\x01\n\x1b\x45xportDataOperationMetadata\x12\x0f\n\x07\x64\x61taset\x18\x01 \x01(\t\x12,\n\x10partial_failures\x18\x02 \x03(\x0b\x32\x12.google.rpc.Status\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x8c\x0c\n\x16LabelOperationMetadata\x12t\n\x1cimage_classification_details\x18\x03 \x01(\x0b\x32L.google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadataH\x00\x12o\n\x1aimage_bounding_box_details\x18\x04 \x01(\x0b\x32I.google.cloud.datalabeling.v1beta1.LabelImageBoundingBoxOperationMetadataH\x00\x12q\n\x1bimage_bounding_poly_details\x18\x0b \x01(\x0b\x32J.google.cloud.datalabeling.v1beta1.LabelImageBoundingPolyOperationMetadataH\x00\x12\x80\x01\n#image_oriented_bounding_box_details\x18\x0e \x01(\x0b\x32Q.google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadataH\x00\x12h\n\x16image_polyline_details\x18\x0c \x01(\x0b\x32\x46.google.cloud.datalabeling.v1beta1.LabelImagePolylineOperationMetadataH\x00\x12p\n\x1aimage_segmentation_details\x18\x0f \x01(\x0b\x32J.google.cloud.datalabeling.v1beta1.LabelImageSegmentationOperationMetadataH\x00\x12t\n\x1cvideo_classification_details\x18\x05 \x01(\x0b\x32L.google.cloud.datalabeling.v1beta1.LabelVideoClassificationOperationMetadataH\x00\x12w\n\x1evideo_object_detection_details\x18\x06 
\x01(\x0b\x32M.google.cloud.datalabeling.v1beta1.LabelVideoObjectDetectionOperationMetadataH\x00\x12u\n\x1dvideo_object_tracking_details\x18\x07 \x01(\x0b\x32L.google.cloud.datalabeling.v1beta1.LabelVideoObjectTrackingOperationMetadataH\x00\x12\x62\n\x13video_event_details\x18\x08 \x01(\x0b\x32\x43.google.cloud.datalabeling.v1beta1.LabelVideoEventOperationMetadataH\x00\x12r\n\x1btext_classification_details\x18\t \x01(\x0b\x32K.google.cloud.datalabeling.v1beta1.LabelTextClassificationOperationMetadataH\x00\x12w\n\x1etext_entity_extraction_details\x18\r \x01(\x0b\x32M.google.cloud.datalabeling.v1beta1.LabelTextEntityExtractionOperationMetadataH\x00\x12\x18\n\x10progress_percent\x18\x01 \x01(\x05\x12,\n\x10partial_failures\x18\x02 \x03(\x0b\x32\x12.google.rpc.Status\x12/\n\x0b\x63reate_time\x18\x10 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\t\n\x07\x64\x65tails"{\n)LabelImageClassificationOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"x\n&LabelImageBoundingBoxOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"\x80\x01\n.LabelImageOrientedBoundingBoxOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"y\n\'LabelImageBoundingPolyOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"u\n#LabelImagePolylineOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"y\n\'LabelImageSegmentationOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"{\n)LabelVideoClassificationOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 
\x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"|\n*LabelVideoObjectDetectionOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"{\n)LabelVideoObjectTrackingOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"r\n LabelVideoEventOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"z\n(LabelTextClassificationOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"|\n*LabelTextEntityExtractionOperationMetadata\x12N\n\x0c\x62\x61sic_config\x18\x01 \x01(\x0b\x32\x38.google.cloud.datalabeling.v1beta1.HumanAnnotationConfig"\x8f\x01\n\x19\x43reateInstructionMetadata\x12\x13\n\x0binstruction\x18\x01 \x01(\t\x12,\n\x10partial_failures\x18\x02 \x03(\x0b\x32\x12.google.rpc.Status\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampBx\n%com.google.cloud.datalabeling.v1beta1P\x01ZMgoogle.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1;datalabelingb\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2.DESCRIPTOR, - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - ], -) - - -_IMPORTDATAOPERATIONRESPONSE = _descriptor.Descriptor( - name="ImportDataOperationResponse", - full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dataset", - 
full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationResponse.dataset", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="total_count", - full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationResponse.total_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="import_count", - full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationResponse.import_count", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=336, - serialized_end=425, -) - - -_EXPORTDATAOPERATIONRESPONSE = _descriptor.Descriptor( - name="ExportDataOperationResponse", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dataset", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationResponse.dataset", - index=0, - number=1, - type=9, - 
cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="total_count", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationResponse.total_count", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="export_count", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationResponse.export_count", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="label_stats", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationResponse.label_stats", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="output_config", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationResponse.output_config", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, 
- extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=428, - serialized_end=657, -) - - -_IMPORTDATAOPERATIONMETADATA = _descriptor.Descriptor( - name="ImportDataOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dataset", - full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationMetadata.dataset", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="partial_failures", - full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationMetadata.partial_failures", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.ImportDataOperationMetadata.create_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=660, - serialized_end=801, -) - - -_EXPORTDATAOPERATIONMETADATA = _descriptor.Descriptor( - name="ExportDataOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dataset", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationMetadata.dataset", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="partial_failures", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationMetadata.partial_failures", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.ExportDataOperationMetadata.create_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], 
- serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=804, - serialized_end=945, -) - - -_LABELOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="image_classification_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.image_classification_details", - index=0, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_bounding_box_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.image_bounding_box_details", - index=1, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_bounding_poly_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.image_bounding_poly_details", - index=2, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_oriented_bounding_box_details", - 
full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.image_oriented_bounding_box_details", - index=3, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_polyline_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.image_polyline_details", - index=4, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="image_segmentation_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.image_segmentation_details", - index=5, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_classification_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.video_classification_details", - index=6, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_object_detection_details", - 
full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.video_object_detection_details", - index=7, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_object_tracking_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.video_object_tracking_details", - index=8, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="video_event_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.video_event_details", - index=9, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_classification_details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.text_classification_details", - index=10, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text_entity_extraction_details", - 
full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.text_entity_extraction_details", - index=11, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="progress_percent", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.progress_percent", - index=12, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="partial_failures", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.partial_failures", - index=13, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.create_time", - index=14, - number=16, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - 
name="details", - full_name="google.cloud.datalabeling.v1beta1.LabelOperationMetadata.details", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=948, - serialized_end=2496, -) - - -_LABELIMAGECLASSIFICATIONOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelImageClassificationOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2498, - serialized_end=2621, -) - - -_LABELIMAGEBOUNDINGBOXOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelImageBoundingBoxOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelImageBoundingBoxOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageBoundingBoxOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, 
- file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2623, - serialized_end=2743, -) - - -_LABELIMAGEORIENTEDBOUNDINGBOXOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelImageOrientedBoundingBoxOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2746, - serialized_end=2874, -) - - -_LABELIMAGEBOUNDINGPOLYOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelImageBoundingPolyOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelImageBoundingPolyOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageBoundingPolyOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2876, - serialized_end=2997, -) - - -_LABELIMAGEPOLYLINEOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelImagePolylineOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelImagePolylineOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImagePolylineOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2999, - serialized_end=3116, -) - - -_LABELIMAGESEGMENTATIONOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelImageSegmentationOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelImageSegmentationOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelImageSegmentationOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3118, - serialized_end=3239, -) - - -_LABELVIDEOCLASSIFICATIONOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelVideoClassificationOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoClassificationOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoClassificationOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3241, - serialized_end=3364, -) - - -_LABELVIDEOOBJECTDETECTIONOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelVideoObjectDetectionOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoObjectDetectionOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoObjectDetectionOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3366, - serialized_end=3490, -) - - -_LABELVIDEOOBJECTTRACKINGOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelVideoObjectTrackingOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoObjectTrackingOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoObjectTrackingOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3492, - serialized_end=3615, -) - - -_LABELVIDEOEVENTOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelVideoEventOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoEventOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelVideoEventOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - 
has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3617, - serialized_end=3731, -) - - -_LABELTEXTCLASSIFICATIONOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelTextClassificationOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelTextClassificationOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelTextClassificationOperationMetadata.basic_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3733, - serialized_end=3855, -) - - -_LABELTEXTENTITYEXTRACTIONOPERATIONMETADATA = _descriptor.Descriptor( - name="LabelTextEntityExtractionOperationMetadata", - full_name="google.cloud.datalabeling.v1beta1.LabelTextEntityExtractionOperationMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="basic_config", - full_name="google.cloud.datalabeling.v1beta1.LabelTextEntityExtractionOperationMetadata.basic_config", - index=0, - 
number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3857, - serialized_end=3981, -) - - -_CREATEINSTRUCTIONMETADATA = _descriptor.Descriptor( - name="CreateInstructionMetadata", - full_name="google.cloud.datalabeling.v1beta1.CreateInstructionMetadata", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="instruction", - full_name="google.cloud.datalabeling.v1beta1.CreateInstructionMetadata.instruction", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="partial_failures", - full_name="google.cloud.datalabeling.v1beta1.CreateInstructionMetadata.partial_failures", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.cloud.datalabeling.v1beta1.CreateInstructionMetadata.create_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3984, - serialized_end=4127, -) - -_EXPORTDATAOPERATIONRESPONSE.fields_by_name[ - "label_stats" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._LABELSTATS -) -_EXPORTDATAOPERATIONRESPONSE.fields_by_name[ - "output_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_dataset__pb2._OUTPUTCONFIG -) -_IMPORTDATAOPERATIONMETADATA.fields_by_name[ - "partial_failures" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_IMPORTDATAOPERATIONMETADATA.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_EXPORTDATAOPERATIONMETADATA.fields_by_name[ - "partial_failures" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_EXPORTDATAOPERATIONMETADATA.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LABELOPERATIONMETADATA.fields_by_name[ - "image_classification_details" -].message_type = _LABELIMAGECLASSIFICATIONOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "image_bounding_box_details" -].message_type = _LABELIMAGEBOUNDINGBOXOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "image_bounding_poly_details" -].message_type = _LABELIMAGEBOUNDINGPOLYOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "image_oriented_bounding_box_details" -].message_type = _LABELIMAGEORIENTEDBOUNDINGBOXOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "image_polyline_details" -].message_type = _LABELIMAGEPOLYLINEOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - 
"image_segmentation_details" -].message_type = _LABELIMAGESEGMENTATIONOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "video_classification_details" -].message_type = _LABELVIDEOCLASSIFICATIONOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "video_object_detection_details" -].message_type = _LABELVIDEOOBJECTDETECTIONOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "video_object_tracking_details" -].message_type = _LABELVIDEOOBJECTTRACKINGOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "video_event_details" -].message_type = _LABELVIDEOEVENTOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "text_classification_details" -].message_type = _LABELTEXTCLASSIFICATIONOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "text_entity_extraction_details" -].message_type = _LABELTEXTENTITYEXTRACTIONOPERATIONMETADATA -_LABELOPERATIONMETADATA.fields_by_name[ - "partial_failures" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_LABELOPERATIONMETADATA.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["image_classification_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "image_classification_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["image_bounding_box_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "image_bounding_box_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["image_bounding_poly_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "image_bounding_poly_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] 
-_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["image_oriented_bounding_box_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "image_oriented_bounding_box_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["image_polyline_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "image_polyline_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["image_segmentation_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "image_segmentation_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["video_classification_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "video_classification_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["video_object_detection_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "video_object_detection_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["video_object_tracking_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "video_object_tracking_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["video_event_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "video_event_details" -].containing_oneof = 
_LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["text_classification_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "text_classification_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELOPERATIONMETADATA.oneofs_by_name["details"].fields.append( - _LABELOPERATIONMETADATA.fields_by_name["text_entity_extraction_details"] -) -_LABELOPERATIONMETADATA.fields_by_name[ - "text_entity_extraction_details" -].containing_oneof = _LABELOPERATIONMETADATA.oneofs_by_name["details"] -_LABELIMAGECLASSIFICATIONOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELIMAGEBOUNDINGBOXOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELIMAGEORIENTEDBOUNDINGBOXOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELIMAGEBOUNDINGPOLYOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELIMAGEPOLYLINEOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELIMAGESEGMENTATIONOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELVIDEOCLASSIFICATIONOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - 
google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELVIDEOOBJECTDETECTIONOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELVIDEOOBJECTTRACKINGOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELVIDEOEVENTOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELTEXTCLASSIFICATIONOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_LABELTEXTENTITYEXTRACTIONOPERATIONMETADATA.fields_by_name[ - "basic_config" -].message_type = ( - google_dot_cloud_dot_datalabeling__v1beta1_dot_proto_dot_human__annotation__config__pb2._HUMANANNOTATIONCONFIG -) -_CREATEINSTRUCTIONMETADATA.fields_by_name[ - "partial_failures" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_CREATEINSTRUCTIONMETADATA.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -DESCRIPTOR.message_types_by_name[ - "ImportDataOperationResponse" -] = _IMPORTDATAOPERATIONRESPONSE -DESCRIPTOR.message_types_by_name[ - "ExportDataOperationResponse" -] = _EXPORTDATAOPERATIONRESPONSE -DESCRIPTOR.message_types_by_name[ - "ImportDataOperationMetadata" -] = _IMPORTDATAOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "ExportDataOperationMetadata" -] = _EXPORTDATAOPERATIONMETADATA -DESCRIPTOR.message_types_by_name["LabelOperationMetadata"] = _LABELOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelImageClassificationOperationMetadata" -] = 
_LABELIMAGECLASSIFICATIONOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelImageBoundingBoxOperationMetadata" -] = _LABELIMAGEBOUNDINGBOXOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelImageOrientedBoundingBoxOperationMetadata" -] = _LABELIMAGEORIENTEDBOUNDINGBOXOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelImageBoundingPolyOperationMetadata" -] = _LABELIMAGEBOUNDINGPOLYOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelImagePolylineOperationMetadata" -] = _LABELIMAGEPOLYLINEOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelImageSegmentationOperationMetadata" -] = _LABELIMAGESEGMENTATIONOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelVideoClassificationOperationMetadata" -] = _LABELVIDEOCLASSIFICATIONOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelVideoObjectDetectionOperationMetadata" -] = _LABELVIDEOOBJECTDETECTIONOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelVideoObjectTrackingOperationMetadata" -] = _LABELVIDEOOBJECTTRACKINGOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelVideoEventOperationMetadata" -] = _LABELVIDEOEVENTOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelTextClassificationOperationMetadata" -] = _LABELTEXTCLASSIFICATIONOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "LabelTextEntityExtractionOperationMetadata" -] = _LABELTEXTENTITYEXTRACTIONOPERATIONMETADATA -DESCRIPTOR.message_types_by_name[ - "CreateInstructionMetadata" -] = _CREATEINSTRUCTIONMETADATA -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ImportDataOperationResponse = _reflection.GeneratedProtocolMessageType( - "ImportDataOperationResponse", - (_message.Message,), - { - "DESCRIPTOR": _IMPORTDATAOPERATIONRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Response used for ImportData longrunning operation. - - Attributes: - dataset: - Ouptut only. The name of imported dataset. - total_count: - Output only. 
Total number of examples requested to import - import_count: - Output only. Number of examples imported successfully. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImportDataOperationResponse) - }, -) -_sym_db.RegisterMessage(ImportDataOperationResponse) - -ExportDataOperationResponse = _reflection.GeneratedProtocolMessageType( - "ExportDataOperationResponse", - (_message.Message,), - { - "DESCRIPTOR": _EXPORTDATAOPERATIONRESPONSE, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Response used for ExportDataset longrunning operation. - - Attributes: - dataset: - Ouptut only. The name of dataset. “projects/\ */datasets/*” - total_count: - Output only. Total number of examples requested to export - export_count: - Output only. Number of examples exported successfully. - label_stats: - Output only. Statistic infos of labels in the exported - dataset. - output_config: - Output only. output_config in the ExportData request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ExportDataOperationResponse) - }, -) -_sym_db.RegisterMessage(ExportDataOperationResponse) - -ImportDataOperationMetadata = _reflection.GeneratedProtocolMessageType( - "ImportDataOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _IMPORTDATAOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Metadata of an ImportData operation. - - Attributes: - dataset: - Output only. The name of imported dataset. “projects/\ - */datasets/*” - partial_failures: - Output only. Partial failures encountered. E.g. single files - that couldn’t be read. Status details field will contain - standard GCP error details. - create_time: - Output only. Timestamp when import dataset request was - created. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ImportDataOperationMetadata) - }, -) -_sym_db.RegisterMessage(ImportDataOperationMetadata) - -ExportDataOperationMetadata = _reflection.GeneratedProtocolMessageType( - "ExportDataOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _EXPORTDATAOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Metadata of an ExportData operation. - - Attributes: - dataset: - Output only. The name of dataset to be exported. “projects/\ - */datasets/*” - partial_failures: - Output only. Partial failures encountered. E.g. single files - that couldn’t be read. Status details field will contain - standard GCP error details. - create_time: - Output only. Timestamp when export dataset request was - created. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ExportDataOperationMetadata) - }, -) -_sym_db.RegisterMessage(ExportDataOperationMetadata) - -LabelOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Metadata of a labeling operation, such as LabelImage or LabelVideo. - Next tag: 20 - - Attributes: - details: - Ouptut only. Details of specific label operation. - image_classification_details: - Details of label image classification operation. - image_bounding_box_details: - Details of label image bounding box operation. - image_bounding_poly_details: - Details of label image bounding poly operation. - image_oriented_bounding_box_details: - Details of label image oriented bounding box operation. - image_polyline_details: - Details of label image polyline operation. - image_segmentation_details: - Details of label image segmentation operation. 
- video_classification_details: - Details of label video classification operation. - video_object_detection_details: - Details of label video object detection operation. - video_object_tracking_details: - Details of label video object tracking operation. - video_event_details: - Details of label video event operation. - text_classification_details: - Details of label text classification operation. - text_entity_extraction_details: - Details of label text entity extraction operation. - progress_percent: - Output only. Progress of label operation. Range: [0, 100]. - partial_failures: - Output only. Partial failures encountered. E.g. single files - that couldn’t be read. Status details field will contain - standard GCP error details. - create_time: - Output only. Timestamp when labeling request was created. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelOperationMetadata) - -LabelImageClassificationOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelImageClassificationOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELIMAGECLASSIFICATIONOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Metadata of a LabelImageClassification operation. - - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelImageClassificationOperationMetadata) - -LabelImageBoundingBoxOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelImageBoundingBoxOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELIMAGEBOUNDINGBOXOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelImageBoundingBox operation metadata. 
- - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImageBoundingBoxOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelImageBoundingBoxOperationMetadata) - -LabelImageOrientedBoundingBoxOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelImageOrientedBoundingBoxOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELIMAGEORIENTEDBOUNDINGBOXOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelImageOrientedBoundingBox operation metadata. - - Attributes: - basic_config: - Basic human annotation config. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelImageOrientedBoundingBoxOperationMetadata) - -LabelImageBoundingPolyOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelImageBoundingPolyOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELIMAGEBOUNDINGPOLYOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of LabelImageBoundingPoly operation metadata. - - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImageBoundingPolyOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelImageBoundingPolyOperationMetadata) - -LabelImagePolylineOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelImagePolylineOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELIMAGEPOLYLINEOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of LabelImagePolyline operation metadata. 
- - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImagePolylineOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelImagePolylineOperationMetadata) - -LabelImageSegmentationOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelImageSegmentationOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELIMAGESEGMENTATIONOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelImageSegmentation operation metadata. - - Attributes: - basic_config: - Basic human annotation config. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelImageSegmentationOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelImageSegmentationOperationMetadata) - -LabelVideoClassificationOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelVideoClassificationOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELVIDEOCLASSIFICATIONOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelVideoClassification operation metadata. - - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelVideoClassificationOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelVideoClassificationOperationMetadata) - -LabelVideoObjectDetectionOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelVideoObjectDetectionOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELVIDEOOBJECTDETECTIONOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelVideoObjectDetection operation metadata. 
- - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelVideoObjectDetectionOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelVideoObjectDetectionOperationMetadata) - -LabelVideoObjectTrackingOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelVideoObjectTrackingOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELVIDEOOBJECTTRACKINGOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelVideoObjectTracking operation metadata. - - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelVideoObjectTrackingOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelVideoObjectTrackingOperationMetadata) - -LabelVideoEventOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelVideoEventOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELVIDEOEVENTOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelVideoEvent operation metadata. - - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelVideoEventOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelVideoEventOperationMetadata) - -LabelTextClassificationOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelTextClassificationOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELTEXTCLASSIFICATIONOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelTextClassification operation metadata. 
- - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelTextClassificationOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelTextClassificationOperationMetadata) - -LabelTextEntityExtractionOperationMetadata = _reflection.GeneratedProtocolMessageType( - "LabelTextEntityExtractionOperationMetadata", - (_message.Message,), - { - "DESCRIPTOR": _LABELTEXTENTITYEXTRACTIONOPERATIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Details of a LabelTextEntityExtraction operation metadata. - - Attributes: - basic_config: - Basic human annotation config used in labeling request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.LabelTextEntityExtractionOperationMetadata) - }, -) -_sym_db.RegisterMessage(LabelTextEntityExtractionOperationMetadata) - -CreateInstructionMetadata = _reflection.GeneratedProtocolMessageType( - "CreateInstructionMetadata", - (_message.Message,), - { - "DESCRIPTOR": _CREATEINSTRUCTIONMETADATA, - "__module__": "google.cloud.datalabeling_v1beta1.proto.operations_pb2", - "__doc__": """Metadata of a CreateInstruction operation. - - Attributes: - instruction: - The name of the created Instruction. - projects/{project_id}/instructions/{instruction_id} - partial_failures: - Partial failures encountered. E.g. single files that couldn’t - be read. Status details field will contain standard GCP error - details. - create_time: - Timestamp when create instruction request was created. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.CreateInstructionMetadata) - }, -) -_sym_db.RegisterMessage(CreateInstructionMetadata) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/datalabeling_v1beta1/proto/operations_pb2_grpc.py b/google/cloud/datalabeling_v1beta1/proto/operations_pb2_grpc.py deleted file mode 100644 index 8a93939..0000000 --- a/google/cloud/datalabeling_v1beta1/proto/operations_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/google/cloud/datalabeling_v1beta1/py.typed b/google/cloud/datalabeling_v1beta1/py.typed new file mode 100644 index 0000000..1d27d78 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-datalabeling package uses inline types. diff --git a/google/__init__.py b/google/cloud/datalabeling_v1beta1/services/__init__.py similarity index 71% rename from google/__init__.py rename to google/cloud/datalabeling_v1beta1/services/__init__.py index 9a1b64a..42ffdf2 100644 --- a/google/__init__.py +++ b/google/cloud/datalabeling_v1beta1/services/__init__.py @@ -1,24 +1,16 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +# diff --git a/google/cloud/__init__.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/__init__.py similarity index 70% rename from google/cloud/__init__.py rename to google/cloud/datalabeling_v1beta1/services/data_labeling_service/__init__.py index 9a1b64a..d432b13 100644 --- a/google/cloud/__init__.py +++ b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/__init__.py @@ -1,24 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+# -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil +from .client import DataLabelingServiceClient +from .async_client import DataLabelingServiceAsyncClient - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "DataLabelingServiceClient", + "DataLabelingServiceAsyncClient", +) diff --git a/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py new file mode 100644 index 0000000..5a5c908 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/async_client.py @@ -0,0 +1,3140 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation +from google.api_core import operation_async +from google.cloud.datalabeling_v1beta1.services.data_labeling_service import pagers +from google.cloud.datalabeling_v1beta1.types import annotation +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.cloud.datalabeling_v1beta1.types import ( + annotation_spec_set as gcd_annotation_spec_set, +) +from google.cloud.datalabeling_v1beta1.types import data_labeling_service +from google.cloud.datalabeling_v1beta1.types import data_payloads +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job +from google.cloud.datalabeling_v1beta1.types import evaluation_job as gcd_evaluation_job +from google.cloud.datalabeling_v1beta1.types import human_annotation_config +from google.cloud.datalabeling_v1beta1.types import instruction +from google.cloud.datalabeling_v1beta1.types import instruction as gcd_instruction +from google.cloud.datalabeling_v1beta1.types import operations +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import DataLabelingServiceTransport +from .transports.grpc_asyncio import DataLabelingServiceGrpcAsyncIOTransport +from 
.client import DataLabelingServiceClient + + +class DataLabelingServiceAsyncClient: + """""" + + _client: DataLabelingServiceClient + + DEFAULT_ENDPOINT = DataLabelingServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataLabelingServiceClient.DEFAULT_MTLS_ENDPOINT + + instruction_path = staticmethod(DataLabelingServiceClient.instruction_path) + + annotation_spec_set_path = staticmethod( + DataLabelingServiceClient.annotation_spec_set_path + ) + + evaluation_job_path = staticmethod(DataLabelingServiceClient.evaluation_job_path) + + dataset_path = staticmethod(DataLabelingServiceClient.dataset_path) + + from_service_account_file = DataLabelingServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(DataLabelingServiceClient).get_transport_class, + type(DataLabelingServiceClient), + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DataLabelingServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + ) -> None: + """Instantiate the data labeling service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataLabelingServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = DataLabelingServiceClient( + credentials=credentials, transport=transport, client_options=client_options, + ) + + async def create_dataset( + self, + request: data_labeling_service.CreateDatasetRequest = None, + *, + parent: str = None, + dataset: gcd_dataset.Dataset = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_dataset.Dataset: + r"""Creates dataset. If success return a Dataset + resource. + + Args: + request (:class:`~.data_labeling_service.CreateDatasetRequest`): + The request object. Request message for CreateDataset. + parent (:class:`str`): + Required. Dataset resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (:class:`~.gcd_dataset.Dataset`): + Required. The dataset to be created. + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_dataset.Dataset: + Dataset is the resource to hold your + data. You can request multiple labeling + tasks for a dataset while each one will + generate an AnnotatedDataset. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, dataset]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.CreateDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dataset, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_dataset( + self, + request: data_labeling_service.GetDatasetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Gets dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.GetDatasetRequest`): + The request object. Request message for GetDataSet. + name (:class:`str`): + Required. 
Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.Dataset: + Dataset is the resource to hold your + data. You can request multiple labeling + tasks for a dataset while each one will + generate an AnnotatedDataset. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dataset, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_datasets( + self, + request: data_labeling_service.ListDatasetsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetsAsyncPager: + r"""Lists datasets under a project. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListDatasetsRequest`): + The request object. Request message for ListDataset. + parent (:class:`str`): + Required. Dataset resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter on dataset is not + supported at this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDatasetsAsyncPager: + Results of listing datasets within a + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ListDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_datasets, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDatasetsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dataset( + self, + request: data_labeling_service.DeleteDatasetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteDatasetRequest`): + The request object. Request message for DeleteDataset. + name (:class:`str`): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.DeleteDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dataset, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def import_data( + self, + request: data_labeling_service.ImportDataRequest = None, + *, + name: str = None, + input_config: dataset.InputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports data into dataset based on source locations + defined in request. It can be called multiple times for + the same dataset. Each dataset can only have one long + running operation running on it. 
For example, no + labeling task (also long running operation) can be + started while importing is still ongoing. Vice versa. + + Args: + request (:class:`~.data_labeling_service.ImportDataRequest`): + The request object. Request message for ImportData API. + name (:class:`str`): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + input_config (:class:`~.dataset.InputConfig`): + Required. Specify the input source of + the data. + This corresponds to the ``input_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.operations.ImportDataOperationResponse``: + Response used for ImportData longrunning operation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, input_config]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ImportDataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if input_config is not None: + request.input_config = input_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_data, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + operations.ImportDataOperationResponse, + metadata_type=operations.ImportDataOperationMetadata, + ) + + # Done; return the response. + return response + + async def export_data( + self, + request: data_labeling_service.ExportDataRequest = None, + *, + name: str = None, + annotated_dataset: str = None, + filter: str = None, + output_config: dataset.OutputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports data and annotations from dataset. + + Args: + request (:class:`~.data_labeling_service.ExportDataRequest`): + The request object. Request message for ExportData API. + name (:class:`str`): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + annotated_dataset (:class:`str`): + Required. Annotated dataset resource name. DataItem in + Dataset and their annotations in specified annotated + dataset will be exported. It's in format of + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + This corresponds to the ``annotated_dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. 
Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + output_config (:class:`~.dataset.OutputConfig`): + Required. Specify the output + destination. + This corresponds to the ``output_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.operations.ExportDataOperationResponse``: + Response used for ExportDataset longrunning operation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any( + [name, annotated_dataset, filter, output_config] + ): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ExportDataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if annotated_dataset is not None: + request.annotated_dataset = annotated_dataset + if filter is not None: + request.filter = filter + if output_config is not None: + request.output_config = output_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_data, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + operations.ExportDataOperationResponse, + metadata_type=operations.ExportDataOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_data_item( + self, + request: data_labeling_service.GetDataItemRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.DataItem: + r"""Gets a data item in a dataset by resource name. This + API can be called after data are imported into dataset. + + Args: + request (:class:`~.data_labeling_service.GetDataItemRequest`): + The request object. Request message for GetDataItem. + name (:class:`str`): + Required. The name of the data item to get, format: + projects/{project_id}/datasets/{dataset_id}/dataItems/{data_item_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dataset.DataItem: + DataItem is a piece of data, without + annotation. For example, an image. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetDataItemRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_data_item, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_data_items( + self, + request: data_labeling_service.ListDataItemsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataItemsAsyncPager: + r"""Lists data items in a dataset. This API can be called + after data are imported into dataset. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListDataItemsRequest`): + The request object. 
Request message for ListDataItems. + parent (:class:`str`): + Required. Name of the dataset to list data items, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDataItemsAsyncPager: + Results of listing data items in a + dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ListDataItemsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_data_items, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataItemsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_annotated_dataset( + self, + request: data_labeling_service.GetAnnotatedDatasetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.AnnotatedDataset: + r"""Gets an annotated dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.GetAnnotatedDatasetRequest`): + The request object. Request message for + GetAnnotatedDataset. + name (:class:`str`): + Required. Name of the annotated dataset to get, format: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dataset.AnnotatedDataset: + AnnotatedDataset is a set holding + annotations for data in a Dataset. Each + labeling task will generate an + AnnotatedDataset under the Dataset that + the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetAnnotatedDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_annotated_dataset, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_annotated_datasets( + self, + request: data_labeling_service.ListAnnotatedDatasetsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnnotatedDatasetsAsyncPager: + r"""Lists annotated datasets for a dataset. Pagination is + supported. 
+ + Args: + request (:class:`~.data_labeling_service.ListAnnotatedDatasetsRequest`): + The request object. Request message for + ListAnnotatedDatasets. + parent (:class:`str`): + Required. Name of the dataset to list annotated + datasets, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListAnnotatedDatasetsAsyncPager: + Results of listing annotated datasets + for a dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ListAnnotatedDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_annotated_datasets, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAnnotatedDatasetsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_annotated_dataset( + self, + request: data_labeling_service.DeleteAnnotatedDatasetRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an annotated dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteAnnotatedDatasetRequest`): + The request object. Request message for + DeleteAnnotatedDataset. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = data_labeling_service.DeleteAnnotatedDatasetRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_annotated_dataset, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def label_image( + self, + request: data_labeling_service.LabelImageRequest = None, + *, + parent: str = None, + basic_config: human_annotation_config.HumanAnnotationConfig = None, + feature: data_labeling_service.LabelImageRequest.Feature = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a labeling task for image. The type of image + labeling task is configured by feature in the request. + + Args: + request (:class:`~.data_labeling_service.LabelImageRequest`): + The request object. Request message for starting an + image labeling task. + parent (:class:`str`): + Required. Name of the dataset to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + basic_config (:class:`~.human_annotation_config.HumanAnnotationConfig`): + Required. Basic human annotation + config. + This corresponds to the ``basic_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + feature (:class:`~.data_labeling_service.LabelImageRequest.Feature`): + Required. The type of image labeling + task. + This corresponds to the ``feature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.dataset.AnnotatedDataset``: AnnotatedDataset + is a set holding annotations for data in a Dataset. Each + labeling task will generate an AnnotatedDataset under + the Dataset that the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, basic_config, feature]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.LabelImageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if basic_config is not None: + request.basic_config = basic_config + if feature is not None: + request.feature = feature + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.label_image, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + dataset.AnnotatedDataset, + metadata_type=operations.LabelOperationMetadata, + ) + + # Done; return the response. + return response + + async def label_video( + self, + request: data_labeling_service.LabelVideoRequest = None, + *, + parent: str = None, + basic_config: human_annotation_config.HumanAnnotationConfig = None, + feature: data_labeling_service.LabelVideoRequest.Feature = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a labeling task for video. The type of video + labeling task is configured by feature in the request. + + Args: + request (:class:`~.data_labeling_service.LabelVideoRequest`): + The request object. Request message for LabelVideo. + parent (:class:`str`): + Required. Name of the dataset to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + basic_config (:class:`~.human_annotation_config.HumanAnnotationConfig`): + Required. Basic human annotation + config. + This corresponds to the ``basic_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + feature (:class:`~.data_labeling_service.LabelVideoRequest.Feature`): + Required. The type of video labeling + task. + This corresponds to the ``feature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.dataset.AnnotatedDataset``: AnnotatedDataset + is a set holding annotations for data in a Dataset. Each + labeling task will generate an AnnotatedDataset under + the Dataset that the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, basic_config, feature]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.LabelVideoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if basic_config is not None: + request.basic_config = basic_config + if feature is not None: + request.feature = feature + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.label_video, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + dataset.AnnotatedDataset, + metadata_type=operations.LabelOperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def label_text( + self, + request: data_labeling_service.LabelTextRequest = None, + *, + parent: str = None, + basic_config: human_annotation_config.HumanAnnotationConfig = None, + feature: data_labeling_service.LabelTextRequest.Feature = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a labeling task for text. The type of text + labeling task is configured by feature in the request. + + Args: + request (:class:`~.data_labeling_service.LabelTextRequest`): + The request object. Request message for LabelText. + parent (:class:`str`): + Required. Name of the data set to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + basic_config (:class:`~.human_annotation_config.HumanAnnotationConfig`): + Required. Basic human annotation + config. + This corresponds to the ``basic_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + feature (:class:`~.data_labeling_service.LabelTextRequest.Feature`): + Required. The type of text labeling + task. + This corresponds to the ``feature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.dataset.AnnotatedDataset``: AnnotatedDataset + is a set holding annotations for data in a Dataset. 
Each + labeling task will generate an AnnotatedDataset under + the Dataset that the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, basic_config, feature]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.LabelTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if basic_config is not None: + request.basic_config = basic_config + if feature is not None: + request.feature = feature + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.label_text, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + dataset.AnnotatedDataset, + metadata_type=operations.LabelOperationMetadata, + ) + + # Done; return the response. + return response + + async def get_example( + self, + request: data_labeling_service.GetExampleRequest = None, + *, + name: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Example: + r"""Gets an example by resource name, including both data + and annotation. 
+ + Args: + request (:class:`~.data_labeling_service.GetExampleRequest`): + The request object. Request message for GetExample + name (:class:`str`): + Required. Name of example, format: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id}/examples/{example_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. An expression for filtering Examples. Filter + by annotation_spec.display_name is supported. Format + "annotation_spec.display_name = {display_name}". + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.Example: + An Example is a piece of data and its + annotation. For example, an image with + label "house". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetExampleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_example, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_examples( + self, + request: data_labeling_service.ListExamplesRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExamplesAsyncPager: + r"""Lists examples in an annotated dataset. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListExamplesRequest`): + The request object. Request message for ListExamples. + parent (:class:`str`): + Required. Example resource parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. An expression for filtering Examples. For + annotated datasets that have annotation spec set, filter + by annotation_spec.display_name is supported. Format + "annotation_spec.display_name = {display_name}". + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.ListExamplesAsyncPager:
+ Results of listing Examples in an
+ annotated dataset.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ if request is not None and any([parent, filter]):
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = data_labeling_service.ListExamplesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_examples,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=30.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ ),
+ default_timeout=30.0,
+ client_info=_client_info,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListExamplesAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response + + async def create_annotation_spec_set( + self, + request: data_labeling_service.CreateAnnotationSpecSetRequest = None, + *, + parent: str = None, + annotation_spec_set: gcd_annotation_spec_set.AnnotationSpecSet = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_annotation_spec_set.AnnotationSpecSet: + r"""Creates an annotation spec set by providing a set of + labels. + + Args: + request (:class:`~.data_labeling_service.CreateAnnotationSpecSetRequest`): + The request object. Request message for + CreateAnnotationSpecSet. + parent (:class:`str`): + Required. AnnotationSpecSet resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + annotation_spec_set (:class:`~.gcd_annotation_spec_set.AnnotationSpecSet`): + Required. Annotation spec set to create. Annotation + specs must be included. Only one annotation spec will be + accepted for annotation specs with same display_name. + This corresponds to the ``annotation_spec_set`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_annotation_spec_set.AnnotationSpecSet: + An AnnotationSpecSet is a collection + of label definitions. For example, in + image classification tasks, you define a + set of possible labels for images as an + AnnotationSpecSet. An AnnotationSpecSet + is immutable upon creation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([parent, annotation_spec_set]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.CreateAnnotationSpecSetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if annotation_spec_set is not None: + request.annotation_spec_set = annotation_spec_set + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_annotation_spec_set, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_annotation_spec_set( + self, + request: data_labeling_service.GetAnnotationSpecSetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> annotation_spec_set.AnnotationSpecSet: + r"""Gets an annotation spec set by resource name. + + Args: + request (:class:`~.data_labeling_service.GetAnnotationSpecSetRequest`): + The request object. Request message for + GetAnnotationSpecSet. + name (:class:`str`): + Required. AnnotationSpecSet resource name, format: + projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.annotation_spec_set.AnnotationSpecSet: + An AnnotationSpecSet is a collection + of label definitions. For example, in + image classification tasks, you define a + set of possible labels for images as an + AnnotationSpecSet. An AnnotationSpecSet + is immutable upon creation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetAnnotationSpecSetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_annotation_spec_set, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_annotation_spec_sets( + self, + request: data_labeling_service.ListAnnotationSpecSetsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnnotationSpecSetsAsyncPager: + r"""Lists annotation spec sets for a project. Pagination + is supported. + + Args: + request (:class:`~.data_labeling_service.ListAnnotationSpecSetsRequest`): + The request object. Request message for + ListAnnotationSpecSets. + parent (:class:`str`): + Required. Parent of AnnotationSpecSet resource, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListAnnotationSpecSetsAsyncPager: + Results of listing annotation spec + set under a project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = data_labeling_service.ListAnnotationSpecSetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_annotation_spec_sets, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAnnotationSpecSetsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_annotation_spec_set( + self, + request: data_labeling_service.DeleteAnnotationSpecSetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an annotation spec set by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteAnnotationSpecSetRequest`): + The request object. Request message for + DeleteAnnotationSpecSet. + name (:class:`str`): + Required. AnnotationSpec resource name, format: + ``projects/{project_id}/annotationSpecSets/{annotation_spec_set_id}``. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.DeleteAnnotationSpecSetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_annotation_spec_set, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def create_instruction( + self, + request: data_labeling_service.CreateInstructionRequest = None, + *, + parent: str = None, + instruction: gcd_instruction.Instruction = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an instruction for how data should be + labeled. + + Args: + request (:class:`~.data_labeling_service.CreateInstructionRequest`): + The request object. Request message for + CreateInstruction. + parent (:class:`str`): + Required. Instruction resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instruction (:class:`~.gcd_instruction.Instruction`): + Required. Instruction of how to + perform the labeling task. + This corresponds to the ``instruction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.gcd_instruction.Instruction``: Instruction of + how to perform the labeling task for human operators. + Currently only PDF instruction is supported. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([parent, instruction]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.CreateInstructionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if instruction is not None: + request.instruction = instruction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instruction, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcd_instruction.Instruction, + metadata_type=operations.CreateInstructionMetadata, + ) + + # Done; return the response. + return response + + async def get_instruction( + self, + request: data_labeling_service.GetInstructionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instruction.Instruction: + r"""Gets an instruction by resource name. + + Args: + request (:class:`~.data_labeling_service.GetInstructionRequest`): + The request object. Request message for GetInstruction. + name (:class:`str`): + Required. 
Instruction resource name, format: + projects/{project_id}/instructions/{instruction_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.instruction.Instruction: + Instruction of how to perform the + labeling task for human operators. + Currently only PDF instruction is + supported. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetInstructionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instruction, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_instructions( + self, + request: data_labeling_service.ListInstructionsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstructionsAsyncPager: + r"""Lists instructions for a project. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListInstructionsRequest`): + The request object. Request message for + ListInstructions. + parent (:class:`str`): + Required. Instruction resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListInstructionsAsyncPager: + Results of listing instructions under + a project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ListInstructionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instructions, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstructionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_instruction( + self, + request: data_labeling_service.DeleteInstructionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an instruction object by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteInstructionRequest`): + The request object. Request message for + DeleteInstruction. + name (:class:`str`): + Required. Instruction resource name, format: + projects/{project_id}/instructions/{instruction_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.DeleteInstructionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instruction, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def get_evaluation( + self, + request: data_labeling_service.GetEvaluationRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> evaluation.Evaluation: + r"""Gets an evaluation by resource name (to search, use + [projects.evaluations.search][google.cloud.datalabeling.v1beta1.DataLabelingService.SearchEvaluations]). + + Args: + request (:class:`~.data_labeling_service.GetEvaluationRequest`): + The request object. Request message for GetEvaluation. 
+ name (:class:`str`): + Required. Name of the evaluation. Format: + + "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}' + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.evaluation.Evaluation: + Describes an evaluation between a machine learning + model's predictions and ground truth labels. Created + when an + [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob] + runs successfully. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetEvaluationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_evaluation, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def search_evaluations( + self, + request: data_labeling_service.SearchEvaluationsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchEvaluationsAsyncPager: + r"""Searches + [evaluations][google.cloud.datalabeling.v1beta1.Evaluation] + within a project. + + Args: + request (:class:`~.data_labeling_service.SearchEvaluationsRequest`): + The request object. Request message for + SearchEvaluation. + parent (:class:`str`): + Required. Evaluation search parent (project ID). Format: + "projects/{project_id}". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. To search evaluations, you can filter by the + following: + + - evaluation\_job.evaluation_job_id (the last part of + [EvaluationJob.name][google.cloud.datalabeling.v1beta1.EvaluationJob.name]) + - evaluation\_job.model_id (the {model_name} portion of + [EvaluationJob.modelVersion][google.cloud.datalabeling.v1beta1.EvaluationJob.model_version]) + - evaluation\_job.evaluation_job_run_time_start + (Minimum threshold for the + [evaluationJobRunTime][google.cloud.datalabeling.v1beta1.Evaluation.evaluation_job_run_time] + that created the evaluation) + - evaluation\_job.evaluation_job_run_time_end (Maximum + threshold for the + [evaluationJobRunTime][google.cloud.datalabeling.v1beta1.Evaluation.evaluation_job_run_time] + that created the evaluation) + - evaluation\_job.job_state + ([EvaluationJob.state][google.cloud.datalabeling.v1beta1.EvaluationJob.state]) + - annotation\_spec.display_name (the Evaluation + contains a metric for the annotation spec with this + 
[displayName][google.cloud.datalabeling.v1beta1.AnnotationSpec.display_name])
+
+                To filter by multiple criteria, use the ``AND`` operator
+                or the ``OR`` operator. The following example shows a
+                string that filters by several criteria:
+
+                "evaluation\ *job.evaluation_job_id =
+                {evaluation_job_id} AND evaluation*\ job.model_id =
+                {model_name} AND
+                evaluation\ *job.evaluation_job_run_time_start =
+                {timestamp_1} AND
+                evaluation*\ job.evaluation_job_run_time_end =
+                {timestamp_2} AND annotation\_spec.display_name =
+                {display_name}".
+                This corresponds to the ``filter`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.pagers.SearchEvaluationsAsyncPager:
+                Results of searching evaluations.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        if request is not None and any([parent, filter]):
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = data_labeling_service.SearchEvaluationsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+
+        if parent is not None:
+            request.parent = parent
+        if filter is not None:
+            request.filter = filter
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_evaluations, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchEvaluationsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_example_comparisons( + self, + request: data_labeling_service.SearchExampleComparisonsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchExampleComparisonsAsyncPager: + r"""Searches example comparisons from an evaluation. The + return format is a list of example comparisons that show + ground truth and prediction(s) for a single input. + Search by providing an evaluation ID. + + Args: + request (:class:`~.data_labeling_service.SearchExampleComparisonsRequest`): + The request object. Request message of + SearchExampleComparisons. + parent (:class:`str`): + Required. Name of the + [Evaluation][google.cloud.datalabeling.v1beta1.Evaluation] + resource to search for example comparisons from. Format: + + "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.SearchExampleComparisonsAsyncPager: + Results of searching example + comparisons. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.SearchExampleComparisonsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_example_comparisons, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchExampleComparisonsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_evaluation_job( + self, + request: data_labeling_service.CreateEvaluationJobRequest = None, + *, + parent: str = None, + job: evaluation_job.EvaluationJob = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> evaluation_job.EvaluationJob: + r"""Creates an evaluation job. + + Args: + request (:class:`~.data_labeling_service.CreateEvaluationJobRequest`): + The request object. Request message for + CreateEvaluationJob. + parent (:class:`str`): + Required. Evaluation job resource parent. Format: + "projects/{project_id}". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (:class:`~.evaluation_job.EvaluationJob`): + Required. The evaluation job to + create. + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.evaluation_job.EvaluationJob: + Defines an evaluation job that runs periodically to + generate + [Evaluations][google.cloud.datalabeling.v1beta1.Evaluation]. + `Creating an evaluation + job `__ + is the starting point for using continuous evaluation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, job]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = data_labeling_service.CreateEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_evaluation_job( + self, + request: data_labeling_service.UpdateEvaluationJobRequest = None, + *, + evaluation_job: gcd_evaluation_job.EvaluationJob = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_evaluation_job.EvaluationJob: + r"""Updates an evaluation job. You can only update certain fields of + the job's + [EvaluationJobConfig][google.cloud.datalabeling.v1beta1.EvaluationJobConfig]: + ``humanAnnotationConfig.instruction``, ``exampleCount``, and + ``exampleSamplePercentage``. + + If you want to change any other aspect of the evaluation job, + you must delete the job and create a new one. + + Args: + request (:class:`~.data_labeling_service.UpdateEvaluationJobRequest`): + The request object. Request message for + UpdateEvaluationJob. + evaluation_job (:class:`~.gcd_evaluation_job.EvaluationJob`): + Required. Evaluation job that is + going to be updated. 
+ This corresponds to the ``evaluation_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Optional. Mask for which fields to update. You can only + provide the following fields: + + - ``evaluationJobConfig.humanAnnotationConfig.instruction`` + - ``evaluationJobConfig.exampleCount`` + - ``evaluationJobConfig.exampleSamplePercentage`` + + You can provide more than one of these fields by + separating them with commas. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_evaluation_job.EvaluationJob: + Defines an evaluation job that runs periodically to + generate + [Evaluations][google.cloud.datalabeling.v1beta1.Evaluation]. + `Creating an evaluation + job `__ + is the starting point for using continuous evaluation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([evaluation_job, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.UpdateEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if evaluation_job is not None: + request.evaluation_job = evaluation_job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("evaluation_job.name", request.evaluation_job.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_evaluation_job( + self, + request: data_labeling_service.GetEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> evaluation_job.EvaluationJob: + r"""Gets an evaluation job by resource name. + + Args: + request (:class:`~.data_labeling_service.GetEvaluationJobRequest`): + The request object. Request message for + GetEvaluationJob. + name (:class:`str`): + Required. Name of the evaluation job. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.evaluation_job.EvaluationJob: + Defines an evaluation job that runs periodically to + generate + [Evaluations][google.cloud.datalabeling.v1beta1.Evaluation]. + `Creating an evaluation + job `__ + is the starting point for using continuous evaluation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.GetEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_evaluation_job, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def pause_evaluation_job( + self, + request: data_labeling_service.PauseEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Pauses an evaluation job. Pausing an evaluation job that is + already in a ``PAUSED`` state is a no-op. + + Args: + request (:class:`~.data_labeling_service.PauseEvaluationJobRequest`): + The request object. Request message for + PauseEvaluationJob. + name (:class:`str`): + Required. Name of the evaluation job that is going to be + paused. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.PauseEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pause_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def resume_evaluation_job( + self, + request: data_labeling_service.ResumeEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Resumes a paused evaluation job. A deleted evaluation + job can't be resumed. Resuming a running or scheduled + evaluation job is a no-op. + + Args: + request (:class:`~.data_labeling_service.ResumeEvaluationJobRequest`): + The request object. Request message ResumeEvaluationJob. + name (:class:`str`): + Required. 
Name of the evaluation job that is going to be + resumed. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ResumeEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.resume_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def delete_evaluation_job( + self, + request: data_labeling_service.DeleteEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Stops and deletes an evaluation job. 
+ + Args: + request (:class:`~.data_labeling_service.DeleteEvaluationJobRequest`): + The request object. Request message DeleteEvaluationJob. + name (:class:`str`): + Required. Name of the evaluation job that is going to be + deleted. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.DeleteEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_evaluation_job, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def list_evaluation_jobs( + self, + request: data_labeling_service.ListEvaluationJobsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEvaluationJobsAsyncPager: + r"""Lists all evaluation jobs within a project with + possible filters. Pagination is supported. + + Args: + request (:class:`~.data_labeling_service.ListEvaluationJobsRequest`): + The request object. Request message for + ListEvaluationJobs. + parent (:class:`str`): + Required. Evaluation job resource parent. Format: + "projects/{project_id}". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. You can filter the jobs to list by model_id + (also known as model_name, as described in + [EvaluationJob.modelVersion][google.cloud.datalabeling.v1beta1.EvaluationJob.model_version]) + or by evaluation job state (as described in + [EvaluationJob.state][google.cloud.datalabeling.v1beta1.EvaluationJob.state]). + To filter by both criteria, use the ``AND`` operator or + the ``OR`` operator. For example, you can use the + following string for your filter: + "evaluation\ *job.model_id = {model_name} AND + evaluation*\ job.state = {evaluation_job_state}". + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListEvaluationJobsAsyncPager: + Results for listing evaluation jobs. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, filter]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = data_labeling_service.ListEvaluationJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_evaluation_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEvaluationJobsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datalabeling", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DataLabelingServiceAsyncClient",) diff --git a/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py new file mode 100644 index 0000000..9359be5 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py @@ -0,0 +1,3227 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation +from google.api_core import operation_async +from google.cloud.datalabeling_v1beta1.services.data_labeling_service import pagers +from google.cloud.datalabeling_v1beta1.types import annotation +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.cloud.datalabeling_v1beta1.types import ( + annotation_spec_set as gcd_annotation_spec_set, +) +from google.cloud.datalabeling_v1beta1.types import data_labeling_service +from google.cloud.datalabeling_v1beta1.types import data_payloads +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job +from google.cloud.datalabeling_v1beta1.types import evaluation_job as gcd_evaluation_job +from google.cloud.datalabeling_v1beta1.types import human_annotation_config +from google.cloud.datalabeling_v1beta1.types import instruction +from google.cloud.datalabeling_v1beta1.types import instruction as gcd_instruction +from google.cloud.datalabeling_v1beta1.types import operations +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from 
.transports.base import DataLabelingServiceTransport +from .transports.grpc import DataLabelingServiceGrpcTransport +from .transports.grpc_asyncio import DataLabelingServiceGrpcAsyncIOTransport + + +class DataLabelingServiceClientMeta(type): + """Metaclass for the DataLabelingService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DataLabelingServiceTransport]] + _transport_registry["grpc"] = DataLabelingServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataLabelingServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[DataLabelingServiceTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataLabelingServiceClient(metaclass=DataLabelingServiceClientMeta): + """""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "datalabeling.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @staticmethod + def annotation_spec_set_path(project: str, annotation_spec_set: str,) -> str: + """Return a fully-qualified annotation_spec_set string.""" + return "projects/{project}/annotationSpecSets/{annotation_spec_set}".format( + project=project, annotation_spec_set=annotation_spec_set, + ) + + @staticmethod + def parse_annotation_spec_set_path(path: str) -> Dict[str, str]: + """Parse a annotation_spec_set path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/annotationSpecSets/(?P<annotation_spec_set>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def dataset_path(project: str, dataset: str,) -> str: + """Return a fully-qualified dataset string.""" + return "projects/{project}/datasets/{dataset}".format( + project=project, dataset=dataset, + ) + + @staticmethod + def parse_dataset_path(path: str) -> Dict[str, str]: + """Parse a
dataset path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def evaluation_job_path(project: str, evaluation_job: str,) -> str: + """Return a fully-qualified evaluation_job string.""" + return "projects/{project}/evaluationJobs/{evaluation_job}".format( + project=project, evaluation_job=evaluation_job, + ) + + @staticmethod + def parse_evaluation_job_path(path: str) -> Dict[str, str]: + """Parse a evaluation_job path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/evaluationJobs/(?P<evaluation_job>.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def instruction_path(project: str, instruction: str,) -> str: + """Return a fully-qualified instruction string.""" + return "projects/{project}/instructions/{instruction}".format( + project=project, instruction=instruction, + ) + + @staticmethod + def parse_instruction_path(path: str) -> Dict[str, str]: + """Parse a instruction path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/instructions/(?P<instruction>.+?)$", path + ) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DataLabelingServiceTransport] = None, + client_options: ClientOptions = None, + ) -> None: + """Instantiate the data labeling service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataLabelingServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DataLabelingServiceTransport): + # transport is a DataLabelingServiceTransport instance. 
+ if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + ) + + def create_dataset( + self, + request: data_labeling_service.CreateDatasetRequest = None, + *, + parent: str = None, + dataset: gcd_dataset.Dataset = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_dataset.Dataset: + r"""Creates dataset. If success return a Dataset + resource. + + Args: + request (:class:`~.data_labeling_service.CreateDatasetRequest`): + The request object. Request message for CreateDataset. + parent (:class:`str`): + Required. Dataset resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dataset (:class:`~.gcd_dataset.Dataset`): + Required. The dataset to be created. + This corresponds to the ``dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_dataset.Dataset: + Dataset is the resource to hold your + data. 
You can request multiple labeling + tasks for a dataset while each one will + generate an AnnotatedDataset. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, dataset]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.CreateDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.CreateDatasetRequest): + request = data_labeling_service.CreateDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if dataset is not None: + request.dataset = dataset + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_dataset( + self, + request: data_labeling_service.GetDatasetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Dataset: + r"""Gets dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.GetDatasetRequest`): + The request object. 
Request message for GetDataSet. + name (:class:`str`): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.Dataset: + Dataset is the resource to hold your + data. You can request multiple labeling + tasks for a dataset while each one will + generate an AnnotatedDataset. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.GetDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetDatasetRequest): + request = data_labeling_service.GetDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_datasets( + self, + request: data_labeling_service.ListDatasetsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDatasetsPager: + r"""Lists datasets under a project. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListDatasetsRequest`): + The request object. Request message for ListDataset. + parent (:class:`str`): + Required. Dataset resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter on dataset is not + supported at this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDatasetsPager: + Results of listing datasets within a + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ListDatasetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ListDatasetsRequest): + request = data_labeling_service.ListDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_datasets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDatasetsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dataset( + self, + request: data_labeling_service.DeleteDatasetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteDatasetRequest`): + The request object. Request message for DeleteDataset. + name (:class:`str`): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.DeleteDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.DeleteDatasetRequest): + request = data_labeling_service.DeleteDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def import_data( + self, + request: data_labeling_service.ImportDataRequest = None, + *, + name: str = None, + input_config: dataset.InputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports data into dataset based on source locations + defined in request. 
It can be called multiple times for + the same dataset. Each dataset can only have one long + running operation running on it. For example, no + labeling task (also long running operation) can be + started while importing is still ongoing. Vice versa. + + Args: + request (:class:`~.data_labeling_service.ImportDataRequest`): + The request object. Request message for ImportData API. + name (:class:`str`): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + input_config (:class:`~.dataset.InputConfig`): + Required. Specify the input source of + the data. + This corresponds to the ``input_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.operations.ImportDataOperationResponse``: + Response used for ImportData longrunning operation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, input_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ImportDataRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, data_labeling_service.ImportDataRequest): + request = data_labeling_service.ImportDataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if input_config is not None: + request.input_config = input_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + operations.ImportDataOperationResponse, + metadata_type=operations.ImportDataOperationMetadata, + ) + + # Done; return the response. + return response + + def export_data( + self, + request: data_labeling_service.ExportDataRequest = None, + *, + name: str = None, + annotated_dataset: str = None, + filter: str = None, + output_config: dataset.OutputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports data and annotations from dataset. + + Args: + request (:class:`~.data_labeling_service.ExportDataRequest`): + The request object. Request message for ExportData API. + name (:class:`str`): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + annotated_dataset (:class:`str`): + Required. Annotated dataset resource name. 
DataItem in + Dataset and their annotations in specified annotated + dataset will be exported. It's in format of + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + This corresponds to the ``annotated_dataset`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + output_config (:class:`~.dataset.OutputConfig`): + Required. Specify the output + destination. + This corresponds to the ``output_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.operations.ExportDataOperationResponse``: + Response used for ExportDataset longrunning operation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, annotated_dataset, filter, output_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ExportDataRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, data_labeling_service.ExportDataRequest): + request = data_labeling_service.ExportDataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if annotated_dataset is not None: + request.annotated_dataset = annotated_dataset + if filter is not None: + request.filter = filter + if output_config is not None: + request.output_config = output_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_data] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + operations.ExportDataOperationResponse, + metadata_type=operations.ExportDataOperationMetadata, + ) + + # Done; return the response. + return response + + def get_data_item( + self, + request: data_labeling_service.GetDataItemRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.DataItem: + r"""Gets a data item in a dataset by resource name. This + API can be called after data are imported into dataset. + + Args: + request (:class:`~.data_labeling_service.GetDataItemRequest`): + The request object. Request message for GetDataItem. + name (:class:`str`): + Required. 
The name of the data item to get, format: + projects/{project_id}/datasets/{dataset_id}/dataItems/{data_item_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.DataItem: + DataItem is a piece of data, without + annotation. For example, an image. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.GetDataItemRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetDataItemRequest): + request = data_labeling_service.GetDataItemRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_item] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_data_items( + self, + request: data_labeling_service.ListDataItemsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataItemsPager: + r"""Lists data items in a dataset. This API can be called + after data are imported into dataset. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListDataItemsRequest`): + The request object. Request message for ListDataItems. + parent (:class:`str`): + Required. Name of the dataset to list data items, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDataItemsPager: + Results of listing data items in a + dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ListDataItemsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ListDataItemsRequest): + request = data_labeling_service.ListDataItemsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_items] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataItemsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_annotated_dataset( + self, + request: data_labeling_service.GetAnnotatedDatasetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.AnnotatedDataset: + r"""Gets an annotated dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.GetAnnotatedDatasetRequest`): + The request object. Request message for + GetAnnotatedDataset. + name (:class:`str`): + Required. 
Name of the annotated dataset to get, format: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.AnnotatedDataset: + AnnotatedDataset is a set holding + annotations for data in a Dataset. Each + labeling task will generate an + AnnotatedDataset under the Dataset that + the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.GetAnnotatedDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetAnnotatedDatasetRequest): + request = data_labeling_service.GetAnnotatedDatasetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_annotated_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_annotated_datasets( + self, + request: data_labeling_service.ListAnnotatedDatasetsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnnotatedDatasetsPager: + r"""Lists annotated datasets for a dataset. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListAnnotatedDatasetsRequest`): + The request object. Request message for + ListAnnotatedDatasets. + parent (:class:`str`): + Required. Name of the dataset to list annotated + datasets, format: + projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListAnnotatedDatasetsPager: + Results of listing annotated datasets + for a dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ListAnnotatedDatasetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ListAnnotatedDatasetsRequest): + request = data_labeling_service.ListAnnotatedDatasetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_annotated_datasets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAnnotatedDatasetsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_annotated_dataset( + self, + request: data_labeling_service.DeleteAnnotatedDatasetRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an annotated dataset by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteAnnotatedDatasetRequest`): + The request object. 
Request message for + DeleteAnnotatedDataset. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.DeleteAnnotatedDatasetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.DeleteAnnotatedDatasetRequest): + request = data_labeling_service.DeleteAnnotatedDatasetRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_annotated_dataset] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def label_image( + self, + request: data_labeling_service.LabelImageRequest = None, + *, + parent: str = None, + basic_config: human_annotation_config.HumanAnnotationConfig = None, + feature: data_labeling_service.LabelImageRequest.Feature = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a labeling task for image. The type of image + labeling task is configured by feature in the request. + + Args: + request (:class:`~.data_labeling_service.LabelImageRequest`): + The request object. Request message for starting an + image labeling task. + parent (:class:`str`): + Required. 
Name of the dataset to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + basic_config (:class:`~.human_annotation_config.HumanAnnotationConfig`): + Required. Basic human annotation + config. + This corresponds to the ``basic_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + feature (:class:`~.data_labeling_service.LabelImageRequest.Feature`): + Required. The type of image labeling + task. + This corresponds to the ``feature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.dataset.AnnotatedDataset``: AnnotatedDataset + is a set holding annotations for data in a Dataset. Each + labeling task will generate an AnnotatedDataset under + the Dataset that the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, basic_config, feature]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.LabelImageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, data_labeling_service.LabelImageRequest): + request = data_labeling_service.LabelImageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if basic_config is not None: + request.basic_config = basic_config + if feature is not None: + request.feature = feature + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.label_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + dataset.AnnotatedDataset, + metadata_type=operations.LabelOperationMetadata, + ) + + # Done; return the response. + return response + + def label_video( + self, + request: data_labeling_service.LabelVideoRequest = None, + *, + parent: str = None, + basic_config: human_annotation_config.HumanAnnotationConfig = None, + feature: data_labeling_service.LabelVideoRequest.Feature = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a labeling task for video. The type of video + labeling task is configured by feature in the request. + + Args: + request (:class:`~.data_labeling_service.LabelVideoRequest`): + The request object. Request message for LabelVideo. + parent (:class:`str`): + Required. 
Name of the dataset to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + basic_config (:class:`~.human_annotation_config.HumanAnnotationConfig`): + Required. Basic human annotation + config. + This corresponds to the ``basic_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + feature (:class:`~.data_labeling_service.LabelVideoRequest.Feature`): + Required. The type of video labeling + task. + This corresponds to the ``feature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.dataset.AnnotatedDataset``: AnnotatedDataset + is a set holding annotations for data in a Dataset. Each + labeling task will generate an AnnotatedDataset under + the Dataset that the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, basic_config, feature]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.LabelVideoRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, data_labeling_service.LabelVideoRequest): + request = data_labeling_service.LabelVideoRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if basic_config is not None: + request.basic_config = basic_config + if feature is not None: + request.feature = feature + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.label_video] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + dataset.AnnotatedDataset, + metadata_type=operations.LabelOperationMetadata, + ) + + # Done; return the response. + return response + + def label_text( + self, + request: data_labeling_service.LabelTextRequest = None, + *, + parent: str = None, + basic_config: human_annotation_config.HumanAnnotationConfig = None, + feature: data_labeling_service.LabelTextRequest.Feature = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a labeling task for text. The type of text + labeling task is configured by feature in the request. + + Args: + request (:class:`~.data_labeling_service.LabelTextRequest`): + The request object. Request message for LabelText. + parent (:class:`str`): + Required. 
Name of the data set to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + basic_config (:class:`~.human_annotation_config.HumanAnnotationConfig`): + Required. Basic human annotation + config. + This corresponds to the ``basic_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + feature (:class:`~.data_labeling_service.LabelTextRequest.Feature`): + Required. The type of text labeling + task. + This corresponds to the ``feature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.dataset.AnnotatedDataset``: AnnotatedDataset + is a set holding annotations for data in a Dataset. Each + labeling task will generate an AnnotatedDataset under + the Dataset that the task is requested for. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, basic_config, feature]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.LabelTextRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, data_labeling_service.LabelTextRequest): + request = data_labeling_service.LabelTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if basic_config is not None: + request.basic_config = basic_config + if feature is not None: + request.feature = feature + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.label_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + dataset.AnnotatedDataset, + metadata_type=operations.LabelOperationMetadata, + ) + + # Done; return the response. + return response + + def get_example( + self, + request: data_labeling_service.GetExampleRequest = None, + *, + name: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dataset.Example: + r"""Gets an example by resource name, including both data + and annotation. + + Args: + request (:class:`~.data_labeling_service.GetExampleRequest`): + The request object. Request message for GetExample + name (:class:`str`): + Required. Name of example, format: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id}/examples/{example_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. An expression for filtering Examples. 
Filter + by annotation_spec.display_name is supported. Format + "annotation_spec.display_name = {display_name}". + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dataset.Example: + An Example is a piece of data and its + annotation. For example, an image with + label "house". + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.GetExampleRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetExampleRequest): + request = data_labeling_service.GetExampleRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_example] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_examples( + self, + request: data_labeling_service.ListExamplesRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExamplesPager: + r"""Lists examples in an annotated dataset. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListExamplesRequest`): + The request object. Request message for ListExamples. + parent (:class:`str`): + Required. Example resource parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. An expression for filtering Examples. For + annotated datasets that have annotation spec set, filter + by annotation_spec.display_name is supported. Format + "annotation_spec.display_name = {display_name}". + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListExamplesPager: + Results of listing Examples in and + annotated dataset. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ListExamplesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ListExamplesRequest): + request = data_labeling_service.ListExamplesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_examples] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExamplesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def create_annotation_spec_set( + self, + request: data_labeling_service.CreateAnnotationSpecSetRequest = None, + *, + parent: str = None, + annotation_spec_set: gcd_annotation_spec_set.AnnotationSpecSet = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_annotation_spec_set.AnnotationSpecSet: + r"""Creates an annotation spec set by providing a set of + labels. 
+ + Args: + request (:class:`~.data_labeling_service.CreateAnnotationSpecSetRequest`): + The request object. Request message for + CreateAnnotationSpecSet. + parent (:class:`str`): + Required. AnnotationSpecSet resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + annotation_spec_set (:class:`~.gcd_annotation_spec_set.AnnotationSpecSet`): + Required. Annotation spec set to create. Annotation + specs must be included. Only one annotation spec will be + accepted for annotation specs with same display_name. + This corresponds to the ``annotation_spec_set`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_annotation_spec_set.AnnotationSpecSet: + An AnnotationSpecSet is a collection + of label definitions. For example, in + image classification tasks, you define a + set of possible labels for images as an + AnnotationSpecSet. An AnnotationSpecSet + is immutable upon creation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, annotation_spec_set]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.CreateAnnotationSpecSetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance( + request, data_labeling_service.CreateAnnotationSpecSetRequest + ): + request = data_labeling_service.CreateAnnotationSpecSetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if annotation_spec_set is not None: + request.annotation_spec_set = annotation_spec_set + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_annotation_spec_set + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_annotation_spec_set( + self, + request: data_labeling_service.GetAnnotationSpecSetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> annotation_spec_set.AnnotationSpecSet: + r"""Gets an annotation spec set by resource name. + + Args: + request (:class:`~.data_labeling_service.GetAnnotationSpecSetRequest`): + The request object. Request message for + GetAnnotationSpecSet. + name (:class:`str`): + Required. AnnotationSpecSet resource name, format: + projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.annotation_spec_set.AnnotationSpecSet: + An AnnotationSpecSet is a collection + of label definitions. For example, in + image classification tasks, you define a + set of possible labels for images as an + AnnotationSpecSet. An AnnotationSpecSet + is immutable upon creation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.GetAnnotationSpecSetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetAnnotationSpecSetRequest): + request = data_labeling_service.GetAnnotationSpecSetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_annotation_spec_set] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_annotation_spec_sets( + self, + request: data_labeling_service.ListAnnotationSpecSetsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAnnotationSpecSetsPager: + r"""Lists annotation spec sets for a project. Pagination + is supported. + + Args: + request (:class:`~.data_labeling_service.ListAnnotationSpecSetsRequest`): + The request object. Request message for + ListAnnotationSpecSets. + parent (:class:`str`): + Required. Parent of AnnotationSpecSet resource, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListAnnotationSpecSetsPager: + Results of listing annotation spec + set under a project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ListAnnotationSpecSetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ListAnnotationSpecSetsRequest): + request = data_labeling_service.ListAnnotationSpecSetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_annotation_spec_sets + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAnnotationSpecSetsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_annotation_spec_set( + self, + request: data_labeling_service.DeleteAnnotationSpecSetRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an annotation spec set by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteAnnotationSpecSetRequest`): + The request object. Request message for + DeleteAnnotationSpecSet. + name (:class:`str`): + Required. AnnotationSpec resource name, format: + ``projects/{project_id}/annotationSpecSets/{annotation_spec_set_id}``. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.DeleteAnnotationSpecSetRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, data_labeling_service.DeleteAnnotationSpecSetRequest + ): + request = data_labeling_service.DeleteAnnotationSpecSetRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_annotation_spec_set + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def create_instruction( + self, + request: data_labeling_service.CreateInstructionRequest = None, + *, + parent: str = None, + instruction: gcd_instruction.Instruction = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates an instruction for how data should be + labeled. + + Args: + request (:class:`~.data_labeling_service.CreateInstructionRequest`): + The request object. Request message for + CreateInstruction. + parent (:class:`str`): + Required. Instruction resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instruction (:class:`~.gcd_instruction.Instruction`): + Required. Instruction of how to + perform the labeling task. + This corresponds to the ``instruction`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:``~.gcd_instruction.Instruction``: Instruction of + how to perform the labeling task for human operators. + Currently only PDF instruction is supported. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, instruction]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.CreateInstructionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.CreateInstructionRequest): + request = data_labeling_service.CreateInstructionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if instruction is not None: + request.instruction = instruction + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instruction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcd_instruction.Instruction, + metadata_type=operations.CreateInstructionMetadata, + ) + + # Done; return the response. + return response + + def get_instruction( + self, + request: data_labeling_service.GetInstructionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> instruction.Instruction: + r"""Gets an instruction by resource name. + + Args: + request (:class:`~.data_labeling_service.GetInstructionRequest`): + The request object. 
Request message for GetInstruction. + name (:class:`str`): + Required. Instruction resource name, format: + projects/{project_id}/instructions/{instruction_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.instruction.Instruction: + Instruction of how to perform the + labeling task for human operators. + Currently only PDF instruction is + supported. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.GetInstructionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetInstructionRequest): + request = data_labeling_service.GetInstructionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instruction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_instructions( + self, + request: data_labeling_service.ListInstructionsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstructionsPager: + r"""Lists instructions for a project. Pagination is + supported. + + Args: + request (:class:`~.data_labeling_service.ListInstructionsRequest`): + The request object. Request message for + ListInstructions. + parent (:class:`str`): + Required. Instruction resource parent, format: + projects/{project_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Filter is not supported at + this moment. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListInstructionsPager: + Results of listing instructions under + a project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ListInstructionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ListInstructionsRequest): + request = data_labeling_service.ListInstructionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instructions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstructionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_instruction( + self, + request: data_labeling_service.DeleteInstructionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an instruction object by resource name. + + Args: + request (:class:`~.data_labeling_service.DeleteInstructionRequest`): + The request object. Request message for + DeleteInstruction. + name (:class:`str`): + Required. 
Instruction resource name, format: + projects/{project_id}/instructions/{instruction_id} + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.DeleteInstructionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.DeleteInstructionRequest): + request = data_labeling_service.DeleteInstructionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instruction] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ def get_evaluation(
+ self,
+ request: data_labeling_service.GetEvaluationRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> evaluation.Evaluation:
+ r"""Gets an evaluation by resource name (to search, use
+ [projects.evaluations.search][google.cloud.datalabeling.v1beta1.DataLabelingService.SearchEvaluations]).
+
+ Args:
+ request (:class:`~.data_labeling_service.GetEvaluationRequest`):
+ The request object. Request message for GetEvaluation.
+ name (:class:`str`):
+ Required. Name of the evaluation. Format:
+
+ "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}"
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.evaluation.Evaluation:
+ Describes an evaluation between a machine learning
+ model's predictions and ground truth labels. Created
+ when an
+ [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob]
+ runs successfully.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a data_labeling_service.GetEvaluationRequest.
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetEvaluationRequest): + request = data_labeling_service.GetEvaluationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_evaluation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def search_evaluations( + self, + request: data_labeling_service.SearchEvaluationsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchEvaluationsPager: + r"""Searches + [evaluations][google.cloud.datalabeling.v1beta1.Evaluation] + within a project. + + Args: + request (:class:`~.data_labeling_service.SearchEvaluationsRequest`): + The request object. Request message for + SearchEvaluation. + parent (:class:`str`): + Required. Evaluation search parent (project ID). Format: + "projects/{project_id}". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. 
To search evaluations, you can filter by the
+ following:
+
+ - evaluation\_job.evaluation_job_id (the last part of
+ [EvaluationJob.name][google.cloud.datalabeling.v1beta1.EvaluationJob.name])
+ - evaluation\_job.model_id (the {model_name} portion of
+ [EvaluationJob.modelVersion][google.cloud.datalabeling.v1beta1.EvaluationJob.model_version])
+ - evaluation\_job.evaluation_job_run_time_start
+ (Minimum threshold for the
+ [evaluationJobRunTime][google.cloud.datalabeling.v1beta1.Evaluation.evaluation_job_run_time]
+ that created the evaluation)
+ - evaluation\_job.evaluation_job_run_time_end (Maximum
+ threshold for the
+ [evaluationJobRunTime][google.cloud.datalabeling.v1beta1.Evaluation.evaluation_job_run_time]
+ that created the evaluation)
+ - evaluation\_job.job_state
+ ([EvaluationJob.state][google.cloud.datalabeling.v1beta1.EvaluationJob.state])
+ - annotation\_spec.display_name (the Evaluation
+ contains a metric for the annotation spec with this
+ [displayName][google.cloud.datalabeling.v1beta1.AnnotationSpec.display_name])
+
+ To filter by multiple criteria, use the ``AND`` operator
+ or the ``OR`` operator. The following example shows a
+ string that filters by several criteria:
+
+ "evaluation\_job.evaluation_job_id =
+ {evaluation_job_id} AND evaluation\_job.model_id =
+ {model_name} AND
+ evaluation\_job.evaluation_job_run_time_start =
+ {timestamp_1} AND
+ evaluation\_job.evaluation_job_run_time_end =
+ {timestamp_2} AND annotation\_spec.display_name =
+ {display_name}".
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.pagers.SearchEvaluationsPager:
+ Results of searching evaluations.
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.SearchEvaluationsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.SearchEvaluationsRequest): + request = data_labeling_service.SearchEvaluationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_evaluations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchEvaluationsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def search_example_comparisons( + self, + request: data_labeling_service.SearchExampleComparisonsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchExampleComparisonsPager: + r"""Searches example comparisons from an evaluation. The + return format is a list of example comparisons that show + ground truth and prediction(s) for a single input. + Search by providing an evaluation ID. + + Args: + request (:class:`~.data_labeling_service.SearchExampleComparisonsRequest`): + The request object. Request message of + SearchExampleComparisons. + parent (:class:`str`): + Required. Name of the + [Evaluation][google.cloud.datalabeling.v1beta1.Evaluation] + resource to search for example comparisons from. Format: + + "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.SearchExampleComparisonsPager: + Results of searching example + comparisons. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.SearchExampleComparisonsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, data_labeling_service.SearchExampleComparisonsRequest + ): + request = data_labeling_service.SearchExampleComparisonsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.search_example_comparisons + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchExampleComparisonsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def create_evaluation_job( + self, + request: data_labeling_service.CreateEvaluationJobRequest = None, + *, + parent: str = None, + job: evaluation_job.EvaluationJob = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> evaluation_job.EvaluationJob: + r"""Creates an evaluation job. + + Args: + request (:class:`~.data_labeling_service.CreateEvaluationJobRequest`): + The request object. Request message for + CreateEvaluationJob. + parent (:class:`str`): + Required. Evaluation job resource parent. Format: + "projects/{project_id}". 
+ This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (:class:`~.evaluation_job.EvaluationJob`): + Required. The evaluation job to + create. + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.evaluation_job.EvaluationJob: + Defines an evaluation job that runs periodically to + generate + [Evaluations][google.cloud.datalabeling.v1beta1.Evaluation]. + `Creating an evaluation + job `__ + is the starting point for using continuous evaluation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.CreateEvaluationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.CreateEvaluationJobRequest): + request = data_labeling_service.CreateEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_evaluation_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_evaluation_job( + self, + request: data_labeling_service.UpdateEvaluationJobRequest = None, + *, + evaluation_job: gcd_evaluation_job.EvaluationJob = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gcd_evaluation_job.EvaluationJob: + r"""Updates an evaluation job. You can only update certain fields of + the job's + [EvaluationJobConfig][google.cloud.datalabeling.v1beta1.EvaluationJobConfig]: + ``humanAnnotationConfig.instruction``, ``exampleCount``, and + ``exampleSamplePercentage``. + + If you want to change any other aspect of the evaluation job, + you must delete the job and create a new one. + + Args: + request (:class:`~.data_labeling_service.UpdateEvaluationJobRequest`): + The request object. Request message for + UpdateEvaluationJob. + evaluation_job (:class:`~.gcd_evaluation_job.EvaluationJob`): + Required. Evaluation job that is + going to be updated. + This corresponds to the ``evaluation_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Optional. Mask for which fields to update. You can only + provide the following fields: + + - ``evaluationJobConfig.humanAnnotationConfig.instruction`` + - ``evaluationJobConfig.exampleCount`` + - ``evaluationJobConfig.exampleSamplePercentage`` + + You can provide more than one of these fields by + separating them with commas. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gcd_evaluation_job.EvaluationJob: + Defines an evaluation job that runs periodically to + generate + [Evaluations][google.cloud.datalabeling.v1beta1.Evaluation]. + `Creating an evaluation + job `__ + is the starting point for using continuous evaluation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([evaluation_job, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.UpdateEvaluationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.UpdateEvaluationJobRequest): + request = data_labeling_service.UpdateEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if evaluation_job is not None: + request.evaluation_job = evaluation_job + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_evaluation_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("evaluation_job.name", request.evaluation_job.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_evaluation_job( + self, + request: data_labeling_service.GetEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> evaluation_job.EvaluationJob: + r"""Gets an evaluation job by resource name. + + Args: + request (:class:`~.data_labeling_service.GetEvaluationJobRequest`): + The request object. Request message for + GetEvaluationJob. + name (:class:`str`): + Required. Name of the evaluation job. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.evaluation_job.EvaluationJob: + Defines an evaluation job that runs periodically to + generate + [Evaluations][google.cloud.datalabeling.v1beta1.Evaluation]. + `Creating an evaluation + job `__ + is the starting point for using continuous evaluation. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.GetEvaluationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.GetEvaluationJobRequest): + request = data_labeling_service.GetEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_evaluation_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def pause_evaluation_job( + self, + request: data_labeling_service.PauseEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Pauses an evaluation job. Pausing an evaluation job that is + already in a ``PAUSED`` state is a no-op. + + Args: + request (:class:`~.data_labeling_service.PauseEvaluationJobRequest`): + The request object. Request message for + PauseEvaluationJob. + name (:class:`str`): + Required. Name of the evaluation job that is going to be + paused. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.PauseEvaluationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.PauseEvaluationJobRequest): + request = data_labeling_service.PauseEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pause_evaluation_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def resume_evaluation_job( + self, + request: data_labeling_service.ResumeEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Resumes a paused evaluation job. A deleted evaluation + job can't be resumed. Resuming a running or scheduled + evaluation job is a no-op. 
+ + Args: + request (:class:`~.data_labeling_service.ResumeEvaluationJobRequest`): + The request object. Request message ResumeEvaluationJob. + name (:class:`str`): + Required. Name of the evaluation job that is going to be + resumed. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ResumeEvaluationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ResumeEvaluationJobRequest): + request = data_labeling_service.ResumeEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.resume_evaluation_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def delete_evaluation_job( + self, + request: data_labeling_service.DeleteEvaluationJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Stops and deletes an evaluation job. + + Args: + request (:class:`~.data_labeling_service.DeleteEvaluationJobRequest`): + The request object. Request message DeleteEvaluationJob. + name (:class:`str`): + Required. Name of the evaluation job that is going to be + deleted. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.DeleteEvaluationJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.DeleteEvaluationJobRequest): + request = data_labeling_service.DeleteEvaluationJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_evaluation_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def list_evaluation_jobs( + self, + request: data_labeling_service.ListEvaluationJobsRequest = None, + *, + parent: str = None, + filter: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEvaluationJobsPager: + r"""Lists all evaluation jobs within a project with + possible filters. Pagination is supported. + + Args: + request (:class:`~.data_labeling_service.ListEvaluationJobsRequest`): + The request object. Request message for + ListEvaluationJobs. + parent (:class:`str`): + Required. Evaluation job resource parent. Format: + "projects/{project_id}". + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. You can filter the jobs to list by model_id + (also known as model_name, as described in + [EvaluationJob.modelVersion][google.cloud.datalabeling.v1beta1.EvaluationJob.model_version]) + or by evaluation job state (as described in + [EvaluationJob.state][google.cloud.datalabeling.v1beta1.EvaluationJob.state]). + To filter by both criteria, use the ``AND`` operator or + the ``OR`` operator. For example, you can use the + following string for your filter: + "evaluation\ *job.model_id = {model_name} AND + evaluation*\ job.state = {evaluation_job_state}". 
+ This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListEvaluationJobsPager: + Results for listing evaluation jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, filter]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a data_labeling_service.ListEvaluationJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, data_labeling_service.ListEvaluationJobsRequest): + request = data_labeling_service.ListEvaluationJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_evaluation_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEvaluationJobsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datalabeling", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DataLabelingServiceClient",) diff --git a/google/cloud/datalabeling_v1beta1/services/data_labeling_service/pagers.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/pagers.py new file mode 100644 index 0000000..1b12902 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/pagers.py @@ -0,0 +1,1209 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.cloud.datalabeling_v1beta1.types import data_labeling_service +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job +from google.cloud.datalabeling_v1beta1.types import instruction + + +class ListDatasetsPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListDatasetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_labeling_service.ListDatasetsResponse], + request: data_labeling_service.ListDatasetsRequest, + response: data_labeling_service.ListDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListDatasetsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListDatasetsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_labeling_service.ListDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dataset.Dataset]: + for page in self.pages: + yield from page.datasets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDatasetsAsyncPager: + """A pager for iterating through ``list_datasets`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListDatasetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``datasets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDatasets`` requests and continue to iterate + through the ``datasets`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[data_labeling_service.ListDatasetsResponse]], + request: data_labeling_service.ListDatasetsRequest, + response: data_labeling_service.ListDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListDatasetsRequest`): + The initial request object. 
+ response (:class:`~.data_labeling_service.ListDatasetsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.ListDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[data_labeling_service.ListDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dataset.Dataset]: + async def async_generator(): + async for page in self.pages: + for response in page.datasets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataItemsPager: + """A pager for iterating through ``list_data_items`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListDataItemsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataItems`` requests and continue to iterate + through the ``data_items`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListDataItemsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., data_labeling_service.ListDataItemsResponse], + request: data_labeling_service.ListDataItemsRequest, + response: data_labeling_service.ListDataItemsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListDataItemsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListDataItemsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.ListDataItemsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.ListDataItemsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dataset.DataItem]: + for page in self.pages: + yield from page.data_items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataItemsAsyncPager: + """A pager for iterating through ``list_data_items`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListDataItemsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_items`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataItems`` requests and continue to iterate + through the ``data_items`` field on the + corresponding responses. 
class ListAnnotatedDatasetsPager:
    """A pager for iterating through ``list_annotated_datasets`` requests.

    This class thinly wraps an initial
    :class:`~.data_labeling_service.ListAnnotatedDatasetsResponse` object,
    and provides an ``__iter__`` method that walks the
    ``annotated_datasets`` field, issuing further
    ``ListAnnotatedDatasets`` requests as additional pages are needed.

    All the usual
    :class:`~.data_labeling_service.ListAnnotatedDatasetsResponse`
    attributes are proxied from the most recently fetched response.
    """

    def __init__(
        self,
        method: Callable[..., data_labeling_service.ListAnnotatedDatasetsResponse],
        request: data_labeling_service.ListAnnotatedDatasetsRequest,
        response: data_labeling_service.ListAnnotatedDatasetsResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (:class:`~.data_labeling_service.ListAnnotatedDatasetsRequest`):
                The initial request object.
            response (:class:`~.data_labeling_service.ListAnnotatedDatasetsResponse`):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token updates don't mutate the caller's.
        self._request = data_labeling_service.ListAnnotatedDatasetsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute access to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[data_labeling_service.ListAnnotatedDatasetsResponse]:
        # Yield the current page, then keep fetching while the server
        # reports another page token.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[dataset.AnnotatedDataset]:
        # Flatten all pages into a single stream of items.
        return (item for page in self.pages for item in page.annotated_datasets)

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+ """ + self._method = method + self._request = data_labeling_service.ListAnnotatedDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.ListAnnotatedDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dataset.AnnotatedDataset]: + for page in self.pages: + yield from page.annotated_datasets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAnnotatedDatasetsAsyncPager: + """A pager for iterating through ``list_annotated_datasets`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListAnnotatedDatasetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``annotated_datasets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAnnotatedDatasets`` requests and continue to iterate + through the ``annotated_datasets`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListAnnotatedDatasetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[data_labeling_service.ListAnnotatedDatasetsResponse] + ], + request: data_labeling_service.ListAnnotatedDatasetsRequest, + response: data_labeling_service.ListAnnotatedDatasetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListAnnotatedDatasetsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListAnnotatedDatasetsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.ListAnnotatedDatasetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterable[data_labeling_service.ListAnnotatedDatasetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dataset.AnnotatedDataset]: + async def async_generator(): + async for page in self.pages: + for response in page.annotated_datasets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListExamplesPager: + """A pager for iterating through ``list_examples`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListExamplesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``examples`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListExamples`` requests and continue to iterate + through the ``examples`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListExamplesResponse` + attributes are available on the pager. 
class ListExamplesAsyncPager:
    """A pager for iterating through ``list_examples`` requests.

    This class thinly wraps an initial
    :class:`~.data_labeling_service.ListExamplesResponse` object, and
    provides an ``__aiter__`` method that walks the ``examples`` field,
    issuing further ``ListExamples`` requests as additional pages are
    needed.

    All the usual :class:`~.data_labeling_service.ListExamplesResponse`
    attributes are proxied from the most recently fetched response.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[data_labeling_service.ListExamplesResponse]],
        request: data_labeling_service.ListExamplesRequest,
        response: data_labeling_service.ListExamplesResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (:class:`~.data_labeling_service.ListExamplesRequest`):
                The initial request object.
            response (:class:`~.data_labeling_service.ListExamplesResponse`):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token updates don't mutate the caller's.
        self._request = data_labeling_service.ListExamplesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute access to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[data_labeling_service.ListExamplesResponse]:
        # Yield the current page, then keep fetching while the server
        # reports another page token.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterable[dataset.Example]:
        # Flatten all pages into a single async stream of items.
        async def _items():
            async for page in self.pages:
                for item in page.examples:
                    yield item

        return _items()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+ """ + self._method = method + self._request = data_labeling_service.ListExamplesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[data_labeling_service.ListExamplesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dataset.Example]: + async def async_generator(): + async for page in self.pages: + for response in page.examples: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAnnotationSpecSetsPager: + """A pager for iterating through ``list_annotation_spec_sets`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListAnnotationSpecSetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``annotation_spec_sets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAnnotationSpecSets`` requests and continue to iterate + through the ``annotation_spec_sets`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListAnnotationSpecSetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_labeling_service.ListAnnotationSpecSetsResponse], + request: data_labeling_service.ListAnnotationSpecSetsRequest, + response: data_labeling_service.ListAnnotationSpecSetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListAnnotationSpecSetsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListAnnotationSpecSetsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.ListAnnotationSpecSetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.ListAnnotationSpecSetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[annotation_spec_set.AnnotationSpecSet]: + for page in self.pages: + yield from page.annotation_spec_sets + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAnnotationSpecSetsAsyncPager: + """A pager for iterating through ``list_annotation_spec_sets`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListAnnotationSpecSetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``annotation_spec_sets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAnnotationSpecSets`` requests and continue to iterate + through the ``annotation_spec_sets`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListAnnotationSpecSetsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[data_labeling_service.ListAnnotationSpecSetsResponse] + ], + request: data_labeling_service.ListAnnotationSpecSetsRequest, + response: data_labeling_service.ListAnnotationSpecSetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListAnnotationSpecSetsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListAnnotationSpecSetsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.ListAnnotationSpecSetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterable[data_labeling_service.ListAnnotationSpecSetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[annotation_spec_set.AnnotationSpecSet]: + async def async_generator(): + async for page in self.pages: + for response in page.annotation_spec_sets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstructionsPager: + """A pager for iterating through ``list_instructions`` requests. 
+ + This class thinly wraps an initial + :class:`~.data_labeling_service.ListInstructionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instructions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstructions`` requests and continue to iterate + through the ``instructions`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListInstructionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_labeling_service.ListInstructionsResponse], + request: data_labeling_service.ListInstructionsRequest, + response: data_labeling_service.ListInstructionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListInstructionsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListInstructionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_labeling_service.ListInstructionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.ListInstructionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[instruction.Instruction]: + for page in self.pages: + yield from page.instructions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInstructionsAsyncPager: + """A pager for iterating through ``list_instructions`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListInstructionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instructions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstructions`` requests and continue to iterate + through the ``instructions`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListInstructionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[data_labeling_service.ListInstructionsResponse] + ], + request: data_labeling_service.ListInstructionsRequest, + response: data_labeling_service.ListInstructionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.data_labeling_service.ListInstructionsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListInstructionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.ListInstructionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterable[data_labeling_service.ListInstructionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[instruction.Instruction]: + async def async_generator(): + async for page in self.pages: + for response in page.instructions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchEvaluationsPager: + """A pager for iterating through ``search_evaluations`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.SearchEvaluationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``evaluations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchEvaluations`` requests and continue to iterate + through the ``evaluations`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.SearchEvaluationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., data_labeling_service.SearchEvaluationsResponse], + request: data_labeling_service.SearchEvaluationsRequest, + response: data_labeling_service.SearchEvaluationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.SearchEvaluationsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.SearchEvaluationsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.SearchEvaluationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.SearchEvaluationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[evaluation.Evaluation]: + for page in self.pages: + yield from page.evaluations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchEvaluationsAsyncPager: + """A pager for iterating through ``search_evaluations`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.SearchEvaluationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``evaluations`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``SearchEvaluations`` requests and continue to iterate + through the ``evaluations`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.SearchEvaluationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[data_labeling_service.SearchEvaluationsResponse] + ], + request: data_labeling_service.SearchEvaluationsRequest, + response: data_labeling_service.SearchEvaluationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.SearchEvaluationsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.SearchEvaluationsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_labeling_service.SearchEvaluationsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterable[data_labeling_service.SearchEvaluationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[evaluation.Evaluation]: + async def async_generator(): + async for page in self.pages: + for response in page.evaluations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchExampleComparisonsPager: + """A pager for iterating through ``search_example_comparisons`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.SearchExampleComparisonsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``example_comparisons`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchExampleComparisons`` requests and continue to iterate + through the ``example_comparisons`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.SearchExampleComparisonsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_labeling_service.SearchExampleComparisonsResponse], + request: data_labeling_service.SearchExampleComparisonsRequest, + response: data_labeling_service.SearchExampleComparisonsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.SearchExampleComparisonsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.SearchExampleComparisonsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.SearchExampleComparisonsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.SearchExampleComparisonsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__( + self, + ) -> Iterable[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison + ]: + for page in self.pages: + yield from page.example_comparisons + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchExampleComparisonsAsyncPager: + """A pager for iterating through ``search_example_comparisons`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.SearchExampleComparisonsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``example_comparisons`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchExampleComparisons`` requests and continue to iterate + through the ``example_comparisons`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.SearchExampleComparisonsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[data_labeling_service.SearchExampleComparisonsResponse] + ], + request: data_labeling_service.SearchExampleComparisonsRequest, + response: data_labeling_service.SearchExampleComparisonsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.SearchExampleComparisonsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.SearchExampleComparisonsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.SearchExampleComparisonsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterable[data_labeling_service.SearchExampleComparisonsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterable[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison + ]: + async def async_generator(): + async for page in self.pages: + for response in page.example_comparisons: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEvaluationJobsPager: + """A pager for iterating through ``list_evaluation_jobs`` requests. 
+ + This class thinly wraps an initial + :class:`~.data_labeling_service.ListEvaluationJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``evaluation_jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEvaluationJobs`` requests and continue to iterate + through the ``evaluation_jobs`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListEvaluationJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_labeling_service.ListEvaluationJobsResponse], + request: data_labeling_service.ListEvaluationJobsRequest, + response: data_labeling_service.ListEvaluationJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.data_labeling_service.ListEvaluationJobsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListEvaluationJobsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = data_labeling_service.ListEvaluationJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[data_labeling_service.ListEvaluationJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[evaluation_job.EvaluationJob]: + for page in self.pages: + yield from page.evaluation_jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEvaluationJobsAsyncPager: + """A pager for iterating through ``list_evaluation_jobs`` requests. + + This class thinly wraps an initial + :class:`~.data_labeling_service.ListEvaluationJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``evaluation_jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEvaluationJobs`` requests and continue to iterate + through the ``evaluation_jobs`` field on the + corresponding responses. + + All the usual :class:`~.data_labeling_service.ListEvaluationJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[data_labeling_service.ListEvaluationJobsResponse] + ], + request: data_labeling_service.ListEvaluationJobsRequest, + response: data_labeling_service.ListEvaluationJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.data_labeling_service.ListEvaluationJobsRequest`): + The initial request object. + response (:class:`~.data_labeling_service.ListEvaluationJobsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = data_labeling_service.ListEvaluationJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterable[data_labeling_service.ListEvaluationJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[evaluation_job.EvaluationJob]: + async def async_generator(): + async for page in self.pages: + for response in page.evaluation_jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/__init__.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/__init__.py new file mode 100644 index 0000000..fcc0f42 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataLabelingServiceTransport +from .grpc import DataLabelingServiceGrpcTransport +from .grpc_asyncio import DataLabelingServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[DataLabelingServiceTransport]] +_transport_registry["grpc"] = DataLabelingServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DataLabelingServiceGrpcAsyncIOTransport + + +__all__ = ( + "DataLabelingServiceTransport", + "DataLabelingServiceGrpcTransport", + "DataLabelingServiceGrpcAsyncIOTransport", +) diff --git a/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/base.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/base.py new file mode 100644 index 0000000..e38ee72 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/base.py @@ -0,0 +1,800 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc +import typing +import pkg_resources + +from google import auth +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.cloud.datalabeling_v1beta1.types import ( + annotation_spec_set as gcd_annotation_spec_set, +) +from google.cloud.datalabeling_v1beta1.types import data_labeling_service +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job +from google.cloud.datalabeling_v1beta1.types import evaluation_job as gcd_evaluation_job +from google.cloud.datalabeling_v1beta1.types import instruction +from google.longrunning import operations_pb2 as operations # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-datalabeling", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +class DataLabelingServiceTransport(abc.ABC): + """Abstract transport class for DataLabelingService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "datalabeling.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, 
+ **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + def _prep_wrapped_messages(self): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_dataset: gapic_v1.method.wrap_method( + self.create_dataset, default_timeout=30.0, client_info=_client_info, + ), + self.get_dataset: gapic_v1.method.wrap_method( + self.get_dataset, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_datasets: gapic_v1.method.wrap_method( + self.list_datasets, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.delete_dataset: gapic_v1.method.wrap_method( + self.delete_dataset, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.import_data: gapic_v1.method.wrap_method( + self.import_data, default_timeout=30.0, client_info=_client_info, + ), + self.export_data: gapic_v1.method.wrap_method( + self.export_data, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.get_data_item: gapic_v1.method.wrap_method( + self.get_data_item, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_data_items: gapic_v1.method.wrap_method( + self.list_data_items, + default_retry=retries.Retry( + initial=0.1, 
+ maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.get_annotated_dataset: gapic_v1.method.wrap_method( + self.get_annotated_dataset, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_annotated_datasets: gapic_v1.method.wrap_method( + self.list_annotated_datasets, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.delete_annotated_dataset: gapic_v1.method.wrap_method( + self.delete_annotated_dataset, + default_timeout=None, + client_info=_client_info, + ), + self.label_image: gapic_v1.method.wrap_method( + self.label_image, default_timeout=30.0, client_info=_client_info, + ), + self.label_video: gapic_v1.method.wrap_method( + self.label_video, default_timeout=30.0, client_info=_client_info, + ), + self.label_text: gapic_v1.method.wrap_method( + self.label_text, default_timeout=30.0, client_info=_client_info, + ), + self.get_example: gapic_v1.method.wrap_method( + self.get_example, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_examples: gapic_v1.method.wrap_method( + self.list_examples, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + 
default_timeout=30.0, + client_info=_client_info, + ), + self.create_annotation_spec_set: gapic_v1.method.wrap_method( + self.create_annotation_spec_set, + default_timeout=30.0, + client_info=_client_info, + ), + self.get_annotation_spec_set: gapic_v1.method.wrap_method( + self.get_annotation_spec_set, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_annotation_spec_sets: gapic_v1.method.wrap_method( + self.list_annotation_spec_sets, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.delete_annotation_spec_set: gapic_v1.method.wrap_method( + self.delete_annotation_spec_set, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.create_instruction: gapic_v1.method.wrap_method( + self.create_instruction, default_timeout=30.0, client_info=_client_info, + ), + self.get_instruction: gapic_v1.method.wrap_method( + self.get_instruction, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_instructions: gapic_v1.method.wrap_method( + self.list_instructions, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + 
default_timeout=30.0, + client_info=_client_info, + ), + self.delete_instruction: gapic_v1.method.wrap_method( + self.delete_instruction, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.get_evaluation: gapic_v1.method.wrap_method( + self.get_evaluation, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.search_evaluations: gapic_v1.method.wrap_method( + self.search_evaluations, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.search_example_comparisons: gapic_v1.method.wrap_method( + self.search_example_comparisons, + default_timeout=30.0, + client_info=_client_info, + ), + self.create_evaluation_job: gapic_v1.method.wrap_method( + self.create_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ), + self.update_evaluation_job: gapic_v1.method.wrap_method( + self.update_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ), + self.get_evaluation_job: gapic_v1.method.wrap_method( + self.get_evaluation_job, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.pause_evaluation_job: gapic_v1.method.wrap_method( + self.pause_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ), + self.resume_evaluation_job: 
gapic_v1.method.wrap_method( + self.resume_evaluation_job, + default_timeout=30.0, + client_info=_client_info, + ), + self.delete_evaluation_job: gapic_v1.method.wrap_method( + self.delete_evaluation_job, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + self.list_evaluation_jobs: gapic_v1.method.wrap_method( + self.list_evaluation_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=30.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=30.0, + client_info=_client_info, + ), + } + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_dataset( + self, + ) -> typing.Callable[ + [data_labeling_service.CreateDatasetRequest], + typing.Union[gcd_dataset.Dataset, typing.Awaitable[gcd_dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def get_dataset( + self, + ) -> typing.Callable[ + [data_labeling_service.GetDatasetRequest], + typing.Union[dataset.Dataset, typing.Awaitable[dataset.Dataset]], + ]: + raise NotImplementedError() + + @property + def list_datasets( + self, + ) -> typing.Callable[ + [data_labeling_service.ListDatasetsRequest], + typing.Union[ + data_labeling_service.ListDatasetsResponse, + typing.Awaitable[data_labeling_service.ListDatasetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_dataset( + self, + ) -> typing.Callable[ + [data_labeling_service.DeleteDatasetRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def import_data( + self, + ) -> typing.Callable[ + 
[data_labeling_service.ImportDataRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def export_data( + self, + ) -> typing.Callable[ + [data_labeling_service.ExportDataRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def get_data_item( + self, + ) -> typing.Callable[ + [data_labeling_service.GetDataItemRequest], + typing.Union[dataset.DataItem, typing.Awaitable[dataset.DataItem]], + ]: + raise NotImplementedError() + + @property + def list_data_items( + self, + ) -> typing.Callable[ + [data_labeling_service.ListDataItemsRequest], + typing.Union[ + data_labeling_service.ListDataItemsResponse, + typing.Awaitable[data_labeling_service.ListDataItemsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_annotated_dataset( + self, + ) -> typing.Callable[ + [data_labeling_service.GetAnnotatedDatasetRequest], + typing.Union[ + dataset.AnnotatedDataset, typing.Awaitable[dataset.AnnotatedDataset] + ], + ]: + raise NotImplementedError() + + @property + def list_annotated_datasets( + self, + ) -> typing.Callable[ + [data_labeling_service.ListAnnotatedDatasetsRequest], + typing.Union[ + data_labeling_service.ListAnnotatedDatasetsResponse, + typing.Awaitable[data_labeling_service.ListAnnotatedDatasetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_annotated_dataset( + self, + ) -> typing.Callable[ + [data_labeling_service.DeleteAnnotatedDatasetRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def label_image( + self, + ) -> typing.Callable[ + [data_labeling_service.LabelImageRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def label_video( + self, + ) -> typing.Callable[ + 
[data_labeling_service.LabelVideoRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def label_text( + self, + ) -> typing.Callable[ + [data_labeling_service.LabelTextRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def get_example( + self, + ) -> typing.Callable[ + [data_labeling_service.GetExampleRequest], + typing.Union[dataset.Example, typing.Awaitable[dataset.Example]], + ]: + raise NotImplementedError() + + @property + def list_examples( + self, + ) -> typing.Callable[ + [data_labeling_service.ListExamplesRequest], + typing.Union[ + data_labeling_service.ListExamplesResponse, + typing.Awaitable[data_labeling_service.ListExamplesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_annotation_spec_set( + self, + ) -> typing.Callable[ + [data_labeling_service.CreateAnnotationSpecSetRequest], + typing.Union[ + gcd_annotation_spec_set.AnnotationSpecSet, + typing.Awaitable[gcd_annotation_spec_set.AnnotationSpecSet], + ], + ]: + raise NotImplementedError() + + @property + def get_annotation_spec_set( + self, + ) -> typing.Callable[ + [data_labeling_service.GetAnnotationSpecSetRequest], + typing.Union[ + annotation_spec_set.AnnotationSpecSet, + typing.Awaitable[annotation_spec_set.AnnotationSpecSet], + ], + ]: + raise NotImplementedError() + + @property + def list_annotation_spec_sets( + self, + ) -> typing.Callable[ + [data_labeling_service.ListAnnotationSpecSetsRequest], + typing.Union[ + data_labeling_service.ListAnnotationSpecSetsResponse, + typing.Awaitable[data_labeling_service.ListAnnotationSpecSetsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_annotation_spec_set( + self, + ) -> typing.Callable[ + [data_labeling_service.DeleteAnnotationSpecSetRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise 
NotImplementedError() + + @property + def create_instruction( + self, + ) -> typing.Callable[ + [data_labeling_service.CreateInstructionRequest], + typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + ]: + raise NotImplementedError() + + @property + def get_instruction( + self, + ) -> typing.Callable[ + [data_labeling_service.GetInstructionRequest], + typing.Union[ + instruction.Instruction, typing.Awaitable[instruction.Instruction] + ], + ]: + raise NotImplementedError() + + @property + def list_instructions( + self, + ) -> typing.Callable[ + [data_labeling_service.ListInstructionsRequest], + typing.Union[ + data_labeling_service.ListInstructionsResponse, + typing.Awaitable[data_labeling_service.ListInstructionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_instruction( + self, + ) -> typing.Callable[ + [data_labeling_service.DeleteInstructionRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def get_evaluation( + self, + ) -> typing.Callable[ + [data_labeling_service.GetEvaluationRequest], + typing.Union[evaluation.Evaluation, typing.Awaitable[evaluation.Evaluation]], + ]: + raise NotImplementedError() + + @property + def search_evaluations( + self, + ) -> typing.Callable[ + [data_labeling_service.SearchEvaluationsRequest], + typing.Union[ + data_labeling_service.SearchEvaluationsResponse, + typing.Awaitable[data_labeling_service.SearchEvaluationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def search_example_comparisons( + self, + ) -> typing.Callable[ + [data_labeling_service.SearchExampleComparisonsRequest], + typing.Union[ + data_labeling_service.SearchExampleComparisonsResponse, + typing.Awaitable[data_labeling_service.SearchExampleComparisonsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_evaluation_job( + self, + ) -> typing.Callable[ + 
[data_labeling_service.CreateEvaluationJobRequest], + typing.Union[ + evaluation_job.EvaluationJob, typing.Awaitable[evaluation_job.EvaluationJob] + ], + ]: + raise NotImplementedError() + + @property + def update_evaluation_job( + self, + ) -> typing.Callable[ + [data_labeling_service.UpdateEvaluationJobRequest], + typing.Union[ + gcd_evaluation_job.EvaluationJob, + typing.Awaitable[gcd_evaluation_job.EvaluationJob], + ], + ]: + raise NotImplementedError() + + @property + def get_evaluation_job( + self, + ) -> typing.Callable[ + [data_labeling_service.GetEvaluationJobRequest], + typing.Union[ + evaluation_job.EvaluationJob, typing.Awaitable[evaluation_job.EvaluationJob] + ], + ]: + raise NotImplementedError() + + @property + def pause_evaluation_job( + self, + ) -> typing.Callable[ + [data_labeling_service.PauseEvaluationJobRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def resume_evaluation_job( + self, + ) -> typing.Callable[ + [data_labeling_service.ResumeEvaluationJobRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def delete_evaluation_job( + self, + ) -> typing.Callable[ + [data_labeling_service.DeleteEvaluationJobRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def list_evaluation_jobs( + self, + ) -> typing.Callable[ + [data_labeling_service.ListEvaluationJobsRequest], + typing.Union[ + data_labeling_service.ListEvaluationJobsResponse, + typing.Awaitable[data_labeling_service.ListEvaluationJobsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("DataLabelingServiceTransport",) diff --git a/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc.py new file mode 100644 index 0000000..8ab57b5 --- /dev/null +++ 
b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc.py @@ -0,0 +1,1201 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.cloud.datalabeling_v1beta1.types import ( + annotation_spec_set as gcd_annotation_spec_set, +) +from google.cloud.datalabeling_v1beta1.types import data_labeling_service +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job +from google.cloud.datalabeling_v1beta1.types import evaluation_job as gcd_evaluation_job +from google.cloud.datalabeling_v1beta1.types import instruction +from google.longrunning import operations_pb2 as operations # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DataLabelingServiceTransport + + +class 
DataLabelingServiceGrpcTransport(DataLabelingServiceTransport): + """gRPC backend transport for DataLabelingService. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "datalabeling.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. 
+ super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + @classmethod + def create_channel( + cls, + host: str = "datalabeling.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. 
+ """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if "operations_client" not in self.__dict__: + self.__dict__["operations_client"] = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self.__dict__["operations_client"] + + @property + def create_dataset( + self, + ) -> Callable[[data_labeling_service.CreateDatasetRequest], gcd_dataset.Dataset]: + r"""Return a callable for the create dataset method over gRPC. + + Creates dataset. If success return a Dataset + resource. + + Returns: + Callable[[~.CreateDatasetRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateDataset", + request_serializer=data_labeling_service.CreateDatasetRequest.serialize, + response_deserializer=gcd_dataset.Dataset.deserialize, + ) + return self._stubs["create_dataset"] + + @property + def get_dataset( + self, + ) -> Callable[[data_labeling_service.GetDatasetRequest], dataset.Dataset]: + r"""Return a callable for the get dataset method over gRPC. + + Gets dataset by resource name. 
+ + Returns: + Callable[[~.GetDatasetRequest], + ~.Dataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataset", + request_serializer=data_labeling_service.GetDatasetRequest.serialize, + response_deserializer=dataset.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_datasets( + self, + ) -> Callable[ + [data_labeling_service.ListDatasetsRequest], + data_labeling_service.ListDatasetsResponse, + ]: + r"""Return a callable for the list datasets method over gRPC. + + Lists datasets under a project. Pagination is + supported. + + Returns: + Callable[[~.ListDatasetsRequest], + ~.ListDatasetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_datasets" not in self._stubs: + self._stubs["list_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDatasets", + request_serializer=data_labeling_service.ListDatasetsRequest.serialize, + response_deserializer=data_labeling_service.ListDatasetsResponse.deserialize, + ) + return self._stubs["list_datasets"] + + @property + def delete_dataset( + self, + ) -> Callable[[data_labeling_service.DeleteDatasetRequest], empty.Empty]: + r"""Return a callable for the delete dataset method over gRPC. + + Deletes a dataset by resource name. 
+ + Returns: + Callable[[~.DeleteDatasetRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dataset" not in self._stubs: + self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteDataset", + request_serializer=data_labeling_service.DeleteDatasetRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dataset"] + + @property + def import_data( + self, + ) -> Callable[[data_labeling_service.ImportDataRequest], operations.Operation]: + r"""Return a callable for the import data method over gRPC. + + Imports data into dataset based on source locations + defined in request. It can be called multiple times for + the same dataset. Each dataset can only have one long + running operation running on it. For example, no + labeling task (also long running operation) can be + started while importing is still ongoing. Vice versa. + + Returns: + Callable[[~.ImportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ImportData", + request_serializer=data_labeling_service.ImportDataRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[[data_labeling_service.ExportDataRequest], operations.Operation]: + r"""Return a callable for the export data method over gRPC. + + Exports data and annotations from dataset. + + Returns: + Callable[[~.ExportDataRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ExportData", + request_serializer=data_labeling_service.ExportDataRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["export_data"] + + @property + def get_data_item( + self, + ) -> Callable[[data_labeling_service.GetDataItemRequest], dataset.DataItem]: + r"""Return a callable for the get data item method over gRPC. + + Gets a data item in a dataset by resource name. This + API can be called after data are imported into dataset. + + Returns: + Callable[[~.GetDataItemRequest], + ~.DataItem]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_data_item" not in self._stubs: + self._stubs["get_data_item"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataItem", + request_serializer=data_labeling_service.GetDataItemRequest.serialize, + response_deserializer=dataset.DataItem.deserialize, + ) + return self._stubs["get_data_item"] + + @property + def list_data_items( + self, + ) -> Callable[ + [data_labeling_service.ListDataItemsRequest], + data_labeling_service.ListDataItemsResponse, + ]: + r"""Return a callable for the list data items method over gRPC. + + Lists data items in a dataset. This API can be called + after data are imported into dataset. Pagination is + supported. + + Returns: + Callable[[~.ListDataItemsRequest], + ~.ListDataItemsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_items" not in self._stubs: + self._stubs["list_data_items"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDataItems", + request_serializer=data_labeling_service.ListDataItemsRequest.serialize, + response_deserializer=data_labeling_service.ListDataItemsResponse.deserialize, + ) + return self._stubs["list_data_items"] + + @property + def get_annotated_dataset( + self, + ) -> Callable[ + [data_labeling_service.GetAnnotatedDatasetRequest], dataset.AnnotatedDataset + ]: + r"""Return a callable for the get annotated dataset method over gRPC. + + Gets an annotated dataset by resource name. + + Returns: + Callable[[~.GetAnnotatedDatasetRequest], + ~.AnnotatedDataset]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_annotated_dataset" not in self._stubs: + self._stubs["get_annotated_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotatedDataset", + request_serializer=data_labeling_service.GetAnnotatedDatasetRequest.serialize, + response_deserializer=dataset.AnnotatedDataset.deserialize, + ) + return self._stubs["get_annotated_dataset"] + + @property + def list_annotated_datasets( + self, + ) -> Callable[ + [data_labeling_service.ListAnnotatedDatasetsRequest], + data_labeling_service.ListAnnotatedDatasetsResponse, + ]: + r"""Return a callable for the list annotated datasets method over gRPC. + + Lists annotated datasets for a dataset. Pagination is + supported. + + Returns: + Callable[[~.ListAnnotatedDatasetsRequest], + ~.ListAnnotatedDatasetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_annotated_datasets" not in self._stubs: + self._stubs["list_annotated_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotatedDatasets", + request_serializer=data_labeling_service.ListAnnotatedDatasetsRequest.serialize, + response_deserializer=data_labeling_service.ListAnnotatedDatasetsResponse.deserialize, + ) + return self._stubs["list_annotated_datasets"] + + @property + def delete_annotated_dataset( + self, + ) -> Callable[[data_labeling_service.DeleteAnnotatedDatasetRequest], empty.Empty]: + r"""Return a callable for the delete annotated dataset method over gRPC. + + Deletes an annotated dataset by resource name. 
+ + Returns: + Callable[[~.DeleteAnnotatedDatasetRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_annotated_dataset" not in self._stubs: + self._stubs["delete_annotated_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotatedDataset", + request_serializer=data_labeling_service.DeleteAnnotatedDatasetRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_annotated_dataset"] + + @property + def label_image( + self, + ) -> Callable[[data_labeling_service.LabelImageRequest], operations.Operation]: + r"""Return a callable for the label image method over gRPC. + + Starts a labeling task for image. The type of image + labeling task is configured by feature in the request. + + Returns: + Callable[[~.LabelImageRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "label_image" not in self._stubs: + self._stubs["label_image"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelImage", + request_serializer=data_labeling_service.LabelImageRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["label_image"] + + @property + def label_video( + self, + ) -> Callable[[data_labeling_service.LabelVideoRequest], operations.Operation]: + r"""Return a callable for the label video method over gRPC. + + Starts a labeling task for video. 
The type of video + labeling task is configured by feature in the request. + + Returns: + Callable[[~.LabelVideoRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "label_video" not in self._stubs: + self._stubs["label_video"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelVideo", + request_serializer=data_labeling_service.LabelVideoRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["label_video"] + + @property + def label_text( + self, + ) -> Callable[[data_labeling_service.LabelTextRequest], operations.Operation]: + r"""Return a callable for the label text method over gRPC. + + Starts a labeling task for text. The type of text + labeling task is configured by feature in the request. + + Returns: + Callable[[~.LabelTextRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "label_text" not in self._stubs: + self._stubs["label_text"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelText", + request_serializer=data_labeling_service.LabelTextRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["label_text"] + + @property + def get_example( + self, + ) -> Callable[[data_labeling_service.GetExampleRequest], dataset.Example]: + r"""Return a callable for the get example method over gRPC. + + Gets an example by resource name, including both data + and annotation. 
+ + Returns: + Callable[[~.GetExampleRequest], + ~.Example]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_example" not in self._stubs: + self._stubs["get_example"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetExample", + request_serializer=data_labeling_service.GetExampleRequest.serialize, + response_deserializer=dataset.Example.deserialize, + ) + return self._stubs["get_example"] + + @property + def list_examples( + self, + ) -> Callable[ + [data_labeling_service.ListExamplesRequest], + data_labeling_service.ListExamplesResponse, + ]: + r"""Return a callable for the list examples method over gRPC. + + Lists examples in an annotated dataset. Pagination is + supported. + + Returns: + Callable[[~.ListExamplesRequest], + ~.ListExamplesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_examples" not in self._stubs: + self._stubs["list_examples"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListExamples", + request_serializer=data_labeling_service.ListExamplesRequest.serialize, + response_deserializer=data_labeling_service.ListExamplesResponse.deserialize, + ) + return self._stubs["list_examples"] + + @property + def create_annotation_spec_set( + self, + ) -> Callable[ + [data_labeling_service.CreateAnnotationSpecSetRequest], + gcd_annotation_spec_set.AnnotationSpecSet, + ]: + r"""Return a callable for the create annotation spec set method over gRPC. 
+ + Creates an annotation spec set by providing a set of + labels. + + Returns: + Callable[[~.CreateAnnotationSpecSetRequest], + ~.AnnotationSpecSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_annotation_spec_set" not in self._stubs: + self._stubs["create_annotation_spec_set"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateAnnotationSpecSet", + request_serializer=data_labeling_service.CreateAnnotationSpecSetRequest.serialize, + response_deserializer=gcd_annotation_spec_set.AnnotationSpecSet.deserialize, + ) + return self._stubs["create_annotation_spec_set"] + + @property + def get_annotation_spec_set( + self, + ) -> Callable[ + [data_labeling_service.GetAnnotationSpecSetRequest], + annotation_spec_set.AnnotationSpecSet, + ]: + r"""Return a callable for the get annotation spec set method over gRPC. + + Gets an annotation spec set by resource name. + + Returns: + Callable[[~.GetAnnotationSpecSetRequest], + ~.AnnotationSpecSet]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_annotation_spec_set" not in self._stubs: + self._stubs["get_annotation_spec_set"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotationSpecSet", + request_serializer=data_labeling_service.GetAnnotationSpecSetRequest.serialize, + response_deserializer=annotation_spec_set.AnnotationSpecSet.deserialize, + ) + return self._stubs["get_annotation_spec_set"] + + @property + def list_annotation_spec_sets( + self, + ) -> Callable[ + [data_labeling_service.ListAnnotationSpecSetsRequest], + data_labeling_service.ListAnnotationSpecSetsResponse, + ]: + r"""Return a callable for the list annotation spec sets method over gRPC. + + Lists annotation spec sets for a project. Pagination + is supported. + + Returns: + Callable[[~.ListAnnotationSpecSetsRequest], + ~.ListAnnotationSpecSetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_annotation_spec_sets" not in self._stubs: + self._stubs["list_annotation_spec_sets"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotationSpecSets", + request_serializer=data_labeling_service.ListAnnotationSpecSetsRequest.serialize, + response_deserializer=data_labeling_service.ListAnnotationSpecSetsResponse.deserialize, + ) + return self._stubs["list_annotation_spec_sets"] + + @property + def delete_annotation_spec_set( + self, + ) -> Callable[[data_labeling_service.DeleteAnnotationSpecSetRequest], empty.Empty]: + r"""Return a callable for the delete annotation spec set method over gRPC. + + Deletes an annotation spec set by resource name. + + Returns: + Callable[[~.DeleteAnnotationSpecSetRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_annotation_spec_set" not in self._stubs: + self._stubs["delete_annotation_spec_set"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotationSpecSet", + request_serializer=data_labeling_service.DeleteAnnotationSpecSetRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_annotation_spec_set"] + + @property + def create_instruction( + self, + ) -> Callable[ + [data_labeling_service.CreateInstructionRequest], operations.Operation + ]: + r"""Return a callable for the create instruction method over gRPC. + + Creates an instruction for how data should be + labeled. + + Returns: + Callable[[~.CreateInstructionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instruction" not in self._stubs: + self._stubs["create_instruction"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateInstruction", + request_serializer=data_labeling_service.CreateInstructionRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_instruction"] + + @property + def get_instruction( + self, + ) -> Callable[ + [data_labeling_service.GetInstructionRequest], instruction.Instruction + ]: + r"""Return a callable for the get instruction method over gRPC. + + Gets an instruction by resource name. + + Returns: + Callable[[~.GetInstructionRequest], + ~.Instruction]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instruction" not in self._stubs: + self._stubs["get_instruction"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetInstruction", + request_serializer=data_labeling_service.GetInstructionRequest.serialize, + response_deserializer=instruction.Instruction.deserialize, + ) + return self._stubs["get_instruction"] + + @property + def list_instructions( + self, + ) -> Callable[ + [data_labeling_service.ListInstructionsRequest], + data_labeling_service.ListInstructionsResponse, + ]: + r"""Return a callable for the list instructions method over gRPC. + + Lists instructions for a project. Pagination is + supported. + + Returns: + Callable[[~.ListInstructionsRequest], + ~.ListInstructionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_instructions" not in self._stubs: + self._stubs["list_instructions"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListInstructions", + request_serializer=data_labeling_service.ListInstructionsRequest.serialize, + response_deserializer=data_labeling_service.ListInstructionsResponse.deserialize, + ) + return self._stubs["list_instructions"] + + @property + def delete_instruction( + self, + ) -> Callable[[data_labeling_service.DeleteInstructionRequest], empty.Empty]: + r"""Return a callable for the delete instruction method over gRPC. + + Deletes an instruction object by resource name. 
+ + Returns: + Callable[[~.DeleteInstructionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instruction" not in self._stubs: + self._stubs["delete_instruction"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteInstruction", + request_serializer=data_labeling_service.DeleteInstructionRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_instruction"] + + @property + def get_evaluation( + self, + ) -> Callable[[data_labeling_service.GetEvaluationRequest], evaluation.Evaluation]: + r"""Return a callable for the get evaluation method over gRPC. + + Gets an evaluation by resource name (to search, use + [projects.evaluations.search][google.cloud.datalabeling.v1beta1.DataLabelingService.SearchEvaluations]). + + Returns: + Callable[[~.GetEvaluationRequest], + ~.Evaluation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_evaluation" not in self._stubs: + self._stubs["get_evaluation"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluation", + request_serializer=data_labeling_service.GetEvaluationRequest.serialize, + response_deserializer=evaluation.Evaluation.deserialize, + ) + return self._stubs["get_evaluation"] + + @property + def search_evaluations( + self, + ) -> Callable[ + [data_labeling_service.SearchEvaluationsRequest], + data_labeling_service.SearchEvaluationsResponse, + ]: + r"""Return a callable for the search evaluations method over gRPC. + + Searches + [evaluations][google.cloud.datalabeling.v1beta1.Evaluation] + within a project. + + Returns: + Callable[[~.SearchEvaluationsRequest], + ~.SearchEvaluationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_evaluations" not in self._stubs: + self._stubs["search_evaluations"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchEvaluations", + request_serializer=data_labeling_service.SearchEvaluationsRequest.serialize, + response_deserializer=data_labeling_service.SearchEvaluationsResponse.deserialize, + ) + return self._stubs["search_evaluations"] + + @property + def search_example_comparisons( + self, + ) -> Callable[ + [data_labeling_service.SearchExampleComparisonsRequest], + data_labeling_service.SearchExampleComparisonsResponse, + ]: + r"""Return a callable for the search example comparisons method over gRPC. + + Searches example comparisons from an evaluation. The + return format is a list of example comparisons that show + ground truth and prediction(s) for a single input. + Search by providing an evaluation ID. 
+ + Returns: + Callable[[~.SearchExampleComparisonsRequest], + ~.SearchExampleComparisonsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_example_comparisons" not in self._stubs: + self._stubs["search_example_comparisons"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchExampleComparisons", + request_serializer=data_labeling_service.SearchExampleComparisonsRequest.serialize, + response_deserializer=data_labeling_service.SearchExampleComparisonsResponse.deserialize, + ) + return self._stubs["search_example_comparisons"] + + @property + def create_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.CreateEvaluationJobRequest], evaluation_job.EvaluationJob + ]: + r"""Return a callable for the create evaluation job method over gRPC. + + Creates an evaluation job. + + Returns: + Callable[[~.CreateEvaluationJobRequest], + ~.EvaluationJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_evaluation_job" not in self._stubs: + self._stubs["create_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateEvaluationJob", + request_serializer=data_labeling_service.CreateEvaluationJobRequest.serialize, + response_deserializer=evaluation_job.EvaluationJob.deserialize, + ) + return self._stubs["create_evaluation_job"] + + @property + def update_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.UpdateEvaluationJobRequest], + gcd_evaluation_job.EvaluationJob, + ]: + r"""Return a callable for the update evaluation job method over gRPC. + + Updates an evaluation job. You can only update certain fields of + the job's + [EvaluationJobConfig][google.cloud.datalabeling.v1beta1.EvaluationJobConfig]: + ``humanAnnotationConfig.instruction``, ``exampleCount``, and + ``exampleSamplePercentage``. + + If you want to change any other aspect of the evaluation job, + you must delete the job and create a new one. + + Returns: + Callable[[~.UpdateEvaluationJobRequest], + ~.EvaluationJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_evaluation_job" not in self._stubs: + self._stubs["update_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/UpdateEvaluationJob", + request_serializer=data_labeling_service.UpdateEvaluationJobRequest.serialize, + response_deserializer=gcd_evaluation_job.EvaluationJob.deserialize, + ) + return self._stubs["update_evaluation_job"] + + @property + def get_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.GetEvaluationJobRequest], evaluation_job.EvaluationJob + ]: + r"""Return a callable for the get evaluation job method over gRPC. 
+ + Gets an evaluation job by resource name. + + Returns: + Callable[[~.GetEvaluationJobRequest], + ~.EvaluationJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_evaluation_job" not in self._stubs: + self._stubs["get_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluationJob", + request_serializer=data_labeling_service.GetEvaluationJobRequest.serialize, + response_deserializer=evaluation_job.EvaluationJob.deserialize, + ) + return self._stubs["get_evaluation_job"] + + @property + def pause_evaluation_job( + self, + ) -> Callable[[data_labeling_service.PauseEvaluationJobRequest], empty.Empty]: + r"""Return a callable for the pause evaluation job method over gRPC. + + Pauses an evaluation job. Pausing an evaluation job that is + already in a ``PAUSED`` state is a no-op. + + Returns: + Callable[[~.PauseEvaluationJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "pause_evaluation_job" not in self._stubs: + self._stubs["pause_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/PauseEvaluationJob", + request_serializer=data_labeling_service.PauseEvaluationJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["pause_evaluation_job"] + + @property + def resume_evaluation_job( + self, + ) -> Callable[[data_labeling_service.ResumeEvaluationJobRequest], empty.Empty]: + r"""Return a callable for the resume evaluation job method over gRPC. + + Resumes a paused evaluation job. A deleted evaluation + job can't be resumed. Resuming a running or scheduled + evaluation job is a no-op. + + Returns: + Callable[[~.ResumeEvaluationJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_evaluation_job" not in self._stubs: + self._stubs["resume_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ResumeEvaluationJob", + request_serializer=data_labeling_service.ResumeEvaluationJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["resume_evaluation_job"] + + @property + def delete_evaluation_job( + self, + ) -> Callable[[data_labeling_service.DeleteEvaluationJobRequest], empty.Empty]: + r"""Return a callable for the delete evaluation job method over gRPC. + + Stops and deletes an evaluation job. + + Returns: + Callable[[~.DeleteEvaluationJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_evaluation_job" not in self._stubs: + self._stubs["delete_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteEvaluationJob", + request_serializer=data_labeling_service.DeleteEvaluationJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_evaluation_job"] + + @property + def list_evaluation_jobs( + self, + ) -> Callable[ + [data_labeling_service.ListEvaluationJobsRequest], + data_labeling_service.ListEvaluationJobsResponse, + ]: + r"""Return a callable for the list evaluation jobs method over gRPC. + + Lists all evaluation jobs within a project with + possible filters. Pagination is supported. + + Returns: + Callable[[~.ListEvaluationJobsRequest], + ~.ListEvaluationJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_evaluation_jobs" not in self._stubs: + self._stubs["list_evaluation_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListEvaluationJobs", + request_serializer=data_labeling_service.ListEvaluationJobsRequest.serialize, + response_deserializer=data_labeling_service.ListEvaluationJobsResponse.deserialize, + ) + return self._stubs["list_evaluation_jobs"] + + +__all__ = ("DataLabelingServiceGrpcTransport",) diff --git a/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc_asyncio.py b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..79fd1cf --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/services/data_labeling_service/transports/grpc_asyncio.py @@ -0,0 +1,1231 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.cloud.datalabeling_v1beta1.types import ( + annotation_spec_set as gcd_annotation_spec_set, +) +from google.cloud.datalabeling_v1beta1.types import data_labeling_service +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job +from google.cloud.datalabeling_v1beta1.types import evaluation_job as gcd_evaluation_job +from google.cloud.datalabeling_v1beta1.types import instruction +from google.longrunning import operations_pb2 as operations # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DataLabelingServiceTransport +from .grpc import DataLabelingServiceGrpcTransport + + +class DataLabelingServiceGrpcAsyncIOTransport(DataLabelingServiceTransport): + """gRPC AsyncIO backend transport for DataLabelingService. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "datalabeling.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "datalabeling.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. 
+ return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if "operations_client" not in self.__dict__: + self.__dict__["operations_client"] = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self.__dict__["operations_client"] + + @property + def create_dataset( + self, + ) -> Callable[ + [data_labeling_service.CreateDatasetRequest], Awaitable[gcd_dataset.Dataset] + ]: + r"""Return a callable for the create dataset method over gRPC. + + Creates dataset. If success return a Dataset + resource. + + Returns: + Callable[[~.CreateDatasetRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dataset" not in self._stubs: + self._stubs["create_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateDataset", + request_serializer=data_labeling_service.CreateDatasetRequest.serialize, + response_deserializer=gcd_dataset.Dataset.deserialize, + ) + return self._stubs["create_dataset"] + + @property + def get_dataset( + self, + ) -> Callable[ + [data_labeling_service.GetDatasetRequest], Awaitable[dataset.Dataset] + ]: + r"""Return a callable for the get dataset method over gRPC. + + Gets dataset by resource name. + + Returns: + Callable[[~.GetDatasetRequest], + Awaitable[~.Dataset]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dataset" not in self._stubs: + self._stubs["get_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataset", + request_serializer=data_labeling_service.GetDatasetRequest.serialize, + response_deserializer=dataset.Dataset.deserialize, + ) + return self._stubs["get_dataset"] + + @property + def list_datasets( + self, + ) -> Callable[ + [data_labeling_service.ListDatasetsRequest], + Awaitable[data_labeling_service.ListDatasetsResponse], + ]: + r"""Return a callable for the list datasets method over gRPC. + + Lists datasets under a project. Pagination is + supported. + + Returns: + Callable[[~.ListDatasetsRequest], + Awaitable[~.ListDatasetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_datasets" not in self._stubs: + self._stubs["list_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDatasets", + request_serializer=data_labeling_service.ListDatasetsRequest.serialize, + response_deserializer=data_labeling_service.ListDatasetsResponse.deserialize, + ) + return self._stubs["list_datasets"] + + @property + def delete_dataset( + self, + ) -> Callable[[data_labeling_service.DeleteDatasetRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete dataset method over gRPC. + + Deletes a dataset by resource name. + + Returns: + Callable[[~.DeleteDatasetRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dataset" not in self._stubs: + self._stubs["delete_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteDataset", + request_serializer=data_labeling_service.DeleteDatasetRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dataset"] + + @property + def import_data( + self, + ) -> Callable[ + [data_labeling_service.ImportDataRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the import data method over gRPC. + + Imports data into dataset based on source locations + defined in request. It can be called multiple times for + the same dataset. Each dataset can only have one long + running operation running on it. For example, no + labeling task (also long running operation) can be + started while importing is still ongoing. Vice versa. + + Returns: + Callable[[~.ImportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_data" not in self._stubs: + self._stubs["import_data"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ImportData", + request_serializer=data_labeling_service.ImportDataRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["import_data"] + + @property + def export_data( + self, + ) -> Callable[ + [data_labeling_service.ExportDataRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the export data method over gRPC. 
+ + Exports data and annotations from dataset. + + Returns: + Callable[[~.ExportDataRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_data" not in self._stubs: + self._stubs["export_data"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ExportData", + request_serializer=data_labeling_service.ExportDataRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["export_data"] + + @property + def get_data_item( + self, + ) -> Callable[ + [data_labeling_service.GetDataItemRequest], Awaitable[dataset.DataItem] + ]: + r"""Return a callable for the get data item method over gRPC. + + Gets a data item in a dataset by resource name. This + API can be called after data are imported into dataset. + + Returns: + Callable[[~.GetDataItemRequest], + Awaitable[~.DataItem]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_item" not in self._stubs: + self._stubs["get_data_item"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataItem", + request_serializer=data_labeling_service.GetDataItemRequest.serialize, + response_deserializer=dataset.DataItem.deserialize, + ) + return self._stubs["get_data_item"] + + @property + def list_data_items( + self, + ) -> Callable[ + [data_labeling_service.ListDataItemsRequest], + Awaitable[data_labeling_service.ListDataItemsResponse], + ]: + r"""Return a callable for the list data items method over gRPC. 
+ + Lists data items in a dataset. This API can be called + after data are imported into dataset. Pagination is + supported. + + Returns: + Callable[[~.ListDataItemsRequest], + Awaitable[~.ListDataItemsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_items" not in self._stubs: + self._stubs["list_data_items"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDataItems", + request_serializer=data_labeling_service.ListDataItemsRequest.serialize, + response_deserializer=data_labeling_service.ListDataItemsResponse.deserialize, + ) + return self._stubs["list_data_items"] + + @property + def get_annotated_dataset( + self, + ) -> Callable[ + [data_labeling_service.GetAnnotatedDatasetRequest], + Awaitable[dataset.AnnotatedDataset], + ]: + r"""Return a callable for the get annotated dataset method over gRPC. + + Gets an annotated dataset by resource name. + + Returns: + Callable[[~.GetAnnotatedDatasetRequest], + Awaitable[~.AnnotatedDataset]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_annotated_dataset" not in self._stubs: + self._stubs["get_annotated_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotatedDataset", + request_serializer=data_labeling_service.GetAnnotatedDatasetRequest.serialize, + response_deserializer=dataset.AnnotatedDataset.deserialize, + ) + return self._stubs["get_annotated_dataset"] + + @property + def list_annotated_datasets( + self, + ) -> Callable[ + [data_labeling_service.ListAnnotatedDatasetsRequest], + Awaitable[data_labeling_service.ListAnnotatedDatasetsResponse], + ]: + r"""Return a callable for the list annotated datasets method over gRPC. + + Lists annotated datasets for a dataset. Pagination is + supported. + + Returns: + Callable[[~.ListAnnotatedDatasetsRequest], + Awaitable[~.ListAnnotatedDatasetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_annotated_datasets" not in self._stubs: + self._stubs["list_annotated_datasets"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotatedDatasets", + request_serializer=data_labeling_service.ListAnnotatedDatasetsRequest.serialize, + response_deserializer=data_labeling_service.ListAnnotatedDatasetsResponse.deserialize, + ) + return self._stubs["list_annotated_datasets"] + + @property + def delete_annotated_dataset( + self, + ) -> Callable[ + [data_labeling_service.DeleteAnnotatedDatasetRequest], Awaitable[empty.Empty] + ]: + r"""Return a callable for the delete annotated dataset method over gRPC. + + Deletes an annotated dataset by resource name. + + Returns: + Callable[[~.DeleteAnnotatedDatasetRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_annotated_dataset" not in self._stubs: + self._stubs["delete_annotated_dataset"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotatedDataset", + request_serializer=data_labeling_service.DeleteAnnotatedDatasetRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_annotated_dataset"] + + @property + def label_image( + self, + ) -> Callable[ + [data_labeling_service.LabelImageRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the label image method over gRPC. + + Starts a labeling task for image. The type of image + labeling task is configured by feature in the request. + + Returns: + Callable[[~.LabelImageRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "label_image" not in self._stubs: + self._stubs["label_image"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelImage", + request_serializer=data_labeling_service.LabelImageRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["label_image"] + + @property + def label_video( + self, + ) -> Callable[ + [data_labeling_service.LabelVideoRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the label video method over gRPC. + + Starts a labeling task for video. The type of video + labeling task is configured by feature in the request. 
+ + Returns: + Callable[[~.LabelVideoRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "label_video" not in self._stubs: + self._stubs["label_video"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelVideo", + request_serializer=data_labeling_service.LabelVideoRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["label_video"] + + @property + def label_text( + self, + ) -> Callable[ + [data_labeling_service.LabelTextRequest], Awaitable[operations.Operation] + ]: + r"""Return a callable for the label text method over gRPC. + + Starts a labeling task for text. The type of text + labeling task is configured by feature in the request. + + Returns: + Callable[[~.LabelTextRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "label_text" not in self._stubs: + self._stubs["label_text"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelText", + request_serializer=data_labeling_service.LabelTextRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["label_text"] + + @property + def get_example( + self, + ) -> Callable[ + [data_labeling_service.GetExampleRequest], Awaitable[dataset.Example] + ]: + r"""Return a callable for the get example method over gRPC. + + Gets an example by resource name, including both data + and annotation. 
+ + Returns: + Callable[[~.GetExampleRequest], + Awaitable[~.Example]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_example" not in self._stubs: + self._stubs["get_example"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetExample", + request_serializer=data_labeling_service.GetExampleRequest.serialize, + response_deserializer=dataset.Example.deserialize, + ) + return self._stubs["get_example"] + + @property + def list_examples( + self, + ) -> Callable[ + [data_labeling_service.ListExamplesRequest], + Awaitable[data_labeling_service.ListExamplesResponse], + ]: + r"""Return a callable for the list examples method over gRPC. + + Lists examples in an annotated dataset. Pagination is + supported. + + Returns: + Callable[[~.ListExamplesRequest], + Awaitable[~.ListExamplesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_examples" not in self._stubs: + self._stubs["list_examples"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListExamples", + request_serializer=data_labeling_service.ListExamplesRequest.serialize, + response_deserializer=data_labeling_service.ListExamplesResponse.deserialize, + ) + return self._stubs["list_examples"] + + @property + def create_annotation_spec_set( + self, + ) -> Callable[ + [data_labeling_service.CreateAnnotationSpecSetRequest], + Awaitable[gcd_annotation_spec_set.AnnotationSpecSet], + ]: + r"""Return a callable for the create annotation spec set method over gRPC. 
+ + Creates an annotation spec set by providing a set of + labels. + + Returns: + Callable[[~.CreateAnnotationSpecSetRequest], + Awaitable[~.AnnotationSpecSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_annotation_spec_set" not in self._stubs: + self._stubs["create_annotation_spec_set"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateAnnotationSpecSet", + request_serializer=data_labeling_service.CreateAnnotationSpecSetRequest.serialize, + response_deserializer=gcd_annotation_spec_set.AnnotationSpecSet.deserialize, + ) + return self._stubs["create_annotation_spec_set"] + + @property + def get_annotation_spec_set( + self, + ) -> Callable[ + [data_labeling_service.GetAnnotationSpecSetRequest], + Awaitable[annotation_spec_set.AnnotationSpecSet], + ]: + r"""Return a callable for the get annotation spec set method over gRPC. + + Gets an annotation spec set by resource name. + + Returns: + Callable[[~.GetAnnotationSpecSetRequest], + Awaitable[~.AnnotationSpecSet]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_annotation_spec_set" not in self._stubs: + self._stubs["get_annotation_spec_set"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotationSpecSet", + request_serializer=data_labeling_service.GetAnnotationSpecSetRequest.serialize, + response_deserializer=annotation_spec_set.AnnotationSpecSet.deserialize, + ) + return self._stubs["get_annotation_spec_set"] + + @property + def list_annotation_spec_sets( + self, + ) -> Callable[ + [data_labeling_service.ListAnnotationSpecSetsRequest], + Awaitable[data_labeling_service.ListAnnotationSpecSetsResponse], + ]: + r"""Return a callable for the list annotation spec sets method over gRPC. + + Lists annotation spec sets for a project. Pagination + is supported. + + Returns: + Callable[[~.ListAnnotationSpecSetsRequest], + Awaitable[~.ListAnnotationSpecSetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_annotation_spec_sets" not in self._stubs: + self._stubs["list_annotation_spec_sets"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotationSpecSets", + request_serializer=data_labeling_service.ListAnnotationSpecSetsRequest.serialize, + response_deserializer=data_labeling_service.ListAnnotationSpecSetsResponse.deserialize, + ) + return self._stubs["list_annotation_spec_sets"] + + @property + def delete_annotation_spec_set( + self, + ) -> Callable[ + [data_labeling_service.DeleteAnnotationSpecSetRequest], Awaitable[empty.Empty] + ]: + r"""Return a callable for the delete annotation spec set method over gRPC. + + Deletes an annotation spec set by resource name. 
+ + Returns: + Callable[[~.DeleteAnnotationSpecSetRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_annotation_spec_set" not in self._stubs: + self._stubs["delete_annotation_spec_set"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotationSpecSet", + request_serializer=data_labeling_service.DeleteAnnotationSpecSetRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_annotation_spec_set"] + + @property + def create_instruction( + self, + ) -> Callable[ + [data_labeling_service.CreateInstructionRequest], + Awaitable[operations.Operation], + ]: + r"""Return a callable for the create instruction method over gRPC. + + Creates an instruction for how data should be + labeled. + + Returns: + Callable[[~.CreateInstructionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_instruction" not in self._stubs: + self._stubs["create_instruction"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateInstruction", + request_serializer=data_labeling_service.CreateInstructionRequest.serialize, + response_deserializer=operations.Operation.FromString, + ) + return self._stubs["create_instruction"] + + @property + def get_instruction( + self, + ) -> Callable[ + [data_labeling_service.GetInstructionRequest], + Awaitable[instruction.Instruction], + ]: + r"""Return a callable for the get instruction method over gRPC. 
+ + Gets an instruction by resource name. + + Returns: + Callable[[~.GetInstructionRequest], + Awaitable[~.Instruction]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_instruction" not in self._stubs: + self._stubs["get_instruction"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetInstruction", + request_serializer=data_labeling_service.GetInstructionRequest.serialize, + response_deserializer=instruction.Instruction.deserialize, + ) + return self._stubs["get_instruction"] + + @property + def list_instructions( + self, + ) -> Callable[ + [data_labeling_service.ListInstructionsRequest], + Awaitable[data_labeling_service.ListInstructionsResponse], + ]: + r"""Return a callable for the list instructions method over gRPC. + + Lists instructions for a project. Pagination is + supported. + + Returns: + Callable[[~.ListInstructionsRequest], + Awaitable[~.ListInstructionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_instructions" not in self._stubs: + self._stubs["list_instructions"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListInstructions", + request_serializer=data_labeling_service.ListInstructionsRequest.serialize, + response_deserializer=data_labeling_service.ListInstructionsResponse.deserialize, + ) + return self._stubs["list_instructions"] + + @property + def delete_instruction( + self, + ) -> Callable[ + [data_labeling_service.DeleteInstructionRequest], Awaitable[empty.Empty] + ]: + r"""Return a callable for the delete instruction method over gRPC. + + Deletes an instruction object by resource name. + + Returns: + Callable[[~.DeleteInstructionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_instruction" not in self._stubs: + self._stubs["delete_instruction"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteInstruction", + request_serializer=data_labeling_service.DeleteInstructionRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_instruction"] + + @property + def get_evaluation( + self, + ) -> Callable[ + [data_labeling_service.GetEvaluationRequest], Awaitable[evaluation.Evaluation] + ]: + r"""Return a callable for the get evaluation method over gRPC. + + Gets an evaluation by resource name (to search, use + [projects.evaluations.search][google.cloud.datalabeling.v1beta1.DataLabelingService.SearchEvaluations]). + + Returns: + Callable[[~.GetEvaluationRequest], + Awaitable[~.Evaluation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_evaluation" not in self._stubs: + self._stubs["get_evaluation"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluation", + request_serializer=data_labeling_service.GetEvaluationRequest.serialize, + response_deserializer=evaluation.Evaluation.deserialize, + ) + return self._stubs["get_evaluation"] + + @property + def search_evaluations( + self, + ) -> Callable[ + [data_labeling_service.SearchEvaluationsRequest], + Awaitable[data_labeling_service.SearchEvaluationsResponse], + ]: + r"""Return a callable for the search evaluations method over gRPC. + + Searches + [evaluations][google.cloud.datalabeling.v1beta1.Evaluation] + within a project. + + Returns: + Callable[[~.SearchEvaluationsRequest], + Awaitable[~.SearchEvaluationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_evaluations" not in self._stubs: + self._stubs["search_evaluations"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchEvaluations", + request_serializer=data_labeling_service.SearchEvaluationsRequest.serialize, + response_deserializer=data_labeling_service.SearchEvaluationsResponse.deserialize, + ) + return self._stubs["search_evaluations"] + + @property + def search_example_comparisons( + self, + ) -> Callable[ + [data_labeling_service.SearchExampleComparisonsRequest], + Awaitable[data_labeling_service.SearchExampleComparisonsResponse], + ]: + r"""Return a callable for the search example comparisons method over gRPC. 
+ + Searches example comparisons from an evaluation. The + return format is a list of example comparisons that show + ground truth and prediction(s) for a single input. + Search by providing an evaluation ID. + + Returns: + Callable[[~.SearchExampleComparisonsRequest], + Awaitable[~.SearchExampleComparisonsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_example_comparisons" not in self._stubs: + self._stubs["search_example_comparisons"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/SearchExampleComparisons", + request_serializer=data_labeling_service.SearchExampleComparisonsRequest.serialize, + response_deserializer=data_labeling_service.SearchExampleComparisonsResponse.deserialize, + ) + return self._stubs["search_example_comparisons"] + + @property + def create_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.CreateEvaluationJobRequest], + Awaitable[evaluation_job.EvaluationJob], + ]: + r"""Return a callable for the create evaluation job method over gRPC. + + Creates an evaluation job. + + Returns: + Callable[[~.CreateEvaluationJobRequest], + Awaitable[~.EvaluationJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_evaluation_job" not in self._stubs: + self._stubs["create_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateEvaluationJob", + request_serializer=data_labeling_service.CreateEvaluationJobRequest.serialize, + response_deserializer=evaluation_job.EvaluationJob.deserialize, + ) + return self._stubs["create_evaluation_job"] + + @property + def update_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.UpdateEvaluationJobRequest], + Awaitable[gcd_evaluation_job.EvaluationJob], + ]: + r"""Return a callable for the update evaluation job method over gRPC. + + Updates an evaluation job. You can only update certain fields of + the job's + [EvaluationJobConfig][google.cloud.datalabeling.v1beta1.EvaluationJobConfig]: + ``humanAnnotationConfig.instruction``, ``exampleCount``, and + ``exampleSamplePercentage``. + + If you want to change any other aspect of the evaluation job, + you must delete the job and create a new one. + + Returns: + Callable[[~.UpdateEvaluationJobRequest], + Awaitable[~.EvaluationJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_evaluation_job" not in self._stubs: + self._stubs["update_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/UpdateEvaluationJob", + request_serializer=data_labeling_service.UpdateEvaluationJobRequest.serialize, + response_deserializer=gcd_evaluation_job.EvaluationJob.deserialize, + ) + return self._stubs["update_evaluation_job"] + + @property + def get_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.GetEvaluationJobRequest], + Awaitable[evaluation_job.EvaluationJob], + ]: + r"""Return a callable for the get evaluation job method over gRPC. + + Gets an evaluation job by resource name. + + Returns: + Callable[[~.GetEvaluationJobRequest], + Awaitable[~.EvaluationJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_evaluation_job" not in self._stubs: + self._stubs["get_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetEvaluationJob", + request_serializer=data_labeling_service.GetEvaluationJobRequest.serialize, + response_deserializer=evaluation_job.EvaluationJob.deserialize, + ) + return self._stubs["get_evaluation_job"] + + @property + def pause_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.PauseEvaluationJobRequest], Awaitable[empty.Empty] + ]: + r"""Return a callable for the pause evaluation job method over gRPC. + + Pauses an evaluation job. Pausing an evaluation job that is + already in a ``PAUSED`` state is a no-op. + + Returns: + Callable[[~.PauseEvaluationJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pause_evaluation_job" not in self._stubs: + self._stubs["pause_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/PauseEvaluationJob", + request_serializer=data_labeling_service.PauseEvaluationJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["pause_evaluation_job"] + + @property + def resume_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.ResumeEvaluationJobRequest], Awaitable[empty.Empty] + ]: + r"""Return a callable for the resume evaluation job method over gRPC. + + Resumes a paused evaluation job. A deleted evaluation + job can't be resumed. Resuming a running or scheduled + evaluation job is a no-op. + + Returns: + Callable[[~.ResumeEvaluationJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "resume_evaluation_job" not in self._stubs: + self._stubs["resume_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ResumeEvaluationJob", + request_serializer=data_labeling_service.ResumeEvaluationJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["resume_evaluation_job"] + + @property + def delete_evaluation_job( + self, + ) -> Callable[ + [data_labeling_service.DeleteEvaluationJobRequest], Awaitable[empty.Empty] + ]: + r"""Return a callable for the delete evaluation job method over gRPC. + + Stops and deletes an evaluation job. 
+ + Returns: + Callable[[~.DeleteEvaluationJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_evaluation_job" not in self._stubs: + self._stubs["delete_evaluation_job"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteEvaluationJob", + request_serializer=data_labeling_service.DeleteEvaluationJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_evaluation_job"] + + @property + def list_evaluation_jobs( + self, + ) -> Callable[ + [data_labeling_service.ListEvaluationJobsRequest], + Awaitable[data_labeling_service.ListEvaluationJobsResponse], + ]: + r"""Return a callable for the list evaluation jobs method over gRPC. + + Lists all evaluation jobs within a project with + possible filters. Pagination is supported. + + Returns: + Callable[[~.ListEvaluationJobsRequest], + Awaitable[~.ListEvaluationJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_evaluation_jobs" not in self._stubs: + self._stubs["list_evaluation_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListEvaluationJobs", + request_serializer=data_labeling_service.ListEvaluationJobsRequest.serialize, + response_deserializer=data_labeling_service.ListEvaluationJobsResponse.deserialize, + ) + return self._stubs["list_evaluation_jobs"] + + +__all__ = ("DataLabelingServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/datalabeling_v1beta1/types.py b/google/cloud/datalabeling_v1beta1/types.py deleted file mode 100644 index 6d9d313..0000000 --- a/google/cloud/datalabeling_v1beta1/types.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.datalabeling_v1beta1.proto import annotation_pb2 -from google.cloud.datalabeling_v1beta1.proto import annotation_spec_set_pb2 -from google.cloud.datalabeling_v1beta1.proto import data_labeling_service_pb2 -from google.cloud.datalabeling_v1beta1.proto import data_payloads_pb2 -from google.cloud.datalabeling_v1beta1.proto import dataset_pb2 -from google.cloud.datalabeling_v1beta1.proto import evaluation_job_pb2 -from google.cloud.datalabeling_v1beta1.proto import evaluation_pb2 -from google.cloud.datalabeling_v1beta1.proto import human_annotation_config_pb2 -from google.cloud.datalabeling_v1beta1.proto import instruction_pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - operations_pb2 as proto_operations_pb2, -) -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 - - -_shared_modules = [ - longrunning_operations_pb2, - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -] - -_local_modules = [ - annotation_pb2, - annotation_spec_set_pb2, - data_labeling_service_pb2, - data_payloads_pb2, - dataset_pb2, - evaluation_job_pb2, - evaluation_pb2, - human_annotation_config_pb2, - instruction_pb2, - proto_operations_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.datalabeling_v1beta1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - 
-__all__ = tuple(sorted(names)) diff --git a/google/cloud/datalabeling_v1beta1/types/__init__.py b/google/cloud/datalabeling_v1beta1/types/__init__.py new file mode 100644 index 0000000..83828e1 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/__init__.py @@ -0,0 +1,301 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .annotation_spec_set import ( + AnnotationSpecSet, + AnnotationSpec, +) +from .annotation import ( + Annotation, + AnnotationValue, + ImageClassificationAnnotation, + Vertex, + NormalizedVertex, + BoundingPoly, + NormalizedBoundingPoly, + ImageBoundingPolyAnnotation, + Polyline, + NormalizedPolyline, + ImagePolylineAnnotation, + ImageSegmentationAnnotation, + TextClassificationAnnotation, + TextEntityExtractionAnnotation, + SequentialSegment, + TimeSegment, + VideoClassificationAnnotation, + ObjectTrackingFrame, + VideoObjectTrackingAnnotation, + VideoEventAnnotation, + AnnotationMetadata, + OperatorMetadata, +) +from .data_payloads import ( + ImagePayload, + TextPayload, + VideoThumbnail, + VideoPayload, +) +from .human_annotation_config import ( + HumanAnnotationConfig, + ImageClassificationConfig, + BoundingPolyConfig, + PolylineConfig, + SegmentationConfig, + VideoClassificationConfig, + ObjectDetectionConfig, + ObjectTrackingConfig, + EventConfig, + TextClassificationConfig, + SentimentConfig, + TextEntityExtractionConfig, +) +from .dataset import ( + Dataset, + 
InputConfig, + TextMetadata, + ClassificationMetadata, + GcsSource, + BigQuerySource, + OutputConfig, + GcsDestination, + GcsFolderDestination, + DataItem, + AnnotatedDataset, + LabelStats, + AnnotatedDatasetMetadata, + Example, +) +from .evaluation import ( + Evaluation, + EvaluationConfig, + BoundingBoxEvaluationOptions, + EvaluationMetrics, + ClassificationMetrics, + ObjectDetectionMetrics, + PrCurve, + ConfusionMatrix, +) +from .evaluation_job import ( + EvaluationJob, + EvaluationJobConfig, + EvaluationJobAlertConfig, + Attempt, +) +from .instruction import ( + Instruction, + CsvInstruction, + PdfInstruction, +) +from .data_labeling_service import ( + CreateDatasetRequest, + GetDatasetRequest, + ListDatasetsRequest, + ListDatasetsResponse, + DeleteDatasetRequest, + ImportDataRequest, + ExportDataRequest, + GetDataItemRequest, + ListDataItemsRequest, + ListDataItemsResponse, + GetAnnotatedDatasetRequest, + ListAnnotatedDatasetsRequest, + ListAnnotatedDatasetsResponse, + DeleteAnnotatedDatasetRequest, + LabelImageRequest, + LabelVideoRequest, + LabelTextRequest, + GetExampleRequest, + ListExamplesRequest, + ListExamplesResponse, + CreateAnnotationSpecSetRequest, + GetAnnotationSpecSetRequest, + ListAnnotationSpecSetsRequest, + ListAnnotationSpecSetsResponse, + DeleteAnnotationSpecSetRequest, + CreateInstructionRequest, + GetInstructionRequest, + DeleteInstructionRequest, + ListInstructionsRequest, + ListInstructionsResponse, + GetEvaluationRequest, + SearchEvaluationsRequest, + SearchEvaluationsResponse, + SearchExampleComparisonsRequest, + SearchExampleComparisonsResponse, + CreateEvaluationJobRequest, + UpdateEvaluationJobRequest, + GetEvaluationJobRequest, + PauseEvaluationJobRequest, + ResumeEvaluationJobRequest, + DeleteEvaluationJobRequest, + ListEvaluationJobsRequest, + ListEvaluationJobsResponse, +) +from .operations import ( + ImportDataOperationResponse, + ExportDataOperationResponse, + ImportDataOperationMetadata, + ExportDataOperationMetadata, + 
LabelOperationMetadata, + LabelImageClassificationOperationMetadata, + LabelImageBoundingBoxOperationMetadata, + LabelImageOrientedBoundingBoxOperationMetadata, + LabelImageBoundingPolyOperationMetadata, + LabelImagePolylineOperationMetadata, + LabelImageSegmentationOperationMetadata, + LabelVideoClassificationOperationMetadata, + LabelVideoObjectDetectionOperationMetadata, + LabelVideoObjectTrackingOperationMetadata, + LabelVideoEventOperationMetadata, + LabelTextClassificationOperationMetadata, + LabelTextEntityExtractionOperationMetadata, + CreateInstructionMetadata, +) + + +__all__ = ( + "AnnotationSpecSet", + "AnnotationSpec", + "Annotation", + "AnnotationValue", + "ImageClassificationAnnotation", + "Vertex", + "NormalizedVertex", + "BoundingPoly", + "NormalizedBoundingPoly", + "ImageBoundingPolyAnnotation", + "Polyline", + "NormalizedPolyline", + "ImagePolylineAnnotation", + "ImageSegmentationAnnotation", + "TextClassificationAnnotation", + "TextEntityExtractionAnnotation", + "SequentialSegment", + "TimeSegment", + "VideoClassificationAnnotation", + "ObjectTrackingFrame", + "VideoObjectTrackingAnnotation", + "VideoEventAnnotation", + "AnnotationMetadata", + "OperatorMetadata", + "ImagePayload", + "TextPayload", + "VideoThumbnail", + "VideoPayload", + "HumanAnnotationConfig", + "ImageClassificationConfig", + "BoundingPolyConfig", + "PolylineConfig", + "SegmentationConfig", + "VideoClassificationConfig", + "ObjectDetectionConfig", + "ObjectTrackingConfig", + "EventConfig", + "TextClassificationConfig", + "SentimentConfig", + "TextEntityExtractionConfig", + "Dataset", + "InputConfig", + "TextMetadata", + "ClassificationMetadata", + "GcsSource", + "BigQuerySource", + "OutputConfig", + "GcsDestination", + "GcsFolderDestination", + "DataItem", + "AnnotatedDataset", + "LabelStats", + "AnnotatedDatasetMetadata", + "Example", + "Evaluation", + "EvaluationConfig", + "BoundingBoxEvaluationOptions", + "EvaluationMetrics", + "ClassificationMetrics", + 
"ObjectDetectionMetrics", + "PrCurve", + "ConfusionMatrix", + "EvaluationJob", + "EvaluationJobConfig", + "EvaluationJobAlertConfig", + "Attempt", + "Instruction", + "CsvInstruction", + "PdfInstruction", + "CreateDatasetRequest", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "DeleteDatasetRequest", + "ImportDataRequest", + "ExportDataRequest", + "GetDataItemRequest", + "ListDataItemsRequest", + "ListDataItemsResponse", + "GetAnnotatedDatasetRequest", + "ListAnnotatedDatasetsRequest", + "ListAnnotatedDatasetsResponse", + "DeleteAnnotatedDatasetRequest", + "LabelImageRequest", + "LabelVideoRequest", + "LabelTextRequest", + "GetExampleRequest", + "ListExamplesRequest", + "ListExamplesResponse", + "CreateAnnotationSpecSetRequest", + "GetAnnotationSpecSetRequest", + "ListAnnotationSpecSetsRequest", + "ListAnnotationSpecSetsResponse", + "DeleteAnnotationSpecSetRequest", + "CreateInstructionRequest", + "GetInstructionRequest", + "DeleteInstructionRequest", + "ListInstructionsRequest", + "ListInstructionsResponse", + "GetEvaluationRequest", + "SearchEvaluationsRequest", + "SearchEvaluationsResponse", + "SearchExampleComparisonsRequest", + "SearchExampleComparisonsResponse", + "CreateEvaluationJobRequest", + "UpdateEvaluationJobRequest", + "GetEvaluationJobRequest", + "PauseEvaluationJobRequest", + "ResumeEvaluationJobRequest", + "DeleteEvaluationJobRequest", + "ListEvaluationJobsRequest", + "ListEvaluationJobsResponse", + "ImportDataOperationResponse", + "ExportDataOperationResponse", + "ImportDataOperationMetadata", + "ExportDataOperationMetadata", + "LabelOperationMetadata", + "LabelImageClassificationOperationMetadata", + "LabelImageBoundingBoxOperationMetadata", + "LabelImageOrientedBoundingBoxOperationMetadata", + "LabelImageBoundingPolyOperationMetadata", + "LabelImagePolylineOperationMetadata", + "LabelImageSegmentationOperationMetadata", + "LabelVideoClassificationOperationMetadata", + "LabelVideoObjectDetectionOperationMetadata", + 
"LabelVideoObjectTrackingOperationMetadata", + "LabelVideoEventOperationMetadata", + "LabelTextClassificationOperationMetadata", + "LabelTextEntityExtractionOperationMetadata", + "CreateInstructionMetadata", +) diff --git a/google/cloud/datalabeling_v1beta1/types/annotation.py b/google/cloud/datalabeling_v1beta1/types/annotation.py new file mode 100644 index 0000000..6e75d18 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/annotation.py @@ -0,0 +1,588 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={ + "AnnotationSource", + "AnnotationSentiment", + "AnnotationType", + "Annotation", + "AnnotationValue", + "ImageClassificationAnnotation", + "Vertex", + "NormalizedVertex", + "BoundingPoly", + "NormalizedBoundingPoly", + "ImageBoundingPolyAnnotation", + "Polyline", + "NormalizedPolyline", + "ImagePolylineAnnotation", + "ImageSegmentationAnnotation", + "TextClassificationAnnotation", + "TextEntityExtractionAnnotation", + "SequentialSegment", + "TimeSegment", + "VideoClassificationAnnotation", + "ObjectTrackingFrame", + "VideoObjectTrackingAnnotation", + "VideoEventAnnotation", + "AnnotationMetadata", + "OperatorMetadata", + }, +) + + +class AnnotationSource(proto.Enum): + r"""Specifies where the annotation comes from (whether it was + provided by a human labeler or a different source). + """ + ANNOTATION_SOURCE_UNSPECIFIED = 0 + OPERATOR = 3 + + +class AnnotationSentiment(proto.Enum): + r"""""" + ANNOTATION_SENTIMENT_UNSPECIFIED = 0 + NEGATIVE = 1 + POSITIVE = 2 + + +class AnnotationType(proto.Enum): + r"""""" + ANNOTATION_TYPE_UNSPECIFIED = 0 + IMAGE_CLASSIFICATION_ANNOTATION = 1 + IMAGE_BOUNDING_BOX_ANNOTATION = 2 + IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION = 13 + IMAGE_BOUNDING_POLY_ANNOTATION = 10 + IMAGE_POLYLINE_ANNOTATION = 11 + IMAGE_SEGMENTATION_ANNOTATION = 12 + VIDEO_SHOTS_CLASSIFICATION_ANNOTATION = 3 + VIDEO_OBJECT_TRACKING_ANNOTATION = 4 + VIDEO_OBJECT_DETECTION_ANNOTATION = 5 + VIDEO_EVENT_ANNOTATION = 6 + TEXT_CLASSIFICATION_ANNOTATION = 8 + TEXT_ENTITY_EXTRACTION_ANNOTATION = 9 + GENERAL_CLASSIFICATION_ANNOTATION = 14 + + +class Annotation(proto.Message): + r"""Annotation for Example. Each example may have one or more + annotations. 
For example, in an image classification problem, each
+    image might have one or more labels. We call the labels bound to
+    this image an Annotation.
+
+    Attributes:
+        name (str):
+            Output only. Unique name of this annotation, format is:
+
+            projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/{annotated_dataset}/examples/{example_id}/annotations/{annotation_id}
+        annotation_source (~.annotation.AnnotationSource):
+            Output only. The source of the annotation.
+        annotation_value (~.annotation.AnnotationValue):
+            Output only. This is the actual annotation
+            value, e.g. classification, bounding box values
+            are stored here.
+        annotation_metadata (~.annotation.AnnotationMetadata):
+            Output only. Annotation metadata, including
+            information like votes for labels.
+        annotation_sentiment (~.annotation.AnnotationSentiment):
+            Output only. Sentiment for this annotation.
+    """
+
+    name = proto.Field(proto.STRING, number=1)
+
+    annotation_source = proto.Field(proto.ENUM, number=2, enum="AnnotationSource",)
+
+    annotation_value = proto.Field(proto.MESSAGE, number=3, message="AnnotationValue",)
+
+    annotation_metadata = proto.Field(
+        proto.MESSAGE, number=4, message="AnnotationMetadata",
+    )
+
+    annotation_sentiment = proto.Field(
+        proto.ENUM, number=6, enum="AnnotationSentiment",
+    )
+
+
+class AnnotationValue(proto.Message):
+    r"""Annotation value for an example.
+
+    Attributes:
+        image_classification_annotation (~.annotation.ImageClassificationAnnotation):
+            Annotation value for image classification
+            case.
+        image_bounding_poly_annotation (~.annotation.ImageBoundingPolyAnnotation):
+            Annotation value for image bounding box,
+            oriented bounding box and polygon cases.
+        image_polyline_annotation (~.annotation.ImagePolylineAnnotation):
+            Annotation value for image polyline cases.
+            Polyline here is different from BoundingPoly. It
+            is formed by line segments connected to each
+            other but is not a closed form (BoundingPoly). The
+            line segments can cross each other.
+ image_segmentation_annotation (~.annotation.ImageSegmentationAnnotation): + Annotation value for image segmentation. + text_classification_annotation (~.annotation.TextClassificationAnnotation): + Annotation value for text classification + case. + text_entity_extraction_annotation (~.annotation.TextEntityExtractionAnnotation): + Annotation value for text entity extraction + case. + video_classification_annotation (~.annotation.VideoClassificationAnnotation): + Annotation value for video classification + case. + video_object_tracking_annotation (~.annotation.VideoObjectTrackingAnnotation): + Annotation value for video object detection + and tracking case. + video_event_annotation (~.annotation.VideoEventAnnotation): + Annotation value for video event case. + """ + + image_classification_annotation = proto.Field( + proto.MESSAGE, + number=1, + oneof="value_type", + message="ImageClassificationAnnotation", + ) + + image_bounding_poly_annotation = proto.Field( + proto.MESSAGE, + number=2, + oneof="value_type", + message="ImageBoundingPolyAnnotation", + ) + + image_polyline_annotation = proto.Field( + proto.MESSAGE, number=8, oneof="value_type", message="ImagePolylineAnnotation", + ) + + image_segmentation_annotation = proto.Field( + proto.MESSAGE, + number=9, + oneof="value_type", + message="ImageSegmentationAnnotation", + ) + + text_classification_annotation = proto.Field( + proto.MESSAGE, + number=3, + oneof="value_type", + message="TextClassificationAnnotation", + ) + + text_entity_extraction_annotation = proto.Field( + proto.MESSAGE, + number=10, + oneof="value_type", + message="TextEntityExtractionAnnotation", + ) + + video_classification_annotation = proto.Field( + proto.MESSAGE, + number=4, + oneof="value_type", + message="VideoClassificationAnnotation", + ) + + video_object_tracking_annotation = proto.Field( + proto.MESSAGE, + number=5, + oneof="value_type", + message="VideoObjectTrackingAnnotation", + ) + + video_event_annotation = proto.Field( + 
proto.MESSAGE, number=6, oneof="value_type", message="VideoEventAnnotation", + ) + + +class ImageClassificationAnnotation(proto.Message): + r"""Image classification annotation definition. + + Attributes: + annotation_spec (~.annotation_spec_set.AnnotationSpec): + Label of image. + """ + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + +class Vertex(proto.Message): + r"""A vertex represents a 2D point in the image. + NOTE: the vertex coordinates are in the same scale as the + original image. + + Attributes: + x (int): + X coordinate. + y (int): + Y coordinate. + """ + + x = proto.Field(proto.INT32, number=1) + + y = proto.Field(proto.INT32, number=2) + + +class NormalizedVertex(proto.Message): + r"""A vertex represents a 2D point in the image. + NOTE: the normalized vertex coordinates are relative to the + original image and range from 0 to 1. + + Attributes: + x (float): + X coordinate. + y (float): + Y coordinate. + """ + + x = proto.Field(proto.FLOAT, number=1) + + y = proto.Field(proto.FLOAT, number=2) + + +class BoundingPoly(proto.Message): + r"""A bounding polygon in the image. + + Attributes: + vertices (Sequence[~.annotation.Vertex]): + The bounding polygon vertices. + """ + + vertices = proto.RepeatedField(proto.MESSAGE, number=1, message=Vertex,) + + +class NormalizedBoundingPoly(proto.Message): + r"""Normalized bounding polygon. + + Attributes: + normalized_vertices (Sequence[~.annotation.NormalizedVertex]): + The bounding polygon normalized vertices. + """ + + normalized_vertices = proto.RepeatedField( + proto.MESSAGE, number=1, message=NormalizedVertex, + ) + + +class ImageBoundingPolyAnnotation(proto.Message): + r"""Image bounding poly annotation. It represents a polygon + including bounding box in the image. 
+ + Attributes: + bounding_poly (~.annotation.BoundingPoly): + + normalized_bounding_poly (~.annotation.NormalizedBoundingPoly): + + annotation_spec (~.annotation_spec_set.AnnotationSpec): + Label of object in this bounding polygon. + """ + + bounding_poly = proto.Field( + proto.MESSAGE, number=2, oneof="bounded_area", message=BoundingPoly, + ) + + normalized_bounding_poly = proto.Field( + proto.MESSAGE, number=3, oneof="bounded_area", message=NormalizedBoundingPoly, + ) + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + +class Polyline(proto.Message): + r"""A line with multiple line segments. + + Attributes: + vertices (Sequence[~.annotation.Vertex]): + The polyline vertices. + """ + + vertices = proto.RepeatedField(proto.MESSAGE, number=1, message=Vertex,) + + +class NormalizedPolyline(proto.Message): + r"""Normalized polyline. + + Attributes: + normalized_vertices (Sequence[~.annotation.NormalizedVertex]): + The normalized polyline vertices. + """ + + normalized_vertices = proto.RepeatedField( + proto.MESSAGE, number=1, message=NormalizedVertex, + ) + + +class ImagePolylineAnnotation(proto.Message): + r"""A polyline for the image annotation. + + Attributes: + polyline (~.annotation.Polyline): + + normalized_polyline (~.annotation.NormalizedPolyline): + + annotation_spec (~.annotation_spec_set.AnnotationSpec): + Label of this polyline. + """ + + polyline = proto.Field(proto.MESSAGE, number=2, oneof="poly", message=Polyline,) + + normalized_polyline = proto.Field( + proto.MESSAGE, number=3, oneof="poly", message=NormalizedPolyline, + ) + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + +class ImageSegmentationAnnotation(proto.Message): + r"""Image segmentation annotation. + + Attributes: + annotation_colors (Sequence[~.annotation.ImageSegmentationAnnotation.AnnotationColorsEntry]): + The mapping between rgb color and annotation + spec. 
The key is the rgb color represented in + format of rgb(0, 0, 0). The value is the + AnnotationSpec. + mime_type (str): + Image format. + image_bytes (bytes): + A byte string of a full image's color map. + """ + + annotation_colors = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message=annotation_spec_set.AnnotationSpec, + ) + + mime_type = proto.Field(proto.STRING, number=2) + + image_bytes = proto.Field(proto.BYTES, number=3) + + +class TextClassificationAnnotation(proto.Message): + r"""Text classification annotation. + + Attributes: + annotation_spec (~.annotation_spec_set.AnnotationSpec): + Label of the text. + """ + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + +class TextEntityExtractionAnnotation(proto.Message): + r"""Text entity extraction annotation. + + Attributes: + annotation_spec (~.annotation_spec_set.AnnotationSpec): + Label of the text entities. + sequential_segment (~.annotation.SequentialSegment): + Position of the entity. + """ + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + sequential_segment = proto.Field( + proto.MESSAGE, number=2, message="SequentialSegment", + ) + + +class SequentialSegment(proto.Message): + r"""Start and end position in a sequence (e.g. text segment). + + Attributes: + start (int): + Start position (inclusive). + end (int): + End position (exclusive). + """ + + start = proto.Field(proto.INT32, number=1) + + end = proto.Field(proto.INT32, number=2) + + +class TimeSegment(proto.Message): + r"""A time period inside of an example that has a time dimension + (e.g. video). + + Attributes: + start_time_offset (~.duration.Duration): + Start of the time segment (inclusive), + represented as the duration since the example + start. + end_time_offset (~.duration.Duration): + End of the time segment (exclusive), + represented as the duration since the example + start. 
+ """ + + start_time_offset = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + end_time_offset = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + + +class VideoClassificationAnnotation(proto.Message): + r"""Video classification annotation. + + Attributes: + time_segment (~.annotation.TimeSegment): + The time segment of the video to which the + annotation applies. + annotation_spec (~.annotation_spec_set.AnnotationSpec): + Label of the segment specified by time_segment. + """ + + time_segment = proto.Field(proto.MESSAGE, number=1, message=TimeSegment,) + + annotation_spec = proto.Field( + proto.MESSAGE, number=2, message=annotation_spec_set.AnnotationSpec, + ) + + +class ObjectTrackingFrame(proto.Message): + r"""Video frame level annotation for object detection and + tracking. + + Attributes: + bounding_poly (~.annotation.BoundingPoly): + + normalized_bounding_poly (~.annotation.NormalizedBoundingPoly): + + time_offset (~.duration.Duration): + The time offset of this frame relative to the + beginning of the video. + """ + + bounding_poly = proto.Field( + proto.MESSAGE, number=1, oneof="bounded_area", message=BoundingPoly, + ) + + normalized_bounding_poly = proto.Field( + proto.MESSAGE, number=2, oneof="bounded_area", message=NormalizedBoundingPoly, + ) + + time_offset = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + + +class VideoObjectTrackingAnnotation(proto.Message): + r"""Video object tracking annotation. + + Attributes: + annotation_spec (~.annotation_spec_set.AnnotationSpec): + Label of the object tracked in this + annotation. + time_segment (~.annotation.TimeSegment): + The time segment of the video to which object + tracking applies. + object_tracking_frames (Sequence[~.annotation.ObjectTrackingFrame]): + The list of frames where this object track + appears. 
+    """
+
+    annotation_spec = proto.Field(
+        proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec,
+    )
+
+    time_segment = proto.Field(proto.MESSAGE, number=2, message=TimeSegment,)
+
+    object_tracking_frames = proto.RepeatedField(
+        proto.MESSAGE, number=3, message=ObjectTrackingFrame,
+    )
+
+
+class VideoEventAnnotation(proto.Message):
+    r"""Video event annotation.
+
+    Attributes:
+        annotation_spec (~.annotation_spec_set.AnnotationSpec):
+            Label of the event in this annotation.
+        time_segment (~.annotation.TimeSegment):
+            The time segment of the video to which the
+            annotation applies.
+    """
+
+    annotation_spec = proto.Field(
+        proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec,
+    )
+
+    time_segment = proto.Field(proto.MESSAGE, number=2, message=TimeSegment,)
+
+
+class AnnotationMetadata(proto.Message):
+    r"""Additional information associated with the annotation.
+
+    Attributes:
+        operator_metadata (~.annotation.OperatorMetadata):
+            Metadata related to human labeling.
+    """
+
+    operator_metadata = proto.Field(
+        proto.MESSAGE, number=2, message="OperatorMetadata",
+    )
+
+
+class OperatorMetadata(proto.Message):
+    r"""General information useful for labels coming from
+    contributors.
+
+    Attributes:
+        score (float):
+            Confidence score corresponding to a label.
+            For example, if 3 contributors have answered the
+            question and 2 of them agree on the final label,
+            the confidence score will be 0.67 (2/3).
+        total_votes (int):
+            The total number of contributors that answer
+            this question.
+        label_votes (int):
+            The total number of contributors that choose
+            this label.
+        comments (Sequence[str]):
+            Comments from contributors.
+ """ + + score = proto.Field(proto.FLOAT, number=1) + + total_votes = proto.Field(proto.INT32, number=2) + + label_votes = proto.Field(proto.INT32, number=3) + + comments = proto.RepeatedField(proto.STRING, number=4) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/annotation_spec_set.py b/google/cloud/datalabeling_v1beta1/types/annotation_spec_set.py new file mode 100644 index 0000000..4444155 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/annotation_spec_set.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={"AnnotationSpecSet", "AnnotationSpec",}, +) + + +class AnnotationSpecSet(proto.Message): + r"""An AnnotationSpecSet is a collection of label definitions. + For example, in image classification tasks, you define a set of + possible labels for images as an AnnotationSpecSet. An + AnnotationSpecSet is immutable upon creation. + + Attributes: + name (str): + Output only. The AnnotationSpecSet resource name in the + following format: + + "projects/{project_id}/annotationSpecSets/{annotation_spec_set_id}". + display_name (str): + Required. The display name for + AnnotationSpecSet that you define when you + create it. Maximum of 64 characters. + description (str): + Optional. 
User-provided description of the + annotation specification set. The description + can be up to 10,000 characters long. + annotation_specs (Sequence[~.annotation_spec_set.AnnotationSpec]): + Required. The array of AnnotationSpecs that + you define when you create the + AnnotationSpecSet. These are the possible labels + for the labeling task. + blocking_resources (Sequence[str]): + Output only. The names of any related + resources that are blocking changes to the + annotation spec set. + """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + annotation_specs = proto.RepeatedField( + proto.MESSAGE, number=4, message="AnnotationSpec", + ) + + blocking_resources = proto.RepeatedField(proto.STRING, number=5) + + +class AnnotationSpec(proto.Message): + r"""Container of information related to one possible annotation that can + be used in a labeling task. For example, an image classification + task where images are labeled as ``dog`` or ``cat`` must reference + an AnnotationSpec for ``dog`` and an AnnotationSpec for ``cat``. + + Attributes: + display_name (str): + Required. The display name of the + AnnotationSpec. Maximum of 64 characters. + description (str): + Optional. User-provided description of the + annotation specification. The description can be + up to 10,000 characters long. 
+ """ + + display_name = proto.Field(proto.STRING, number=1) + + description = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/data_labeling_service.py b/google/cloud/datalabeling_v1beta1/types/data_labeling_service.py new file mode 100644 index 0000000..21c02d7 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/data_labeling_service.py @@ -0,0 +1,1168 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.cloud.datalabeling_v1beta1.types import ( + annotation_spec_set as gcd_annotation_spec_set, +) +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job as gcd_evaluation_job +from google.cloud.datalabeling_v1beta1.types import human_annotation_config +from google.cloud.datalabeling_v1beta1.types import instruction as gcd_instruction +from google.protobuf import field_mask_pb2 as field_mask # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={ + "CreateDatasetRequest", + "GetDatasetRequest", + "ListDatasetsRequest", + "ListDatasetsResponse", + "DeleteDatasetRequest", + "ImportDataRequest", + "ExportDataRequest", + "GetDataItemRequest", + "ListDataItemsRequest", + "ListDataItemsResponse", + "GetAnnotatedDatasetRequest", + "ListAnnotatedDatasetsRequest", + "ListAnnotatedDatasetsResponse", + "DeleteAnnotatedDatasetRequest", + "LabelImageRequest", + "LabelVideoRequest", + "LabelTextRequest", + "GetExampleRequest", + "ListExamplesRequest", + "ListExamplesResponse", + "CreateAnnotationSpecSetRequest", + "GetAnnotationSpecSetRequest", + "ListAnnotationSpecSetsRequest", + "ListAnnotationSpecSetsResponse", + "DeleteAnnotationSpecSetRequest", + "CreateInstructionRequest", + "GetInstructionRequest", + "DeleteInstructionRequest", + "ListInstructionsRequest", + "ListInstructionsResponse", + "GetEvaluationRequest", + "SearchEvaluationsRequest", + "SearchEvaluationsResponse", + "SearchExampleComparisonsRequest", + "SearchExampleComparisonsResponse", + "CreateEvaluationJobRequest", + "UpdateEvaluationJobRequest", + "GetEvaluationJobRequest", + "PauseEvaluationJobRequest", + "ResumeEvaluationJobRequest", + "DeleteEvaluationJobRequest", + "ListEvaluationJobsRequest", + "ListEvaluationJobsResponse", + }, +) + + +class 
CreateDatasetRequest(proto.Message): + r"""Request message for CreateDataset. + + Attributes: + parent (str): + Required. Dataset resource parent, format: + projects/{project_id} + dataset (~.gcd_dataset.Dataset): + Required. The dataset to be created. + """ + + parent = proto.Field(proto.STRING, number=1) + + dataset = proto.Field(proto.MESSAGE, number=2, message=gcd_dataset.Dataset,) + + +class GetDatasetRequest(proto.Message): + r"""Request message for GetDataSet. + + Attributes: + name (str): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListDatasetsRequest(proto.Message): + r"""Request message for ListDataset. + + Attributes: + parent (str): + Required. Dataset resource parent, format: + projects/{project_id} + filter (str): + Optional. Filter on dataset is not supported + at this moment. + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by + [ListDatasetsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDatasetsResponse.next_page_token] + of the previous [DataLabelingService.ListDatasets] call. + Returns the first page if empty. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListDatasetsResponse(proto.Message): + r"""Results of listing datasets within a project. + + Attributes: + datasets (Sequence[~.gcd_dataset.Dataset]): + The list of datasets to return. + next_page_token (str): + A token to retrieve next page of results. 
+ """ + + @property + def raw_page(self): + return self + + datasets = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcd_dataset.Dataset, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteDatasetRequest(proto.Message): + r"""Request message for DeleteDataset. + + Attributes: + name (str): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class ImportDataRequest(proto.Message): + r"""Request message for ImportData API. + + Attributes: + name (str): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + input_config (~.gcd_dataset.InputConfig): + Required. Specify the input source of the + data. + user_email_address (str): + Email of the user who started the import task + and should be notified by email. If empty no + notification will be sent. + """ + + name = proto.Field(proto.STRING, number=1) + + input_config = proto.Field( + proto.MESSAGE, number=2, message=gcd_dataset.InputConfig, + ) + + user_email_address = proto.Field(proto.STRING, number=3) + + +class ExportDataRequest(proto.Message): + r"""Request message for ExportData API. + + Attributes: + name (str): + Required. Dataset resource name, format: + projects/{project_id}/datasets/{dataset_id} + annotated_dataset (str): + Required. Annotated dataset resource name. DataItem in + Dataset and their annotations in specified annotated dataset + will be exported. It's in format of + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + filter (str): + Optional. Filter is not supported at this + moment. + output_config (~.gcd_dataset.OutputConfig): + Required. Specify the output destination. + user_email_address (str): + Email of the user who started the export task + and should be notified by email. If empty no + notification will be sent. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + annotated_dataset = proto.Field(proto.STRING, number=2) + + filter = proto.Field(proto.STRING, number=3) + + output_config = proto.Field( + proto.MESSAGE, number=4, message=gcd_dataset.OutputConfig, + ) + + user_email_address = proto.Field(proto.STRING, number=5) + + +class GetDataItemRequest(proto.Message): + r"""Request message for GetDataItem. + + Attributes: + name (str): + Required. The name of the data item to get, format: + projects/{project_id}/datasets/{dataset_id}/dataItems/{data_item_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListDataItemsRequest(proto.Message): + r"""Request message for ListDataItems. + + Attributes: + parent (str): + Required. Name of the dataset to list data items, format: + projects/{project_id}/datasets/{dataset_id} + filter (str): + Optional. Filter is not supported at this + moment. + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by + [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] + of the previous [DataLabelingService.ListDataItems] call. + Return first page if empty. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListDataItemsResponse(proto.Message): + r"""Results of listing data items in a dataset. + + Attributes: + data_items (Sequence[~.gcd_dataset.DataItem]): + The list of data items to return. + next_page_token (str): + A token to retrieve next page of results. 
+ """ + + @property + def raw_page(self): + return self + + data_items = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcd_dataset.DataItem, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetAnnotatedDatasetRequest(proto.Message): + r"""Request message for GetAnnotatedDataset. + + Attributes: + name (str): + Required. Name of the annotated dataset to get, format: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListAnnotatedDatasetsRequest(proto.Message): + r"""Request message for ListAnnotatedDatasets. + + Attributes: + parent (str): + Required. Name of the dataset to list annotated datasets, + format: projects/{project_id}/datasets/{dataset_id} + filter (str): + Optional. Filter is not supported at this + moment. + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by + [ListAnnotatedDatasetsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse.next_page_token] + of the previous [DataLabelingService.ListAnnotatedDatasets] + call. Return first page if empty. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListAnnotatedDatasetsResponse(proto.Message): + r"""Results of listing annotated datasets for a dataset. + + Attributes: + annotated_datasets (Sequence[~.gcd_dataset.AnnotatedDataset]): + The list of annotated datasets to return. + next_page_token (str): + A token to retrieve next page of results. 
+ """ + + @property + def raw_page(self): + return self + + annotated_datasets = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcd_dataset.AnnotatedDataset, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteAnnotatedDatasetRequest(proto.Message): + r"""Request message for DeleteAnnotatedDataset. + + Attributes: + name (str): + Required. Name of the annotated dataset to delete, format: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class LabelImageRequest(proto.Message): + r"""Request message for starting an image labeling task. + + Attributes: + image_classification_config (~.human_annotation_config.ImageClassificationConfig): + Configuration for image classification task. One of + image_classification_config, bounding_poly_config, + polyline_config and segmentation_config are required. + bounding_poly_config (~.human_annotation_config.BoundingPolyConfig): + Configuration for bounding box and bounding poly task. One + of image_classification_config, bounding_poly_config, + polyline_config and segmentation_config are required. + polyline_config (~.human_annotation_config.PolylineConfig): + Configuration for polyline task. One of + image_classification_config, bounding_poly_config, + polyline_config and segmentation_config are required. + segmentation_config (~.human_annotation_config.SegmentationConfig): + Configuration for segmentation task. One of + image_classification_config, bounding_poly_config, + polyline_config and segmentation_config are required. + parent (str): + Required. Name of the dataset to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Required. Basic human annotation config. + feature (~.data_labeling_service.LabelImageRequest.Feature): + Required. The type of image labeling task. 
+ """ + + class Feature(proto.Enum): + r"""Image labeling task feature.""" + FEATURE_UNSPECIFIED = 0 + CLASSIFICATION = 1 + BOUNDING_BOX = 2 + ORIENTED_BOUNDING_BOX = 6 + BOUNDING_POLY = 3 + POLYLINE = 4 + SEGMENTATION = 5 + + image_classification_config = proto.Field( + proto.MESSAGE, + number=4, + oneof="request_config", + message=human_annotation_config.ImageClassificationConfig, + ) + + bounding_poly_config = proto.Field( + proto.MESSAGE, + number=5, + oneof="request_config", + message=human_annotation_config.BoundingPolyConfig, + ) + + polyline_config = proto.Field( + proto.MESSAGE, + number=6, + oneof="request_config", + message=human_annotation_config.PolylineConfig, + ) + + segmentation_config = proto.Field( + proto.MESSAGE, + number=7, + oneof="request_config", + message=human_annotation_config.SegmentationConfig, + ) + + parent = proto.Field(proto.STRING, number=1) + + basic_config = proto.Field( + proto.MESSAGE, number=2, message=human_annotation_config.HumanAnnotationConfig, + ) + + feature = proto.Field(proto.ENUM, number=3, enum=Feature,) + + +class LabelVideoRequest(proto.Message): + r"""Request message for LabelVideo. + + Attributes: + video_classification_config (~.human_annotation_config.VideoClassificationConfig): + Configuration for video classification task. One of + video_classification_config, object_detection_config, + object_tracking_config and event_config is required. + object_detection_config (~.human_annotation_config.ObjectDetectionConfig): + Configuration for video object detection task. One of + video_classification_config, object_detection_config, + object_tracking_config and event_config is required. + object_tracking_config (~.human_annotation_config.ObjectTrackingConfig): + Configuration for video object tracking task. One of + video_classification_config, object_detection_config, + object_tracking_config and event_config is required. + event_config (~.human_annotation_config.EventConfig): + Configuration for video event task. 
One of + video_classification_config, object_detection_config, + object_tracking_config and event_config is required. + parent (str): + Required. Name of the dataset to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Required. Basic human annotation config. + feature (~.data_labeling_service.LabelVideoRequest.Feature): + Required. The type of video labeling task. + """ + + class Feature(proto.Enum): + r"""Video labeling task feature.""" + FEATURE_UNSPECIFIED = 0 + CLASSIFICATION = 1 + OBJECT_DETECTION = 2 + OBJECT_TRACKING = 3 + EVENT = 4 + + video_classification_config = proto.Field( + proto.MESSAGE, + number=4, + oneof="request_config", + message=human_annotation_config.VideoClassificationConfig, + ) + + object_detection_config = proto.Field( + proto.MESSAGE, + number=5, + oneof="request_config", + message=human_annotation_config.ObjectDetectionConfig, + ) + + object_tracking_config = proto.Field( + proto.MESSAGE, + number=6, + oneof="request_config", + message=human_annotation_config.ObjectTrackingConfig, + ) + + event_config = proto.Field( + proto.MESSAGE, + number=7, + oneof="request_config", + message=human_annotation_config.EventConfig, + ) + + parent = proto.Field(proto.STRING, number=1) + + basic_config = proto.Field( + proto.MESSAGE, number=2, message=human_annotation_config.HumanAnnotationConfig, + ) + + feature = proto.Field(proto.ENUM, number=3, enum=Feature,) + + +class LabelTextRequest(proto.Message): + r"""Request message for LabelText. + + Attributes: + text_classification_config (~.human_annotation_config.TextClassificationConfig): + Configuration for text classification task. One of + text_classification_config and text_entity_extraction_config + is required. + text_entity_extraction_config (~.human_annotation_config.TextEntityExtractionConfig): + Configuration for entity extraction task. 
One of + text_classification_config and text_entity_extraction_config + is required. + parent (str): + Required. Name of the data set to request labeling task, + format: projects/{project_id}/datasets/{dataset_id} + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Required. Basic human annotation config. + feature (~.data_labeling_service.LabelTextRequest.Feature): + Required. The type of text labeling task. + """ + + class Feature(proto.Enum): + r"""Text labeling task feature.""" + FEATURE_UNSPECIFIED = 0 + TEXT_CLASSIFICATION = 1 + TEXT_ENTITY_EXTRACTION = 2 + + text_classification_config = proto.Field( + proto.MESSAGE, + number=4, + oneof="request_config", + message=human_annotation_config.TextClassificationConfig, + ) + + text_entity_extraction_config = proto.Field( + proto.MESSAGE, + number=5, + oneof="request_config", + message=human_annotation_config.TextEntityExtractionConfig, + ) + + parent = proto.Field(proto.STRING, number=1) + + basic_config = proto.Field( + proto.MESSAGE, number=2, message=human_annotation_config.HumanAnnotationConfig, + ) + + feature = proto.Field(proto.ENUM, number=6, enum=Feature,) + + +class GetExampleRequest(proto.Message): + r"""Request message for GetExample + + Attributes: + name (str): + Required. Name of example, format: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id}/examples/{example_id} + filter (str): + Optional. An expression for filtering Examples. Filter by + annotation_spec.display_name is supported. Format + "annotation_spec.display_name = {display_name}". + """ + + name = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + +class ListExamplesRequest(proto.Message): + r"""Request message for ListExamples. + + Attributes: + parent (str): + Required. Example resource parent. + filter (str): + Optional. An expression for filtering Examples. 
For + annotated datasets that have annotation spec set, filter by + annotation_spec.display_name is supported. Format + "annotation_spec.display_name = {display_name}". + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by + [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] + of the previous [DataLabelingService.ListExamples] call. + Return first page if empty. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListExamplesResponse(proto.Message): + r"""Results of listing Examples in and annotated dataset. + + Attributes: + examples (Sequence[~.gcd_dataset.Example]): + The list of examples to return. + next_page_token (str): + A token to retrieve next page of results. + """ + + @property + def raw_page(self): + return self + + examples = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcd_dataset.Example, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class CreateAnnotationSpecSetRequest(proto.Message): + r"""Request message for CreateAnnotationSpecSet. + + Attributes: + parent (str): + Required. AnnotationSpecSet resource parent, format: + projects/{project_id} + annotation_spec_set (~.gcd_annotation_spec_set.AnnotationSpecSet): + Required. Annotation spec set to create. Annotation specs + must be included. Only one annotation spec will be accepted + for annotation specs with same display_name. 
+ """ + + parent = proto.Field(proto.STRING, number=1) + + annotation_spec_set = proto.Field( + proto.MESSAGE, number=2, message=gcd_annotation_spec_set.AnnotationSpecSet, + ) + + +class GetAnnotationSpecSetRequest(proto.Message): + r"""Request message for GetAnnotationSpecSet. + + Attributes: + name (str): + Required. AnnotationSpecSet resource name, format: + projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListAnnotationSpecSetsRequest(proto.Message): + r"""Request message for ListAnnotationSpecSets. + + Attributes: + parent (str): + Required. Parent of AnnotationSpecSet resource, format: + projects/{project_id} + filter (str): + Optional. Filter is not supported at this + moment. + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by + [ListAnnotationSpecSetsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse.next_page_token] + of the previous [DataLabelingService.ListAnnotationSpecSets] + call. Return first page if empty. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListAnnotationSpecSetsResponse(proto.Message): + r"""Results of listing annotation spec set under a project. + + Attributes: + annotation_spec_sets (Sequence[~.gcd_annotation_spec_set.AnnotationSpecSet]): + The list of annotation spec sets. + next_page_token (str): + A token to retrieve next page of results. 
+ """ + + @property + def raw_page(self): + return self + + annotation_spec_sets = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcd_annotation_spec_set.AnnotationSpecSet, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteAnnotationSpecSetRequest(proto.Message): + r"""Request message for DeleteAnnotationSpecSet. + + Attributes: + name (str): + Required. AnnotationSpec resource name, format: + ``projects/{project_id}/annotationSpecSets/{annotation_spec_set_id}``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateInstructionRequest(proto.Message): + r"""Request message for CreateInstruction. + + Attributes: + parent (str): + Required. Instruction resource parent, format: + projects/{project_id} + instruction (~.gcd_instruction.Instruction): + Required. Instruction of how to perform the + labeling task. + """ + + parent = proto.Field(proto.STRING, number=1) + + instruction = proto.Field( + proto.MESSAGE, number=2, message=gcd_instruction.Instruction, + ) + + +class GetInstructionRequest(proto.Message): + r"""Request message for GetInstruction. + + Attributes: + name (str): + Required. Instruction resource name, format: + projects/{project_id}/instructions/{instruction_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteInstructionRequest(proto.Message): + r"""Request message for DeleteInstruction. + + Attributes: + name (str): + Required. Instruction resource name, format: + projects/{project_id}/instructions/{instruction_id} + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListInstructionsRequest(proto.Message): + r"""Request message for ListInstructions. + + Attributes: + parent (str): + Required. Instruction resource parent, format: + projects/{project_id} + filter (str): + Optional. Filter is not supported at this + moment. + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. 
+ page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by + [ListInstructionsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListInstructionsResponse.next_page_token] + of the previous [DataLabelingService.ListInstructions] call. + Return first page if empty. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListInstructionsResponse(proto.Message): + r"""Results of listing instructions under a project. + + Attributes: + instructions (Sequence[~.gcd_instruction.Instruction]): + The list of Instructions to return. + next_page_token (str): + A token to retrieve next page of results. + """ + + @property + def raw_page(self): + return self + + instructions = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcd_instruction.Instruction, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetEvaluationRequest(proto.Message): + r"""Request message for GetEvaluation. + + Attributes: + name (str): + Required. Name of the evaluation. Format: + + "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}' + """ + + name = proto.Field(proto.STRING, number=1) + + +class SearchEvaluationsRequest(proto.Message): + r"""Request message for SearchEvaluation. + + Attributes: + parent (str): + Required. Evaluation search parent (project ID). Format: + "projects/{project_id}". + filter (str): + Optional. 
To search evaluations, you can filter by the + following: + + - evaluation\_job.evaluation_job_id (the last part of + [EvaluationJob.name][google.cloud.datalabeling.v1beta1.EvaluationJob.name]) + - evaluation\_job.model_id (the {model_name} portion of + [EvaluationJob.modelVersion][google.cloud.datalabeling.v1beta1.EvaluationJob.model_version]) + - evaluation\_job.evaluation_job_run_time_start (Minimum + threshold for the + [evaluationJobRunTime][google.cloud.datalabeling.v1beta1.Evaluation.evaluation_job_run_time] + that created the evaluation) + - evaluation\_job.evaluation_job_run_time_end (Maximum + threshold for the + [evaluationJobRunTime][google.cloud.datalabeling.v1beta1.Evaluation.evaluation_job_run_time] + that created the evaluation) + - evaluation\_job.job_state + ([EvaluationJob.state][google.cloud.datalabeling.v1beta1.EvaluationJob.state]) + - annotation\_spec.display_name (the Evaluation contains a + metric for the annotation spec with this + [displayName][google.cloud.datalabeling.v1beta1.AnnotationSpec.display_name]) + + To filter by multiple critiera, use the ``AND`` operator or + the ``OR`` operator. The following examples shows a string + that filters by several critiera: + + "evaluation\ *job.evaluation_job_id = {evaluation_job_id} + AND evaluation*\ job.model_id = {model_name} AND + evaluation\ *job.evaluation_job_run_time_start = + {timestamp_1} AND + evaluation*\ job.evaluation_job_run_time_end = {timestamp_2} + AND annotation\_spec.display_name = {display_name}". + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by the + [nextPageToken][google.cloud.datalabeling.v1beta1.SearchEvaluationsResponse.next_page_token] + of the response to a previous search request. + + If you don't specify this field, the API call requests the + first page of the search. 
+ """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class SearchEvaluationsResponse(proto.Message): + r"""Results of searching evaluations. + + Attributes: + evaluations (Sequence[~.evaluation.Evaluation]): + The list of evaluations matching the search. + next_page_token (str): + A token to retrieve next page of results. + """ + + @property + def raw_page(self): + return self + + evaluations = proto.RepeatedField( + proto.MESSAGE, number=1, message=evaluation.Evaluation, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class SearchExampleComparisonsRequest(proto.Message): + r"""Request message of SearchExampleComparisons. + + Attributes: + parent (str): + Required. Name of the + [Evaluation][google.cloud.datalabeling.v1beta1.Evaluation] + resource to search for example comparisons from. Format: + + "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}". + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by the + [nextPageToken][SearchExampleComparisons.next_page_token] of + the response to a previous search rquest. + + If you don't specify this field, the API call requests the + first page of the search. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class SearchExampleComparisonsResponse(proto.Message): + r"""Results of searching example comparisons. + + Attributes: + example_comparisons (Sequence[~.data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison]): + A list of example comparisons matching the + search criteria. 
+ next_page_token (str): + A token to retrieve next page of results. + """ + + class ExampleComparison(proto.Message): + r"""Example comparisons comparing ground truth output and + predictions for a specific input. + + Attributes: + ground_truth_example (~.gcd_dataset.Example): + The ground truth output for the input. + model_created_examples (Sequence[~.gcd_dataset.Example]): + Predictions by the model for the input. + """ + + ground_truth_example = proto.Field( + proto.MESSAGE, number=1, message=gcd_dataset.Example, + ) + + model_created_examples = proto.RepeatedField( + proto.MESSAGE, number=2, message=gcd_dataset.Example, + ) + + @property + def raw_page(self): + return self + + example_comparisons = proto.RepeatedField( + proto.MESSAGE, number=1, message=ExampleComparison, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class CreateEvaluationJobRequest(proto.Message): + r"""Request message for CreateEvaluationJob. + + Attributes: + parent (str): + Required. Evaluation job resource parent. Format: + "projects/{project_id}". + job (~.gcd_evaluation_job.EvaluationJob): + Required. The evaluation job to create. + """ + + parent = proto.Field(proto.STRING, number=1) + + job = proto.Field( + proto.MESSAGE, number=2, message=gcd_evaluation_job.EvaluationJob, + ) + + +class UpdateEvaluationJobRequest(proto.Message): + r"""Request message for UpdateEvaluationJob. + + Attributes: + evaluation_job (~.gcd_evaluation_job.EvaluationJob): + Required. Evaluation job that is going to be + updated. + update_mask (~.field_mask.FieldMask): + Optional. Mask for which fields to update. You can only + provide the following fields: + + - ``evaluationJobConfig.humanAnnotationConfig.instruction`` + - ``evaluationJobConfig.exampleCount`` + - ``evaluationJobConfig.exampleSamplePercentage`` + + You can provide more than one of these fields by separating + them with commas. 
+ """ + + evaluation_job = proto.Field( + proto.MESSAGE, number=1, message=gcd_evaluation_job.EvaluationJob, + ) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class GetEvaluationJobRequest(proto.Message): + r"""Request message for GetEvaluationJob. + + Attributes: + name (str): + Required. Name of the evaluation job. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + """ + + name = proto.Field(proto.STRING, number=1) + + +class PauseEvaluationJobRequest(proto.Message): + r"""Request message for PauseEvaluationJob. + + Attributes: + name (str): + Required. Name of the evaluation job that is going to be + paused. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + """ + + name = proto.Field(proto.STRING, number=1) + + +class ResumeEvaluationJobRequest(proto.Message): + r"""Request message ResumeEvaluationJob. + + Attributes: + name (str): + Required. Name of the evaluation job that is going to be + resumed. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteEvaluationJobRequest(proto.Message): + r"""Request message DeleteEvaluationJob. + + Attributes: + name (str): + Required. Name of the evaluation job that is going to be + deleted. Format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListEvaluationJobsRequest(proto.Message): + r"""Request message for ListEvaluationJobs. + + Attributes: + parent (str): + Required. Evaluation job resource parent. Format: + "projects/{project_id}". + filter (str): + Optional. 
You can filter the jobs to list by model_id (also + known as model_name, as described in + [EvaluationJob.modelVersion][google.cloud.datalabeling.v1beta1.EvaluationJob.model_version]) + or by evaluation job state (as described in + [EvaluationJob.state][google.cloud.datalabeling.v1beta1.EvaluationJob.state]). + To filter by both criteria, use the ``AND`` operator or the + ``OR`` operator. For example, you can use the following + string for your filter: "evaluation\ *job.model_id = + {model_name} AND evaluation*\ job.state = + {evaluation_job_state}". + page_size (int): + Optional. Requested page size. Server may + return fewer results than requested. Default + value is 100. + page_token (str): + Optional. A token identifying a page of results for the + server to return. Typically obtained by the + [nextPageToken][google.cloud.datalabeling.v1beta1.ListEvaluationJobsResponse.next_page_token] + in the response to the previous request. The request returns + the first page if this is empty. + """ + + parent = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListEvaluationJobsResponse(proto.Message): + r"""Results for listing evaluation jobs. + + Attributes: + evaluation_jobs (Sequence[~.gcd_evaluation_job.EvaluationJob]): + The list of evaluation jobs to return. + next_page_token (str): + A token to retrieve next page of results. 
+ """ + + @property + def raw_page(self): + return self + + evaluation_jobs = proto.RepeatedField( + proto.MESSAGE, number=1, message=gcd_evaluation_job.EvaluationJob, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/data_payloads.py b/google/cloud/datalabeling_v1beta1/types/data_payloads.py new file mode 100644 index 0000000..6117229 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/data_payloads.py @@ -0,0 +1,112 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={"ImagePayload", "TextPayload", "VideoThumbnail", "VideoPayload",}, +) + + +class ImagePayload(proto.Message): + r"""Container of information about an image. + + Attributes: + mime_type (str): + Image format. + image_thumbnail (bytes): + A byte string of a thumbnail image. + image_uri (str): + Image uri from the user bucket. + signed_uri (str): + Signed uri of the image file in the service + bucket. 
+ """ + + mime_type = proto.Field(proto.STRING, number=1) + + image_thumbnail = proto.Field(proto.BYTES, number=2) + + image_uri = proto.Field(proto.STRING, number=3) + + signed_uri = proto.Field(proto.STRING, number=4) + + +class TextPayload(proto.Message): + r"""Container of information about a piece of text. + + Attributes: + text_content (str): + Text content. + """ + + text_content = proto.Field(proto.STRING, number=1) + + +class VideoThumbnail(proto.Message): + r"""Container of information of a video thumbnail. + + Attributes: + thumbnail (bytes): + A byte string of the video frame. + time_offset (~.duration.Duration): + Time offset relative to the beginning of the + video, corresponding to the video frame where + the thumbnail has been extracted from. + """ + + thumbnail = proto.Field(proto.BYTES, number=1) + + time_offset = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + + +class VideoPayload(proto.Message): + r"""Container of information of a video. + + Attributes: + mime_type (str): + Video format. + video_uri (str): + Video uri from the user bucket. + video_thumbnails (Sequence[~.data_payloads.VideoThumbnail]): + The list of video thumbnails. + frame_rate (float): + FPS of the video. + signed_uri (str): + Signed uri of the video file in the service + bucket. 
+ """ + + mime_type = proto.Field(proto.STRING, number=1) + + video_uri = proto.Field(proto.STRING, number=2) + + video_thumbnails = proto.RepeatedField( + proto.MESSAGE, number=3, message=VideoThumbnail, + ) + + frame_rate = proto.Field(proto.FLOAT, number=4) + + signed_uri = proto.Field(proto.STRING, number=5) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/dataset.py b/google/cloud/datalabeling_v1beta1/types/dataset.py new file mode 100644 index 0000000..8771716 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/dataset.py @@ -0,0 +1,555 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.cloud.datalabeling_v1beta1.types import annotation +from google.cloud.datalabeling_v1beta1.types import data_payloads +from google.cloud.datalabeling_v1beta1.types import ( + human_annotation_config as gcd_human_annotation_config, +) +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={ + "DataType", + "Dataset", + "InputConfig", + "TextMetadata", + "ClassificationMetadata", + "GcsSource", + "BigQuerySource", + "OutputConfig", + "GcsDestination", + "GcsFolderDestination", + "DataItem", + "AnnotatedDataset", + "LabelStats", + "AnnotatedDatasetMetadata", + "Example", + }, +) + + +class DataType(proto.Enum): + r"""""" + DATA_TYPE_UNSPECIFIED = 0 + IMAGE = 1 + VIDEO = 2 + TEXT = 4 + GENERAL_DATA = 6 + + +class Dataset(proto.Message): + r"""Dataset is the resource to hold your data. You can request + multiple labeling tasks for a dataset while each one will + generate an AnnotatedDataset. + + Attributes: + name (str): + Output only. Dataset resource name, format is: + projects/{project_id}/datasets/{dataset_id} + display_name (str): + Required. The display name of the dataset. + Maximum of 64 characters. + description (str): + Optional. User-provided description of the + annotation specification set. The description + can be up to 10000 characters long. + create_time (~.timestamp.Timestamp): + Output only. Time the dataset is created. + input_configs (Sequence[~.dataset.InputConfig]): + Output only. This is populated with the + original input configs where ImportData is + called. It is available only after the clients + import data to this dataset. + blocking_resources (Sequence[str]): + Output only. The names of any related + resources that are blocking changes to the + dataset. + data_item_count (int): + Output only. The number of data items in the + dataset. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + input_configs = proto.RepeatedField(proto.MESSAGE, number=5, message="InputConfig",) + + blocking_resources = proto.RepeatedField(proto.STRING, number=6) + + data_item_count = proto.Field(proto.INT64, number=7) + + +class InputConfig(proto.Message): + r"""The configuration of input data, including data type, + location, etc. + + Attributes: + text_metadata (~.dataset.TextMetadata): + Required for text import, as language code + must be specified. + gcs_source (~.dataset.GcsSource): + Source located in Cloud Storage. + bigquery_source (~.dataset.BigQuerySource): + Source located in BigQuery. You must specify this field if + you are using this InputConfig in an + [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob]. + data_type (~.dataset.DataType): + Required. Data type must be specifed when + user tries to import data. + annotation_type (~.annotation.AnnotationType): + Optional. The type of annotation to be performed on this + data. You must specify this field if you are using this + InputConfig in an + [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob]. + classification_metadata (~.dataset.ClassificationMetadata): + Optional. Metadata about annotations for the input. You must + specify this field if you are using this InputConfig in an + [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob] + for a model version that performs classification. 
+ """ + + text_metadata = proto.Field( + proto.MESSAGE, number=6, oneof="data_type_metadata", message="TextMetadata", + ) + + gcs_source = proto.Field( + proto.MESSAGE, number=2, oneof="source", message="GcsSource", + ) + + bigquery_source = proto.Field( + proto.MESSAGE, number=5, oneof="source", message="BigQuerySource", + ) + + data_type = proto.Field(proto.ENUM, number=1, enum="DataType",) + + annotation_type = proto.Field(proto.ENUM, number=3, enum=annotation.AnnotationType,) + + classification_metadata = proto.Field( + proto.MESSAGE, number=4, message="ClassificationMetadata", + ) + + +class TextMetadata(proto.Message): + r"""Metadata for the text. + + Attributes: + language_code (str): + The language of this text, as a + `BCP-47 `__. + Default value is en-US. + """ + + language_code = proto.Field(proto.STRING, number=1) + + +class ClassificationMetadata(proto.Message): + r"""Metadata for classification annotations. + + Attributes: + is_multi_label (bool): + Whether the classification task is multi- + abel or not. + """ + + is_multi_label = proto.Field(proto.BOOL, number=1) + + +class GcsSource(proto.Message): + r"""Source of the Cloud Storage file to be imported. + + Attributes: + input_uri (str): + Required. The input URI of source file. This must be a Cloud + Storage path (``gs://...``). + mime_type (str): + Required. The format of the source file. Only + "text/csv" is supported. + """ + + input_uri = proto.Field(proto.STRING, number=1) + + mime_type = proto.Field(proto.STRING, number=2) + + +class BigQuerySource(proto.Message): + r"""The BigQuery location for input data. If used in an + [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob], + this is where the service saves the prediction input and output + sampled from the model version. + + Attributes: + input_uri (str): + Required. BigQuery URI to a table, up to 2,000 characters + long. 
If you specify the URI of a table that does not exist, + Data Labeling Service creates a table at the URI with the + correct schema when you create your + [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob]. + If you specify the URI of a table that already exists, it + must have the `correct + schema `__. + + Provide the table URI in the following format: + + "bq://{your_project_id}/{your_dataset_name}/{your_table_name}" + + `Learn + more `__. + """ + + input_uri = proto.Field(proto.STRING, number=1) + + +class OutputConfig(proto.Message): + r"""The configuration of output data. + + Attributes: + gcs_destination (~.dataset.GcsDestination): + Output to a file in Cloud Storage. Should be + used for labeling output other than image + segmentation. + gcs_folder_destination (~.dataset.GcsFolderDestination): + Output to a folder in Cloud Storage. Should + be used for image segmentation labeling output. + """ + + gcs_destination = proto.Field( + proto.MESSAGE, number=1, oneof="destination", message="GcsDestination", + ) + + gcs_folder_destination = proto.Field( + proto.MESSAGE, number=2, oneof="destination", message="GcsFolderDestination", + ) + + +class GcsDestination(proto.Message): + r"""Export destination of the data.Only gcs path is allowed in + output_uri. + + Attributes: + output_uri (str): + Required. The output uri of destination file. + mime_type (str): + Required. The format of the gcs destination. + Only "text/csv" and "application/json" + are supported. + """ + + output_uri = proto.Field(proto.STRING, number=1) + + mime_type = proto.Field(proto.STRING, number=2) + + +class GcsFolderDestination(proto.Message): + r"""Export folder destination of the data. + + Attributes: + output_folder_uri (str): + Required. Cloud Storage directory to export + data to. + """ + + output_folder_uri = proto.Field(proto.STRING, number=1) + + +class DataItem(proto.Message): + r"""DataItem is a piece of data, without annotation. For example, + an image. 
+ + Attributes: + image_payload (~.data_payloads.ImagePayload): + The image payload, a container of the image + bytes/uri. + text_payload (~.data_payloads.TextPayload): + The text payload, a container of text + content. + video_payload (~.data_payloads.VideoPayload): + The video payload, a container of the video + uri. + name (str): + Output only. Name of the data item, in format of: + projects/{project_id}/datasets/{dataset_id}/dataItems/{data_item_id} + """ + + image_payload = proto.Field( + proto.MESSAGE, number=2, oneof="payload", message=data_payloads.ImagePayload, + ) + + text_payload = proto.Field( + proto.MESSAGE, number=3, oneof="payload", message=data_payloads.TextPayload, + ) + + video_payload = proto.Field( + proto.MESSAGE, number=4, oneof="payload", message=data_payloads.VideoPayload, + ) + + name = proto.Field(proto.STRING, number=1) + + +class AnnotatedDataset(proto.Message): + r"""AnnotatedDataset is a set holding annotations for data in a + Dataset. Each labeling task will generate an AnnotatedDataset + under the Dataset that the task is requested for. + + Attributes: + name (str): + Output only. AnnotatedDataset resource name in format of: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id} + display_name (str): + Output only. The display name of the + AnnotatedDataset. It is specified in + HumanAnnotationConfig when user starts a + labeling task. Maximum of 64 characters. + description (str): + Output only. The description of the + AnnotatedDataset. It is specified in + HumanAnnotationConfig when user starts a + labeling task. Maximum of 10000 characters. + annotation_source (~.annotation.AnnotationSource): + Output only. Source of the annotation. + annotation_type (~.annotation.AnnotationType): + Output only. Type of the annotation. It is + specified when starting labeling task. + example_count (int): + Output only. Number of examples in the + annotated dataset. 
+ completed_example_count (int): + Output only. Number of examples that have + annotation in the annotated dataset. + label_stats (~.dataset.LabelStats): + Output only. Per label statistics. + create_time (~.timestamp.Timestamp): + Output only. Time the AnnotatedDataset was + created. + metadata (~.dataset.AnnotatedDatasetMetadata): + Output only. Additional information about + AnnotatedDataset. + blocking_resources (Sequence[str]): + Output only. The names of any related + resources that are blocking changes to the + annotated dataset. + """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=9) + + annotation_source = proto.Field( + proto.ENUM, number=3, enum=annotation.AnnotationSource, + ) + + annotation_type = proto.Field(proto.ENUM, number=8, enum=annotation.AnnotationType,) + + example_count = proto.Field(proto.INT64, number=4) + + completed_example_count = proto.Field(proto.INT64, number=5) + + label_stats = proto.Field(proto.MESSAGE, number=6, message="LabelStats",) + + create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + metadata = proto.Field( + proto.MESSAGE, number=10, message="AnnotatedDatasetMetadata", + ) + + blocking_resources = proto.RepeatedField(proto.STRING, number=11) + + +class LabelStats(proto.Message): + r"""Statistics about annotation specs. + + Attributes: + example_count (Sequence[~.dataset.LabelStats.ExampleCountEntry]): + Map of each annotation spec's example count. + Key is the annotation spec name and value is the + number of examples for that annotation spec. If + the annotated dataset does not have annotation + spec, the map will return a pair where the key + is empty string and value is the total number of + annotations. + """ + + example_count = proto.MapField(proto.STRING, proto.INT64, number=1) + + +class AnnotatedDatasetMetadata(proto.Message): + r"""Metadata on AnnotatedDataset. 
+ + Attributes: + image_classification_config (~.gcd_human_annotation_config.ImageClassificationConfig): + Configuration for image classification task. + bounding_poly_config (~.gcd_human_annotation_config.BoundingPolyConfig): + Configuration for image bounding box and + bounding poly task. + polyline_config (~.gcd_human_annotation_config.PolylineConfig): + Configuration for image polyline task. + segmentation_config (~.gcd_human_annotation_config.SegmentationConfig): + Configuration for image segmentation task. + video_classification_config (~.gcd_human_annotation_config.VideoClassificationConfig): + Configuration for video classification task. + object_detection_config (~.gcd_human_annotation_config.ObjectDetectionConfig): + Configuration for video object detection + task. + object_tracking_config (~.gcd_human_annotation_config.ObjectTrackingConfig): + Configuration for video object tracking task. + event_config (~.gcd_human_annotation_config.EventConfig): + Configuration for video event labeling task. + text_classification_config (~.gcd_human_annotation_config.TextClassificationConfig): + Configuration for text classification task. + text_entity_extraction_config (~.gcd_human_annotation_config.TextEntityExtractionConfig): + Configuration for text entity extraction + task. + human_annotation_config (~.gcd_human_annotation_config.HumanAnnotationConfig): + HumanAnnotationConfig used when requesting + the human labeling task for this + AnnotatedDataset. 
+ """ + + image_classification_config = proto.Field( + proto.MESSAGE, + number=2, + oneof="annotation_request_config", + message=gcd_human_annotation_config.ImageClassificationConfig, + ) + + bounding_poly_config = proto.Field( + proto.MESSAGE, + number=3, + oneof="annotation_request_config", + message=gcd_human_annotation_config.BoundingPolyConfig, + ) + + polyline_config = proto.Field( + proto.MESSAGE, + number=4, + oneof="annotation_request_config", + message=gcd_human_annotation_config.PolylineConfig, + ) + + segmentation_config = proto.Field( + proto.MESSAGE, + number=5, + oneof="annotation_request_config", + message=gcd_human_annotation_config.SegmentationConfig, + ) + + video_classification_config = proto.Field( + proto.MESSAGE, + number=6, + oneof="annotation_request_config", + message=gcd_human_annotation_config.VideoClassificationConfig, + ) + + object_detection_config = proto.Field( + proto.MESSAGE, + number=7, + oneof="annotation_request_config", + message=gcd_human_annotation_config.ObjectDetectionConfig, + ) + + object_tracking_config = proto.Field( + proto.MESSAGE, + number=8, + oneof="annotation_request_config", + message=gcd_human_annotation_config.ObjectTrackingConfig, + ) + + event_config = proto.Field( + proto.MESSAGE, + number=9, + oneof="annotation_request_config", + message=gcd_human_annotation_config.EventConfig, + ) + + text_classification_config = proto.Field( + proto.MESSAGE, + number=10, + oneof="annotation_request_config", + message=gcd_human_annotation_config.TextClassificationConfig, + ) + + text_entity_extraction_config = proto.Field( + proto.MESSAGE, + number=11, + oneof="annotation_request_config", + message=gcd_human_annotation_config.TextEntityExtractionConfig, + ) + + human_annotation_config = proto.Field( + proto.MESSAGE, + number=1, + message=gcd_human_annotation_config.HumanAnnotationConfig, + ) + + +class Example(proto.Message): + r"""An Example is a piece of data and its annotation. 
For + example, an image with label "house". + + Attributes: + image_payload (~.data_payloads.ImagePayload): + The image payload, a container of the image + bytes/uri. + text_payload (~.data_payloads.TextPayload): + The text payload, a container of the text + content. + video_payload (~.data_payloads.VideoPayload): + The video payload, a container of the video + uri. + name (str): + Output only. Name of the example, in format of: + projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + {annotated_dataset_id}/examples/{example_id} + annotations (Sequence[~.annotation.Annotation]): + Output only. Annotations for the piece of + data in Example. One piece of data can have + multiple annotations. + """ + + image_payload = proto.Field( + proto.MESSAGE, number=2, oneof="payload", message=data_payloads.ImagePayload, + ) + + text_payload = proto.Field( + proto.MESSAGE, number=6, oneof="payload", message=data_payloads.TextPayload, + ) + + video_payload = proto.Field( + proto.MESSAGE, number=7, oneof="payload", message=data_payloads.VideoPayload, + ) + + name = proto.Field(proto.STRING, number=1) + + annotations = proto.RepeatedField( + proto.MESSAGE, number=5, message=annotation.Annotation, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/evaluation.py b/google/cloud/datalabeling_v1beta1/types/evaluation.py new file mode 100644 index 0000000..b8f902c --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/evaluation.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.datalabeling_v1beta1.types import annotation +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={ + "Evaluation", + "EvaluationConfig", + "BoundingBoxEvaluationOptions", + "EvaluationMetrics", + "ClassificationMetrics", + "ObjectDetectionMetrics", + "PrCurve", + "ConfusionMatrix", + }, +) + + +class Evaluation(proto.Message): + r"""Describes an evaluation between a machine learning model's + predictions and ground truth labels. Created when an + [EvaluationJob][google.cloud.datalabeling.v1beta1.EvaluationJob] + runs successfully. + + Attributes: + name (str): + Output only. Resource name of an evaluation. The name has + the following format: + + "projects/{project_id}/datasets/{dataset_id}/evaluations/{evaluation_id}' + config (~.evaluation.EvaluationConfig): + Output only. Options used in the evaluation + job that created this evaluation. + evaluation_job_run_time (~.timestamp.Timestamp): + Output only. Timestamp for when the + evaluation job that created this evaluation ran. + create_time (~.timestamp.Timestamp): + Output only. Timestamp for when this + evaluation was created. + evaluation_metrics (~.evaluation.EvaluationMetrics): + Output only. Metrics comparing predictions to + ground truth labels. + annotation_type (~.annotation.AnnotationType): + Output only. 
Type of task that the model version being + evaluated performs, as defined in the + + [evaluationJobConfig.inputConfig.annotationType][google.cloud.datalabeling.v1beta1.EvaluationJobConfig.input_config] + field of the evaluation job that created this evaluation. + evaluated_item_count (int): + Output only. The number of items in the + ground truth dataset that were used for this + evaluation. Only populated when the evaluation + is for certain AnnotationTypes. + """ + + name = proto.Field(proto.STRING, number=1) + + config = proto.Field(proto.MESSAGE, number=2, message="EvaluationConfig",) + + evaluation_job_run_time = proto.Field( + proto.MESSAGE, number=3, message=timestamp.Timestamp, + ) + + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + evaluation_metrics = proto.Field( + proto.MESSAGE, number=5, message="EvaluationMetrics", + ) + + annotation_type = proto.Field(proto.ENUM, number=6, enum=annotation.AnnotationType,) + + evaluated_item_count = proto.Field(proto.INT64, number=7) + + +class EvaluationConfig(proto.Message): + r"""Configuration details used for calculating evaluation metrics and + creating an + [Evaluation][google.cloud.datalabeling.v1beta1.Evaluation]. + + Attributes: + bounding_box_evaluation_options (~.evaluation.BoundingBoxEvaluationOptions): + Only specify this field if the related model performs image + object detection (``IMAGE_BOUNDING_BOX_ANNOTATION``). + Describes how to evaluate bounding boxes. + """ + + bounding_box_evaluation_options = proto.Field( + proto.MESSAGE, + number=1, + oneof="vertical_option", + message="BoundingBoxEvaluationOptions", + ) + + +class BoundingBoxEvaluationOptions(proto.Message): + r"""Options regarding evaluation between bounding boxes. + + Attributes: + iou_threshold (float): + Minimum [intersection-over-union + + (IOU)](/vision/automl/object-detection/docs/evaluate#intersection-over-union) + required for 2 bounding boxes to be considered a match. 
This + must be a number between 0 and 1. + """ + + iou_threshold = proto.Field(proto.FLOAT, number=1) + + +class EvaluationMetrics(proto.Message): + r""" + + Attributes: + classification_metrics (~.evaluation.ClassificationMetrics): + + object_detection_metrics (~.evaluation.ObjectDetectionMetrics): + + """ + + classification_metrics = proto.Field( + proto.MESSAGE, number=1, oneof="metrics", message="ClassificationMetrics", + ) + + object_detection_metrics = proto.Field( + proto.MESSAGE, number=2, oneof="metrics", message="ObjectDetectionMetrics", + ) + + +class ClassificationMetrics(proto.Message): + r"""Metrics calculated for a classification model. + + Attributes: + pr_curve (~.evaluation.PrCurve): + Precision-recall curve based on ground truth + labels, predicted labels, and scores for the + predicted labels. + confusion_matrix (~.evaluation.ConfusionMatrix): + Confusion matrix of predicted labels vs. + ground truth labels. + """ + + pr_curve = proto.Field(proto.MESSAGE, number=1, message="PrCurve",) + + confusion_matrix = proto.Field(proto.MESSAGE, number=2, message="ConfusionMatrix",) + + +class ObjectDetectionMetrics(proto.Message): + r"""Metrics calculated for an image object detection (bounding + box) model. + + Attributes: + pr_curve (~.evaluation.PrCurve): + Precision-recall curve. + """ + + pr_curve = proto.Field(proto.MESSAGE, number=1, message="PrCurve",) + + +class PrCurve(proto.Message): + r""" + + Attributes: + annotation_spec (~.annotation_spec_set.AnnotationSpec): + The annotation spec of the label for which + the precision-recall curve calculated. If this + field is empty, that means the precision-recall + curve is an aggregate curve for all labels. + area_under_curve (float): + Area under the precision-recall curve. Not to + be confused with area under a receiver operating + characteristic (ROC) curve. + confidence_metrics_entries (Sequence[~.evaluation.PrCurve.ConfidenceMetricsEntry]): + Entries that make up the precision-recall graph. 
Each entry + is a "point" on the graph drawn for a different + ``confidence_threshold``. + mean_average_precision (float): + Mean average precision of this curve. + """ + + class ConfidenceMetricsEntry(proto.Message): + r""" + + Attributes: + confidence_threshold (float): + Threshold used for this entry. + + For classification tasks, this is a classification + threshold: a predicted label is categorized as positive or + negative (in the context of this point on the PR curve) + based on whether the label's score meets this threshold. + + For image object detection (bounding box) tasks, this is the + [intersection-over-union + + (IOU)](/vision/automl/object-detection/docs/evaluate#intersection-over-union) + threshold for the context of this point on the PR curve. + recall (float): + Recall value. + precision (float): + Precision value. + f1_score (float): + Harmonic mean of recall and precision. + recall_at1 (float): + Recall value for entries with label that has + highest score. + precision_at1 (float): + Precision value for entries with label that + has highest score. + f1_score_at1 (float): + The harmonic mean of + [recall_at1][google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.recall_at1] + and + [precision_at1][google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.precision_at1]. + recall_at5 (float): + Recall value for entries with label that has + highest 5 scores. + precision_at5 (float): + Precision value for entries with label that + has highest 5 scores. + f1_score_at5 (float): + The harmonic mean of + [recall_at5][google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.recall_at5] + and + [precision_at5][google.cloud.datalabeling.v1beta1.PrCurve.ConfidenceMetricsEntry.precision_at5]. 
+ """ + + confidence_threshold = proto.Field(proto.FLOAT, number=1) + + recall = proto.Field(proto.FLOAT, number=2) + + precision = proto.Field(proto.FLOAT, number=3) + + f1_score = proto.Field(proto.FLOAT, number=4) + + recall_at1 = proto.Field(proto.FLOAT, number=5) + + precision_at1 = proto.Field(proto.FLOAT, number=6) + + f1_score_at1 = proto.Field(proto.FLOAT, number=7) + + recall_at5 = proto.Field(proto.FLOAT, number=8) + + precision_at5 = proto.Field(proto.FLOAT, number=9) + + f1_score_at5 = proto.Field(proto.FLOAT, number=10) + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + area_under_curve = proto.Field(proto.FLOAT, number=2) + + confidence_metrics_entries = proto.RepeatedField( + proto.MESSAGE, number=3, message=ConfidenceMetricsEntry, + ) + + mean_average_precision = proto.Field(proto.FLOAT, number=4) + + +class ConfusionMatrix(proto.Message): + r"""Confusion matrix of the model running the classification. + Only applicable when the metrics entry aggregates multiple + labels. Not applicable when the entry is for a single label. + + Attributes: + row (Sequence[~.evaluation.ConfusionMatrix.Row]): + + """ + + class ConfusionMatrixEntry(proto.Message): + r""" + + Attributes: + annotation_spec (~.annotation_spec_set.AnnotationSpec): + The annotation spec of a predicted label. + item_count (int): + Number of items predicted to have this label. (The ground + truth label for these items is the ``Row.annotationSpec`` of + this entry's parent.) + """ + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + item_count = proto.Field(proto.INT32, number=2) + + class Row(proto.Message): + r"""A row in the confusion matrix. Each entry in this row has the + same ground truth label. + + Attributes: + annotation_spec (~.annotation_spec_set.AnnotationSpec): + The annotation spec of the ground truth label + for this row. 
+ entries (Sequence[~.evaluation.ConfusionMatrix.ConfusionMatrixEntry]): + A list of the confusion matrix entries. One + entry for each possible predicted label. + """ + + annotation_spec = proto.Field( + proto.MESSAGE, number=1, message=annotation_spec_set.AnnotationSpec, + ) + + entries = proto.RepeatedField( + proto.MESSAGE, number=2, message="ConfusionMatrix.ConfusionMatrixEntry", + ) + + row = proto.RepeatedField(proto.MESSAGE, number=1, message=Row,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/evaluation_job.py b/google/cloud/datalabeling_v1beta1/types/evaluation_job.py new file mode 100644 index 0000000..e7dbe16 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/evaluation_job.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import ( + human_annotation_config as gcd_human_annotation_config, +) +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={ + "EvaluationJob", + "EvaluationJobConfig", + "EvaluationJobAlertConfig", + "Attempt", + }, +) + + +class EvaluationJob(proto.Message): + r"""Defines an evaluation job that runs periodically to generate + [Evaluations][google.cloud.datalabeling.v1beta1.Evaluation]. + `Creating an evaluation + job `__ is the + starting point for using continuous evaluation. + + Attributes: + name (str): + Output only. After you create a job, Data Labeling Service + assigns a name to the job with the following format: + + "projects/{project_id}/evaluationJobs/{evaluation_job_id}". + description (str): + Required. Description of the job. The + description can be up to 25,000 characters long. + state (~.evaluation_job.EvaluationJob.State): + Output only. Describes the current state of + the job. + schedule (str): + Required. Describes the interval at which the job runs. This + interval must be at least 1 day, and it is rounded to the + nearest day. For example, if you specify a 50-hour interval, + the job runs every 2 days. + + You can provide the schedule in `crontab + format `__ + or in an `English-like + format `__. + + Regardless of what you specify, the job will run at 10:00 AM + UTC. Only the interval from this schedule is used, not the + specific time of day. + model_version (str): + Required. The `AI Platform Prediction model + version `__ to be + evaluated. Prediction input and output is sampled from this + model version. 
When creating an evaluation job, specify the + model version in the following format: + + "projects/{project_id}/models/{model_name}/versions/{version_name}" + + There can only be one evaluation job per model version. + evaluation_job_config (~.evaluation_job.EvaluationJobConfig): + Required. Configuration details for the + evaluation job. + annotation_spec_set (str): + Required. Name of the + [AnnotationSpecSet][google.cloud.datalabeling.v1beta1.AnnotationSpecSet] + describing all the labels that your machine learning model + outputs. You must create this resource before you create an + evaluation job and provide its name in the following format: + + "projects/{project_id}/annotationSpecSets/{annotation_spec_set_id}". + label_missing_ground_truth (bool): + Required. Whether you want Data Labeling Service to provide + ground truth labels for prediction input. If you want the + service to assign human labelers to annotate your data, set + this to ``true``. If you want to provide your own ground + truth labels in the evaluation job's BigQuery table, set + this to ``false``. + attempts (Sequence[~.evaluation_job.Attempt]): + Output only. Every time the evaluation job + runs and an error occurs, the failed attempt is + appended to this array. + create_time (~.timestamp.Timestamp): + Output only. Timestamp of when this + evaluation job was created. 
+ """ + + class State(proto.Enum): + r"""State of the job.""" + STATE_UNSPECIFIED = 0 + SCHEDULED = 1 + RUNNING = 2 + PAUSED = 3 + STOPPED = 4 + + name = proto.Field(proto.STRING, number=1) + + description = proto.Field(proto.STRING, number=2) + + state = proto.Field(proto.ENUM, number=3, enum=State,) + + schedule = proto.Field(proto.STRING, number=4) + + model_version = proto.Field(proto.STRING, number=5) + + evaluation_job_config = proto.Field( + proto.MESSAGE, number=6, message="EvaluationJobConfig", + ) + + annotation_spec_set = proto.Field(proto.STRING, number=7) + + label_missing_ground_truth = proto.Field(proto.BOOL, number=8) + + attempts = proto.RepeatedField(proto.MESSAGE, number=9, message="Attempt",) + + create_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) + + +class EvaluationJobConfig(proto.Message): + r"""Configures specific details of how a continuous evaluation + job works. Provide this configuration when you create an + EvaluationJob. + + Attributes: + image_classification_config (~.gcd_human_annotation_config.ImageClassificationConfig): + Specify this field if your model version performs image + classification or general classification. + + ``annotationSpecSet`` in this configuration must match + [EvaluationJob.annotationSpecSet][google.cloud.datalabeling.v1beta1.EvaluationJob.annotation_spec_set]. + ``allowMultiLabel`` in this configuration must match + ``classificationMetadata.isMultiLabel`` in + [input_config][google.cloud.datalabeling.v1beta1.EvaluationJobConfig.input_config]. + bounding_poly_config (~.gcd_human_annotation_config.BoundingPolyConfig): + Specify this field if your model version performs image + object detection (bounding box detection). + + ``annotationSpecSet`` in this configuration must match + [EvaluationJob.annotationSpecSet][google.cloud.datalabeling.v1beta1.EvaluationJob.annotation_spec_set]. 
+ text_classification_config (~.gcd_human_annotation_config.TextClassificationConfig): + Specify this field if your model version performs text + classification. + + ``annotationSpecSet`` in this configuration must match + [EvaluationJob.annotationSpecSet][google.cloud.datalabeling.v1beta1.EvaluationJob.annotation_spec_set]. + ``allowMultiLabel`` in this configuration must match + ``classificationMetadata.isMultiLabel`` in + [input_config][google.cloud.datalabeling.v1beta1.EvaluationJobConfig.input_config]. + input_config (~.dataset.InputConfig): + Rquired. Details for the sampled prediction input. Within + this configuration, there are requirements for several + fields: + + - ``dataType`` must be one of ``IMAGE``, ``TEXT``, or + ``GENERAL_DATA``. + - ``annotationType`` must be one of + ``IMAGE_CLASSIFICATION_ANNOTATION``, + ``TEXT_CLASSIFICATION_ANNOTATION``, + ``GENERAL_CLASSIFICATION_ANNOTATION``, or + ``IMAGE_BOUNDING_BOX_ANNOTATION`` (image object + detection). + - If your machine learning model performs classification, + you must specify ``classificationMetadata.isMultiLabel``. + - You must specify ``bigquerySource`` (not ``gcsSource``). + evaluation_config (~.evaluation.EvaluationConfig): + Required. Details for calculating evaluation metrics and + creating + [Evaulations][google.cloud.datalabeling.v1beta1.Evaluation]. + If your model version performs image object detection, you + must specify the ``boundingBoxEvaluationOptions`` field + within this configuration. Otherwise, provide an empty + object for this configuration. + human_annotation_config (~.gcd_human_annotation_config.HumanAnnotationConfig): + Optional. Details for human annotation of your data. If you + set + [labelMissingGroundTruth][google.cloud.datalabeling.v1beta1.EvaluationJob.label_missing_ground_truth] + to ``true`` for this evaluation job, then you must specify + this field. If you plan to provide your own ground truth + labels, then omit this field. 
+ + Note that you must create an + [Instruction][google.cloud.datalabeling.v1beta1.Instruction] + resource before you can specify this field. Provide the name + of the instruction resource in the ``instruction`` field + within this configuration. + bigquery_import_keys (Sequence[~.evaluation_job.EvaluationJobConfig.BigqueryImportKeysEntry]): + Required. Prediction keys that tell Data Labeling Service + where to find the data for evaluation in your BigQuery + table. When the service samples prediction input and output + from your model version and saves it to BigQuery, the data + gets stored as JSON strings in the BigQuery table. These + keys tell Data Labeling Service how to parse the JSON. + + You can provide the following entries in this field: + + - ``data_json_key``: the data key for prediction input. You + must provide either this key or ``reference_json_key``. + - ``reference_json_key``: the data reference key for + prediction input. You must provide either this key or + ``data_json_key``. + - ``label_json_key``: the label key for prediction output. + Required. + - ``label_score_json_key``: the score key for prediction + output. Required. + - ``bounding_box_json_key``: the bounding box key for + prediction output. Required if your model version perform + image object detection. + + Learn `how to configure prediction + keys `__. + example_count (int): + Required. The maximum number of predictions to sample and + save to BigQuery during each [evaluation + interval][google.cloud.datalabeling.v1beta1.EvaluationJob.schedule]. + This limit overrides ``example_sample_percentage``: even if + the service has not sampled enough predictions to fulfill + ``example_sample_perecentage`` during an interval, it stops + sampling predictions when it meets this limit. + example_sample_percentage (float): + Required. Fraction of predictions to sample and save to + BigQuery during each [evaluation + interval][google.cloud.datalabeling.v1beta1.EvaluationJob.schedule]. 
+ For example, 0.1 means 10% of predictions served by your + model version get saved to BigQuery. + evaluation_job_alert_config (~.evaluation_job.EvaluationJobAlertConfig): + Optional. Configuration details for + evaluation job alerts. Specify this field if you + want to receive email alerts if the evaluation + job finds that your predictions have low mean + average precision during a run. + """ + + image_classification_config = proto.Field( + proto.MESSAGE, + number=4, + oneof="human_annotation_request_config", + message=gcd_human_annotation_config.ImageClassificationConfig, + ) + + bounding_poly_config = proto.Field( + proto.MESSAGE, + number=5, + oneof="human_annotation_request_config", + message=gcd_human_annotation_config.BoundingPolyConfig, + ) + + text_classification_config = proto.Field( + proto.MESSAGE, + number=8, + oneof="human_annotation_request_config", + message=gcd_human_annotation_config.TextClassificationConfig, + ) + + input_config = proto.Field(proto.MESSAGE, number=1, message=dataset.InputConfig,) + + evaluation_config = proto.Field( + proto.MESSAGE, number=2, message=evaluation.EvaluationConfig, + ) + + human_annotation_config = proto.Field( + proto.MESSAGE, + number=3, + message=gcd_human_annotation_config.HumanAnnotationConfig, + ) + + bigquery_import_keys = proto.MapField(proto.STRING, proto.STRING, number=9) + + example_count = proto.Field(proto.INT32, number=10) + + example_sample_percentage = proto.Field(proto.DOUBLE, number=11) + + evaluation_job_alert_config = proto.Field( + proto.MESSAGE, number=13, message="EvaluationJobAlertConfig", + ) + + +class EvaluationJobAlertConfig(proto.Message): + r"""Provides details for how an evaluation job sends email alerts + based on the results of a run. + + Attributes: + email (str): + Required. An email address to send alerts to. + min_acceptable_mean_average_precision (float): + Required. A number between 0 and 1 that describes a minimum + mean average precision threshold. 
When the evaluation job + runs, if it calculates that your model version's predictions + from the recent interval have + [meanAveragePrecision][google.cloud.datalabeling.v1beta1.PrCurve.mean_average_precision] + below this threshold, then it sends an alert to your + specified email. + """ + + email = proto.Field(proto.STRING, number=1) + + min_acceptable_mean_average_precision = proto.Field(proto.DOUBLE, number=2) + + +class Attempt(proto.Message): + r"""Records a failed evaluation job run. + + Attributes: + attempt_time (~.timestamp.Timestamp): + + partial_failures (Sequence[~.status.Status]): + Details of errors that occurred. + """ + + attempt_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + partial_failures = proto.RepeatedField( + proto.MESSAGE, number=2, message=status.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/human_annotation_config.py b/google/cloud/datalabeling_v1beta1/types/human_annotation_config.py new file mode 100644 index 0000000..4df8c01 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/human_annotation_config.py @@ -0,0 +1,326 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={ + "StringAggregationType", + "HumanAnnotationConfig", + "ImageClassificationConfig", + "BoundingPolyConfig", + "PolylineConfig", + "SegmentationConfig", + "VideoClassificationConfig", + "ObjectDetectionConfig", + "ObjectTrackingConfig", + "EventConfig", + "TextClassificationConfig", + "SentimentConfig", + "TextEntityExtractionConfig", + }, +) + + +class StringAggregationType(proto.Enum): + r"""""" + STRING_AGGREGATION_TYPE_UNSPECIFIED = 0 + MAJORITY_VOTE = 1 + UNANIMOUS_VOTE = 2 + NO_AGGREGATION = 3 + + +class HumanAnnotationConfig(proto.Message): + r"""Configuration for how human labeling task should be done. + + Attributes: + instruction (str): + Required. Instruction resource name. + annotated_dataset_display_name (str): + Required. A human-readable name for + AnnotatedDataset defined by users. Maximum of 64 + characters . + annotated_dataset_description (str): + Optional. A human-readable description for + AnnotatedDataset. The description can be up to + 10000 characters long. + label_group (str): + Optional. A human-readable label used to logically group + labeling tasks. This string must match the regular + expression ``[a-zA-Z\\d_-]{0,128}``. + language_code (str): + Optional. The Language of this question, as a + `BCP-47 `__. + Default value is en-US. Only need to set this when task is + language related. For example, French text classification. + replica_count (int): + Optional. Replication of questions. Each + question will be sent to up to this number of + contributors to label. Aggregated answers will + be returned. Default is set to 1. + For image related labeling, valid values are 1, + 3, 5. + question_duration (~.duration.Duration): + Optional. Maximum duration for contributors + to answer a question. Maximum is 3600 seconds. + Default is 3600 seconds. 
+ contributor_emails (Sequence[str]): + Optional. If you want your own labeling + contributors to manage and work on this labeling + request, you can set these contributors here. We + will give them access to the question types in + crowdcompute. Note that these emails must be + registered in crowdcompute worker UI: + https://crowd-compute.appspot.com/ + user_email_address (str): + Email of the user who started the labeling + task and should be notified by email. If empty + no notification will be sent. + """ + + instruction = proto.Field(proto.STRING, number=1) + + annotated_dataset_display_name = proto.Field(proto.STRING, number=2) + + annotated_dataset_description = proto.Field(proto.STRING, number=3) + + label_group = proto.Field(proto.STRING, number=4) + + language_code = proto.Field(proto.STRING, number=5) + + replica_count = proto.Field(proto.INT32, number=6) + + question_duration = proto.Field(proto.MESSAGE, number=7, message=duration.Duration,) + + contributor_emails = proto.RepeatedField(proto.STRING, number=9) + + user_email_address = proto.Field(proto.STRING, number=10) + + +class ImageClassificationConfig(proto.Message): + r"""Config for image classification human labeling task. + + Attributes: + annotation_spec_set (str): + Required. Annotation spec set resource name. + allow_multi_label (bool): + Optional. If allow_multi_label is true, contributors are + able to choose multiple labels for one image. + answer_aggregation_type (~.human_annotation_config.StringAggregationType): + Optional. The type of how to aggregate + answers. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + allow_multi_label = proto.Field(proto.BOOL, number=2) + + answer_aggregation_type = proto.Field( + proto.ENUM, number=3, enum="StringAggregationType", + ) + + +class BoundingPolyConfig(proto.Message): + r"""Config for image bounding poly (and bounding box) human + labeling task. + + Attributes: + annotation_spec_set (str): + Required. 
Annotation spec set resource name. + instruction_message (str): + Optional. Instruction message showed on + contributors UI. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + instruction_message = proto.Field(proto.STRING, number=2) + + +class PolylineConfig(proto.Message): + r"""Config for image polyline human labeling task. + + Attributes: + annotation_spec_set (str): + Required. Annotation spec set resource name. + instruction_message (str): + Optional. Instruction message showed on + contributors UI. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + instruction_message = proto.Field(proto.STRING, number=2) + + +class SegmentationConfig(proto.Message): + r"""Config for image segmentation + + Attributes: + annotation_spec_set (str): + Required. Annotation spec set resource name. format: + projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} + instruction_message (str): + Instruction message showed on labelers UI. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + instruction_message = proto.Field(proto.STRING, number=2) + + +class VideoClassificationConfig(proto.Message): + r"""Config for video classification human labeling task. + Currently two types of video classification are supported: 1. + Assign labels on the entire video. + 2. Split the video into multiple video clips based on camera + shot, and assign labels on each video clip. + + Attributes: + annotation_spec_set_configs (Sequence[~.human_annotation_config.VideoClassificationConfig.AnnotationSpecSetConfig]): + Required. The list of annotation spec set + configs. Since watching a video clip takes much + longer time than an image, we support label with + multiple AnnotationSpecSet at the same time. + Labels in each AnnotationSpecSet will be shown + in a group to contributors. Contributors can + select one or more (depending on whether to + allow multi label) from each group. + apply_shot_detection (bool): + Optional. 
Option to apply shot detection on + the video. + """ + + class AnnotationSpecSetConfig(proto.Message): + r"""Annotation spec set with the setting of allowing multi labels + or not. + + Attributes: + annotation_spec_set (str): + Required. Annotation spec set resource name. + allow_multi_label (bool): + Optional. If allow_multi_label is true, contributors are + able to choose multiple labels from one annotation spec set. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + allow_multi_label = proto.Field(proto.BOOL, number=2) + + annotation_spec_set_configs = proto.RepeatedField( + proto.MESSAGE, number=1, message=AnnotationSpecSetConfig, + ) + + apply_shot_detection = proto.Field(proto.BOOL, number=2) + + +class ObjectDetectionConfig(proto.Message): + r"""Config for video object detection human labeling task. + Object detection will be conducted on the images extracted from + the video, and those objects will be labeled with bounding + boxes. User need to specify the number of images to be extracted + per second as the extraction frame rate. + + Attributes: + annotation_spec_set (str): + Required. Annotation spec set resource name. + extraction_frame_rate (float): + Required. Number of frames per second to be + extracted from the video. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + extraction_frame_rate = proto.Field(proto.DOUBLE, number=3) + + +class ObjectTrackingConfig(proto.Message): + r"""Config for video object tracking human labeling task. + + Attributes: + annotation_spec_set (str): + Required. Annotation spec set resource name. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + +class EventConfig(proto.Message): + r"""Config for video event human labeling task. + + Attributes: + annotation_spec_sets (Sequence[str]): + Required. The list of annotation spec set + resource name. Similar to video classification, + we support selecting event from multiple + AnnotationSpecSet at the same time. 
+ """ + + annotation_spec_sets = proto.RepeatedField(proto.STRING, number=1) + + +class TextClassificationConfig(proto.Message): + r"""Config for text classification human labeling task. + + Attributes: + allow_multi_label (bool): + Optional. If allow_multi_label is true, contributors are + able to choose multiple labels for one text segment. + annotation_spec_set (str): + Required. Annotation spec set resource name. + sentiment_config (~.human_annotation_config.SentimentConfig): + Optional. Configs for sentiment selection. + """ + + allow_multi_label = proto.Field(proto.BOOL, number=1) + + annotation_spec_set = proto.Field(proto.STRING, number=2) + + sentiment_config = proto.Field(proto.MESSAGE, number=3, message="SentimentConfig",) + + +class SentimentConfig(proto.Message): + r"""Config for setting up sentiments. + + Attributes: + enable_label_sentiment_selection (bool): + If set to true, contributors will have the + option to select sentiment of the label they + selected, to mark it as negative or positive + label. Default is false. + """ + + enable_label_sentiment_selection = proto.Field(proto.BOOL, number=1) + + +class TextEntityExtractionConfig(proto.Message): + r"""Config for text entity extraction human labeling task. + + Attributes: + annotation_spec_set (str): + Required. Annotation spec set resource name. + """ + + annotation_spec_set = proto.Field(proto.STRING, number=1) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/instruction.py b/google/cloud/datalabeling_v1beta1/types/instruction.py new file mode 100644 index 0000000..4223dea --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/instruction.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.datalabeling_v1beta1.types import dataset +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={"Instruction", "CsvInstruction", "PdfInstruction",}, +) + + +class Instruction(proto.Message): + r"""Instruction of how to perform the labeling task for human + operators. Currently only PDF instruction is supported. + + Attributes: + name (str): + Output only. Instruction resource name, format: + projects/{project_id}/instructions/{instruction_id} + display_name (str): + Required. The display name of the + instruction. Maximum of 64 characters. + description (str): + Optional. User-provided description of the + instruction. The description can be up to 10000 + characters long. + create_time (~.timestamp.Timestamp): + Output only. Creation time of instruction. + update_time (~.timestamp.Timestamp): + Output only. Last update time of instruction. + data_type (~.dataset.DataType): + Required. The data type of this instruction. + csv_instruction (~.instruction.CsvInstruction): + Deprecated: this instruction format is not supported any + more. Instruction from a CSV file, such as for + classification task. The CSV file should have exact two + columns, in the following format: + + - The first column is labeled data, such as an image + reference, text. + - The second column is comma separated labels associated + with data. 
+ pdf_instruction (~.instruction.PdfInstruction): + Instruction from a PDF document. The PDF + should be in a Cloud Storage bucket. + blocking_resources (Sequence[str]): + Output only. The names of any related + resources that are blocking changes to the + instruction. + """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + data_type = proto.Field(proto.ENUM, number=6, enum=dataset.DataType,) + + csv_instruction = proto.Field(proto.MESSAGE, number=7, message="CsvInstruction",) + + pdf_instruction = proto.Field(proto.MESSAGE, number=9, message="PdfInstruction",) + + blocking_resources = proto.RepeatedField(proto.STRING, number=10) + + +class CsvInstruction(proto.Message): + r"""Deprecated: this instruction format is not supported any + more. Instruction from a CSV file. + + Attributes: + gcs_file_uri (str): + CSV file for the instruction. Only gcs path + is allowed. + """ + + gcs_file_uri = proto.Field(proto.STRING, number=1) + + +class PdfInstruction(proto.Message): + r"""Instruction from a PDF file. + + Attributes: + gcs_file_uri (str): + PDF file for the instruction. Only gcs path + is allowed. + """ + + gcs_file_uri = proto.Field(proto.STRING, number=1) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datalabeling_v1beta1/types/operations.py b/google/cloud/datalabeling_v1beta1/types/operations.py new file mode 100644 index 0000000..4706d91 --- /dev/null +++ b/google/cloud/datalabeling_v1beta1/types/operations.py @@ -0,0 +1,494 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import human_annotation_config +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.datalabeling.v1beta1", + manifest={ + "ImportDataOperationResponse", + "ExportDataOperationResponse", + "ImportDataOperationMetadata", + "ExportDataOperationMetadata", + "LabelOperationMetadata", + "LabelImageClassificationOperationMetadata", + "LabelImageBoundingBoxOperationMetadata", + "LabelImageOrientedBoundingBoxOperationMetadata", + "LabelImageBoundingPolyOperationMetadata", + "LabelImagePolylineOperationMetadata", + "LabelImageSegmentationOperationMetadata", + "LabelVideoClassificationOperationMetadata", + "LabelVideoObjectDetectionOperationMetadata", + "LabelVideoObjectTrackingOperationMetadata", + "LabelVideoEventOperationMetadata", + "LabelTextClassificationOperationMetadata", + "LabelTextEntityExtractionOperationMetadata", + "CreateInstructionMetadata", + }, +) + + +class ImportDataOperationResponse(proto.Message): + r"""Response used for ImportData longrunning operation. + + Attributes: + dataset (str): + Ouptut only. The name of imported dataset. + total_count (int): + Output only. Total number of examples + requested to import + import_count (int): + Output only. Number of examples imported + successfully. 
+ """ + + dataset = proto.Field(proto.STRING, number=1) + + total_count = proto.Field(proto.INT32, number=2) + + import_count = proto.Field(proto.INT32, number=3) + + +class ExportDataOperationResponse(proto.Message): + r"""Response used for ExportDataset longrunning operation. + + Attributes: + dataset (str): + Ouptut only. The name of dataset. "projects/*/datasets/*". + total_count (int): + Output only. Total number of examples + requested to export + export_count (int): + Output only. Number of examples exported + successfully. + label_stats (~.gcd_dataset.LabelStats): + Output only. Statistic infos of labels in the + exported dataset. + output_config (~.gcd_dataset.OutputConfig): + Output only. output_config in the ExportData request. + """ + + dataset = proto.Field(proto.STRING, number=1) + + total_count = proto.Field(proto.INT32, number=2) + + export_count = proto.Field(proto.INT32, number=3) + + label_stats = proto.Field(proto.MESSAGE, number=4, message=gcd_dataset.LabelStats,) + + output_config = proto.Field( + proto.MESSAGE, number=5, message=gcd_dataset.OutputConfig, + ) + + +class ImportDataOperationMetadata(proto.Message): + r"""Metadata of an ImportData operation. + + Attributes: + dataset (str): + Output only. The name of imported dataset. + "projects/*/datasets/*". + partial_failures (Sequence[~.status.Status]): + Output only. Partial failures encountered. + E.g. single files that couldn't be read. + Status details field will contain standard GCP + error details. + create_time (~.timestamp.Timestamp): + Output only. Timestamp when import dataset + request was created. + """ + + dataset = proto.Field(proto.STRING, number=1) + + partial_failures = proto.RepeatedField( + proto.MESSAGE, number=2, message=status.Status, + ) + + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + +class ExportDataOperationMetadata(proto.Message): + r"""Metadata of an ExportData operation. + + Attributes: + dataset (str): + Output only. 
The name of dataset to be exported. + "projects/*/datasets/*". + partial_failures (Sequence[~.status.Status]): + Output only. Partial failures encountered. + E.g. single files that couldn't be read. + Status details field will contain standard GCP + error details. + create_time (~.timestamp.Timestamp): + Output only. Timestamp when export dataset + request was created. + """ + + dataset = proto.Field(proto.STRING, number=1) + + partial_failures = proto.RepeatedField( + proto.MESSAGE, number=2, message=status.Status, + ) + + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + +class LabelOperationMetadata(proto.Message): + r"""Metadata of a labeling operation, such as LabelImage or + LabelVideo. Next tag: 20 + + Attributes: + image_classification_details (~.operations.LabelImageClassificationOperationMetadata): + Details of label image classification + operation. + image_bounding_box_details (~.operations.LabelImageBoundingBoxOperationMetadata): + Details of label image bounding box + operation. + image_bounding_poly_details (~.operations.LabelImageBoundingPolyOperationMetadata): + Details of label image bounding poly + operation. + image_oriented_bounding_box_details (~.operations.LabelImageOrientedBoundingBoxOperationMetadata): + Details of label image oriented bounding box + operation. + image_polyline_details (~.operations.LabelImagePolylineOperationMetadata): + Details of label image polyline operation. + image_segmentation_details (~.operations.LabelImageSegmentationOperationMetadata): + Details of label image segmentation + operation. + video_classification_details (~.operations.LabelVideoClassificationOperationMetadata): + Details of label video classification + operation. + video_object_detection_details (~.operations.LabelVideoObjectDetectionOperationMetadata): + Details of label video object detection + operation. 
+ video_object_tracking_details (~.operations.LabelVideoObjectTrackingOperationMetadata): + Details of label video object tracking + operation. + video_event_details (~.operations.LabelVideoEventOperationMetadata): + Details of label video event operation. + text_classification_details (~.operations.LabelTextClassificationOperationMetadata): + Details of label text classification + operation. + text_entity_extraction_details (~.operations.LabelTextEntityExtractionOperationMetadata): + Details of label text entity extraction + operation. + progress_percent (int): + Output only. Progress of label operation. Range: [0, 100]. + partial_failures (Sequence[~.status.Status]): + Output only. Partial failures encountered. + E.g. single files that couldn't be read. + Status details field will contain standard GCP + error details. + create_time (~.timestamp.Timestamp): + Output only. Timestamp when labeling request + was created. + """ + + image_classification_details = proto.Field( + proto.MESSAGE, + number=3, + oneof="details", + message="LabelImageClassificationOperationMetadata", + ) + + image_bounding_box_details = proto.Field( + proto.MESSAGE, + number=4, + oneof="details", + message="LabelImageBoundingBoxOperationMetadata", + ) + + image_bounding_poly_details = proto.Field( + proto.MESSAGE, + number=11, + oneof="details", + message="LabelImageBoundingPolyOperationMetadata", + ) + + image_oriented_bounding_box_details = proto.Field( + proto.MESSAGE, + number=14, + oneof="details", + message="LabelImageOrientedBoundingBoxOperationMetadata", + ) + + image_polyline_details = proto.Field( + proto.MESSAGE, + number=12, + oneof="details", + message="LabelImagePolylineOperationMetadata", + ) + + image_segmentation_details = proto.Field( + proto.MESSAGE, + number=15, + oneof="details", + message="LabelImageSegmentationOperationMetadata", + ) + + video_classification_details = proto.Field( + proto.MESSAGE, + number=5, + oneof="details", + 
message="LabelVideoClassificationOperationMetadata", + ) + + video_object_detection_details = proto.Field( + proto.MESSAGE, + number=6, + oneof="details", + message="LabelVideoObjectDetectionOperationMetadata", + ) + + video_object_tracking_details = proto.Field( + proto.MESSAGE, + number=7, + oneof="details", + message="LabelVideoObjectTrackingOperationMetadata", + ) + + video_event_details = proto.Field( + proto.MESSAGE, + number=8, + oneof="details", + message="LabelVideoEventOperationMetadata", + ) + + text_classification_details = proto.Field( + proto.MESSAGE, + number=9, + oneof="details", + message="LabelTextClassificationOperationMetadata", + ) + + text_entity_extraction_details = proto.Field( + proto.MESSAGE, + number=13, + oneof="details", + message="LabelTextEntityExtractionOperationMetadata", + ) + + progress_percent = proto.Field(proto.INT32, number=1) + + partial_failures = proto.RepeatedField( + proto.MESSAGE, number=2, message=status.Status, + ) + + create_time = proto.Field(proto.MESSAGE, number=16, message=timestamp.Timestamp,) + + +class LabelImageClassificationOperationMetadata(proto.Message): + r"""Metadata of a LabelImageClassification operation. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelImageBoundingBoxOperationMetadata(proto.Message): + r"""Details of a LabelImageBoundingBox operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelImageOrientedBoundingBoxOperationMetadata(proto.Message): + r"""Details of a LabelImageOrientedBoundingBox operation + metadata. 
+ + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelImageBoundingPolyOperationMetadata(proto.Message): + r"""Details of LabelImageBoundingPoly operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelImagePolylineOperationMetadata(proto.Message): + r"""Details of LabelImagePolyline operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelImageSegmentationOperationMetadata(proto.Message): + r"""Details of a LabelImageSegmentation operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelVideoClassificationOperationMetadata(proto.Message): + r"""Details of a LabelVideoClassification operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelVideoObjectDetectionOperationMetadata(proto.Message): + r"""Details of a LabelVideoObjectDetection operation metadata. 
+ + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelVideoObjectTrackingOperationMetadata(proto.Message): + r"""Details of a LabelVideoObjectTracking operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelVideoEventOperationMetadata(proto.Message): + r"""Details of a LabelVideoEvent operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelTextClassificationOperationMetadata(proto.Message): + r"""Details of a LabelTextClassification operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class LabelTextEntityExtractionOperationMetadata(proto.Message): + r"""Details of a LabelTextEntityExtraction operation metadata. + + Attributes: + basic_config (~.human_annotation_config.HumanAnnotationConfig): + Basic human annotation config used in + labeling request. + """ + + basic_config = proto.Field( + proto.MESSAGE, number=1, message=human_annotation_config.HumanAnnotationConfig, + ) + + +class CreateInstructionMetadata(proto.Message): + r"""Metadata of a CreateInstruction operation. 
+ + Attributes: + instruction (str): + The name of the created Instruction. + projects/{project_id}/instructions/{instruction_id} + partial_failures (Sequence[~.status.Status]): + Partial failures encountered. + E.g. single files that couldn't be read. + Status details field will contain standard GCP + error details. + create_time (~.timestamp.Timestamp): + Timestamp when create instruction request was + created. + """ + + instruction = proto.Field(proto.STRING, number=1) + + partial_failures = proto.RepeatedField( + proto.MESSAGE, number=2, message=status.Status, + ) + + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..4505b48 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/noxfile.py b/noxfile.py index 5849759..9f35fe6 100644 --- a/noxfile.py +++ b/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -139,7 +141,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=79") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/samples/snippets/create_annotation_spec_set.py b/samples/snippets/create_annotation_spec_set.py index 4a8add7..62983e5 100644 --- a/samples/snippets/create_annotation_spec_set.py +++ b/samples/snippets/create_annotation_spec_set.py @@ -26,61 +26,58 @@ def create_annotation_spec_set(project_id): Google Cloud project. """ from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_create_annotation_spec_set_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_create_annotation_spec_set_beta] - project_path = client.project_path(project_id) + project_path = f"projects/{project_id}" - annotation_spec_1 = datalabeling.types.AnnotationSpec( - display_name='label_1', - description='label_description_1' + annotation_spec_1 = datalabeling.AnnotationSpec( + display_name="label_1", description="label_description_1" ) - annotation_spec_2 = datalabeling.types.AnnotationSpec( - display_name='label_2', - description='label_description_2' + annotation_spec_2 = datalabeling.AnnotationSpec( + display_name="label_2", description="label_description_2" ) - annotation_spec_set = datalabeling.types.AnnotationSpecSet( - display_name='YOUR_ANNOTATION_SPEC_SET_DISPLAY_NAME', - description='YOUR_DESCRIPTION', - annotation_specs=[annotation_spec_1, annotation_spec_2] + 
annotation_spec_set = datalabeling.AnnotationSpecSet( + display_name="YOUR_ANNOTATION_SPEC_SET_DISPLAY_NAME", + description="YOUR_DESCRIPTION", + annotation_specs=[annotation_spec_1, annotation_spec_2], ) response = client.create_annotation_spec_set( - project_path, annotation_spec_set) + request={"parent": project_path, "annotation_spec_set": annotation_spec_set} + ) # The format of the resource name: # project_id/{project_id}/annotationSpecSets/{annotationSpecSets_id} - print('The annotation_spec_set resource name: {}'.format(response.name)) - print('Display name: {}'.format(response.display_name)) - print('Description: {}'.format(response.description)) - print('Annotation specs:') + print("The annotation_spec_set resource name: {}".format(response.name)) + print("Display name: {}".format(response.display_name)) + print("Description: {}".format(response.description)) + print("Annotation specs:") for annotation_spec in response.annotation_specs: - print('\tDisplay name: {}'.format(annotation_spec.display_name)) - print('\tDescription: {}\n'.format(annotation_spec.description)) + print("\tDisplay name: {}".format(annotation_spec.display_name)) + print("\tDescription: {}\n".format(annotation_spec.description)) return response + + # [END datalabeling_create_annotation_spec_set_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) - parser.add_argument( - '--project-id', - help='Project ID. Required.', - required=True - ) + parser.add_argument("--project-id", help="Project ID. 
Required.", required=True) args = parser.parse_args() diff --git a/samples/snippets/create_annotation_spec_set_test.py b/samples/snippets/create_annotation_spec_set_test.py index 6ae5ee5..1ed722e 100644 --- a/samples/snippets/create_annotation_spec_set_test.py +++ b/samples/snippets/create_annotation_spec_set_test.py @@ -24,10 +24,10 @@ import testing_lib -PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') +PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def cleaner(): resource_names = [] @@ -38,9 +38,9 @@ def cleaner(): def test_create_annotation_spec_set(cleaner, capsys): - @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): return create_annotation_spec_set.create_annotation_spec_set(PROJECT_ID) @@ -50,4 +50,4 @@ def run_sample(): cleaner.append(response.name) out, _ = capsys.readouterr() - assert 'The annotation_spec_set resource name:' in out + assert "The annotation_spec_set resource name:" in out diff --git a/samples/snippets/create_instruction.py b/samples/snippets/create_instruction.py index 5495acb..f0d2399 100644 --- a/samples/snippets/create_instruction.py +++ b/samples/snippets/create_instruction.py @@ -27,77 +27,70 @@ def create_instruction(project_id, data_type, instruction_gcs_uri): Google Cloud Storage. 
""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_create_instruction_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_create_instruction_beta] - project_path = client.project_path(project_id) + project_path = f"projects/{project_id}" - pdf_instruction = datalabeling.types.PdfInstruction( - gcs_file_uri=instruction_gcs_uri) + pdf_instruction = datalabeling.PdfInstruction(gcs_file_uri=instruction_gcs_uri) - instruction = datalabeling.types.Instruction( - display_name='YOUR_INSTRUCTION_DISPLAY_NAME', - description='YOUR_DESCRIPTION', + instruction = datalabeling.Instruction( + display_name="YOUR_INSTRUCTION_DISPLAY_NAME", + description="YOUR_DESCRIPTION", data_type=data_type, - pdf_instruction=pdf_instruction + pdf_instruction=pdf_instruction, ) - operation = client.create_instruction(project_path, instruction) + operation = client.create_instruction( + request={"parent": project_path, "instruction": instruction} + ) result = operation.result() # The format of the resource name: # project_id/{project_id}/instruction/{instruction_id} - print('The instruction resource name: {}'.format(result.name)) - print('Display name: {}'.format(result.display_name)) - print('Description: {}'.format(result.description)) - print('Create time:') - print('\tseconds: {}'.format(result.create_time.seconds)) - print('\tnanos: {}'.format(result.create_time.nanos)) - print('Data type: {}'.format( - datalabeling.enums.DataType(result.data_type).name)) - print('Pdf instruction:') - print('\tGcs file uri: 
{}\n'.format( - result.pdf_instruction.gcs_file_uri)) + print("The instruction resource name: {}".format(result.name)) + print("Display name: {}".format(result.display_name)) + print("Description: {}".format(result.description)) + print("Create time:") + print("\tseconds: {}".format(result.create_time.timestamp_pb().seconds)) + print("\tnanos: {}".format(result.create_time.timestamp_pb().nanos)) + print("Data type: {}".format(datalabeling.DataType(result.data_type).name)) + print("Pdf instruction:") + print("\tGcs file uri: {}\n".format(result.pdf_instruction.gcs_file_uri)) return result + + # [END datalabeling_create_instruction_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) - parser.add_argument( - '--project-id', - help='Project ID. Required.', - required=True - ) + parser.add_argument("--project-id", help="Project ID. Required.", required=True) parser.add_argument( - '--data-type', - help='Data type. Only support IMAGE, VIDEO, TEXT and AUDIO. Required.', - required=True + "--data-type", + help="Data type. Only support IMAGE, VIDEO, TEXT and AUDIO. Required.", + required=True, ) parser.add_argument( - '--instruction-gcs-uri', - help='The URI of Google Cloud Storage of the instruction. Required.', - required=True + "--instruction-gcs-uri", + help="The URI of Google Cloud Storage of the instruction. 
Required.", + required=True, ) args = parser.parse_args() - create_instruction( - args.project_id, - args.data_type, - args.instruction_gcs_uri - ) + create_instruction(args.project_id, args.data_type, args.instruction_gcs_uri) diff --git a/samples/snippets/create_instruction_test.py b/samples/snippets/create_instruction_test.py index b164da0..5b32f82 100644 --- a/samples/snippets/create_instruction_test.py +++ b/samples/snippets/create_instruction_test.py @@ -18,18 +18,18 @@ import backoff from google.api_core.exceptions import ServerError +from google.cloud import datalabeling import pytest import create_instruction import testing_lib -PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') -INSTRUCTION_GCS_URI = ('gs://cloud-samples-data/datalabeling' - '/instruction/test.pdf') +PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") +INSTRUCTION_GCS_URI = "gs://cloud-samples-data/datalabeling" "/instruction/test.pdf" -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def cleaner(): resource_names = [] @@ -40,15 +40,16 @@ def cleaner(): def test_create_instruction(cleaner, capsys): - @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): return create_instruction.create_instruction( - PROJECT_ID, 'IMAGE', INSTRUCTION_GCS_URI) + PROJECT_ID, datalabeling.DataType.IMAGE, INSTRUCTION_GCS_URI + ) instruction = run_sample() cleaner.append(instruction.name) out, _ = capsys.readouterr() - assert 'The instruction resource name: ' in out + assert "The instruction resource name: " in out diff --git a/samples/snippets/export_data.py b/samples/snippets/export_data.py index f70dc9c..8018676 100644 --- a/samples/snippets/export_data.py +++ b/samples/snippets/export_data.py @@ -21,61 +21,65 @@ # [START datalabeling_export_data_beta] -def export_data(dataset_resource_name, annotated_dataset_resource_name, - export_gcs_uri): +def export_data(dataset_resource_name, 
annotated_dataset_resource_name, export_gcs_uri): """Exports a dataset from the given Google Cloud project.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_export_data_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_export_data_beta] - gcs_destination = datalabeling.types.GcsDestination( - output_uri=export_gcs_uri, mime_type='text/csv') + gcs_destination = datalabeling.GcsDestination( + output_uri=export_gcs_uri, mime_type="text/csv" + ) - output_config = datalabeling.types.OutputConfig( - gcs_destination=gcs_destination) + output_config = datalabeling.OutputConfig(gcs_destination=gcs_destination) response = client.export_data( - dataset_resource_name, - annotated_dataset_resource_name, - output_config + request={ + "name": dataset_resource_name, + "annotated_dataset": annotated_dataset_resource_name, + "output_config": output_config, + } ) - print('Dataset ID: {}\n'.format(response.result().dataset)) - print('Output config:') - print('\tGcs destination:') - print('\t\tOutput URI: {}\n'.format( - response.result().output_config.gcs_destination.output_uri)) + print("Dataset ID: {}\n".format(response.result().dataset)) + print("Output config:") + print("\tGcs destination:") + print( + "\t\tOutput URI: {}\n".format( + response.result().output_config.gcs_destination.output_uri + ) + ) + + # [END datalabeling_export_data_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - 
formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( - '--dataset-resource-name', - help='Dataset resource name. Required.', - required=True + "--dataset-resource-name", + help="Dataset resource name. Required.", + required=True, ) parser.add_argument( - '--annotated-dataset-resource-name', - help='Annotated Dataset resource name. Required.', - required=True + "--annotated-dataset-resource-name", + help="Annotated Dataset resource name. Required.", + required=True, ) parser.add_argument( - '--export-gcs-uri', - help='The export GCS URI. Required.', - required=True + "--export-gcs-uri", help="The export GCS URI. Required.", required=True ) args = parser.parse_args() @@ -83,5 +87,5 @@ def export_data(dataset_resource_name, annotated_dataset_resource_name, export_data( args.dataset_resource_name, args.annotated_dataset_resource_name, - args.export_gcs_uri + args.export_gcs_uri, ) diff --git a/samples/snippets/import_data.py b/samples/snippets/import_data.py index 01c3201..1324214 100644 --- a/samples/snippets/import_data.py +++ b/samples/snippets/import_data.py @@ -24,55 +24,59 @@ def import_data(dataset_resource_name, data_type, input_gcs_uri): """Imports data to the given Google Cloud project and dataset.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_import_data_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_import_data_beta] - gcs_source = datalabeling.types.GcsSource( - 
input_uri=input_gcs_uri, mime_type='text/csv') + gcs_source = datalabeling.GcsSource(input_uri=input_gcs_uri, mime_type="text/csv") - csv_input_config = datalabeling.types.InputConfig( - data_type=data_type, gcs_source=gcs_source) + csv_input_config = datalabeling.InputConfig( + data_type=data_type, gcs_source=gcs_source + ) - response = client.import_data(dataset_resource_name, csv_input_config) + response = client.import_data( + request={"name": dataset_resource_name, "input_config": csv_input_config} + ) result = response.result() # The format of resource name: # project_id/{project_id}/datasets/{dataset_id} - print('Dataset resource name: {}\n'.format(result.dataset)) + print("Dataset resource name: {}\n".format(result.dataset)) return result + + # [END datalabeling_import_data_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( - '--dataset-resource-name', - help='Dataset resource name. Required.', - required=True + "--dataset-resource-name", + help="Dataset resource name. Required.", + required=True, ) parser.add_argument( - '--data-type', - help='Data type. Only support IMAGE, VIDEO, TEXT and AUDIO. Required.', - required=True + "--data-type", + help="Data type. Only support IMAGE, VIDEO, TEXT and AUDIO. Required.", + required=True, ) parser.add_argument( - '--input-gcs-uri', - help='The GCS URI of the input dataset. Required.', - required=True + "--input-gcs-uri", + help="The GCS URI of the input dataset. 
Required.", + required=True, ) args = parser.parse_args() diff --git a/samples/snippets/import_data_test.py b/samples/snippets/import_data_test.py index 246cfba..d304cba 100644 --- a/samples/snippets/import_data_test.py +++ b/samples/snippets/import_data_test.py @@ -18,17 +18,18 @@ import backoff from google.api_core.exceptions import ServerError +from google.cloud import datalabeling import pytest import import_data import testing_lib -PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') -INPUT_GCS_URI = 'gs://cloud-samples-data/datalabeling/image/image_dataset.csv' +PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") +INPUT_GCS_URI = "gs://cloud-samples-data/datalabeling/image/image_dataset.csv" -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def dataset(): # create a temporary dataset dataset = testing_lib.create_dataset(PROJECT_ID) @@ -40,12 +41,14 @@ def dataset(): def test_import_data(capsys, dataset): - @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): - import_data.import_data(dataset.name, 'IMAGE', INPUT_GCS_URI) + import_data.import_data( + dataset.name, datalabeling.DataType.IMAGE, INPUT_GCS_URI + ) run_sample() out, _ = capsys.readouterr() - assert 'Dataset resource name: ' in out + assert "Dataset resource name: " in out diff --git a/samples/snippets/label_image.py b/samples/snippets/label_image.py index 19a10eb..010f0dd 100644 --- a/samples/snippets/label_image.py +++ b/samples/snippets/label_image.py @@ -21,69 +21,73 @@ # [START datalabeling_label_image_beta] -def label_image(dataset_resource_name, instruction_resource_name, - annotation_spec_set_resource_name): +def label_image( + dataset_resource_name, instruction_resource_name, annotation_spec_set_resource_name +): """Labels an image dataset.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END 
datalabeling_label_image_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_label_image_beta] - basic_config = datalabeling.types.HumanAnnotationConfig( + basic_config = datalabeling.HumanAnnotationConfig( instruction=instruction_resource_name, - annotated_dataset_display_name='YOUR_ANNOTATED_DATASET_DISPLAY_NAME', - label_group='YOUR_LABEL_GROUP', - replica_count=1 + annotated_dataset_display_name="YOUR_ANNOTATED_DATASET_DISPLAY_NAME", + label_group="YOUR_LABEL_GROUP", + replica_count=1, ) - feature = datalabeling.enums.LabelImageRequest.Feature.CLASSIFICATION + feature = datalabeling.LabelImageRequest.Feature.CLASSIFICATION - config = datalabeling.types.ImageClassificationConfig( + config = datalabeling.ImageClassificationConfig( annotation_spec_set=annotation_spec_set_resource_name, allow_multi_label=False, - answer_aggregation_type=datalabeling.enums.StringAggregationType - .MAJORITY_VOTE + answer_aggregation_type=datalabeling.StringAggregationType.MAJORITY_VOTE, ) response = client.label_image( - dataset_resource_name, - basic_config, - feature, - image_classification_config=config + request={ + "parent": dataset_resource_name, + "basic_config": basic_config, + "feature": feature, + "image_classification_config": config, + } ) - print('Label_image operation name: {}'.format(response.operation.name)) + print("Label_image operation name: {}".format(response.operation.name)) return response + + # [END datalabeling_label_image_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - 
formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( - '--dataset-resource-name', - help='Dataset resource name. Required.', - required=True + "--dataset-resource-name", + help="Dataset resource name. Required.", + required=True, ) parser.add_argument( - '--instruction-resource-name', - help='Instruction resource name. Required.', - required=True + "--instruction-resource-name", + help="Instruction resource name. Required.", + required=True, ) parser.add_argument( - '--annotation-spec-set-resource-name', - help='Annotation spec set resource name. Required.', - required=True + "--annotation-spec-set-resource-name", + help="Annotation spec set resource name. Required.", + required=True, ) args = parser.parse_args() @@ -91,5 +95,5 @@ def label_image(dataset_resource_name, instruction_resource_name, label_image( args.dataset_resource_name, args.instruction_resource_name, - args.annotation_spec_set_resource_name + args.annotation_spec_set_resource_name, ) diff --git a/samples/snippets/label_image_test.py b/samples/snippets/label_image_test.py index a6feee4..930ce78 100644 --- a/samples/snippets/label_image_test.py +++ b/samples/snippets/label_image_test.py @@ -18,31 +18,31 @@ import backoff from google.api_core.exceptions import ServerError +from google.cloud import datalabeling import pytest import label_image import testing_lib -PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') -INPUT_GCS_URI = 'gs://cloud-samples-data/datalabeling/image/image_dataset.csv' -INSTRUCTION_GCS_URI = ('gs://cloud-samples-data/datalabeling' - '/instruction/test.pdf') +PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") +INPUT_GCS_URI = "gs://cloud-samples-data/datalabeling/image/image_dataset.csv" +INSTRUCTION_GCS_URI = "gs://cloud-samples-data/datalabeling" "/instruction/test.pdf" -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def dataset(): # create a temporary dataset dataset = 
testing_lib.create_dataset(PROJECT_ID) - testing_lib.import_data(dataset.name, 'IMAGE', INPUT_GCS_URI) + testing_lib.import_data(dataset.name, datalabeling.DataType.IMAGE, INPUT_GCS_URI) yield dataset # tear down testing_lib.delete_dataset(dataset.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def annotation_spec_set(): # create a temporary annotation_spec_set response = testing_lib.create_annotation_spec_set(PROJECT_ID) @@ -52,11 +52,12 @@ def annotation_spec_set(): testing_lib.delete_annotation_spec_set(response.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def instruction(): # create a temporary instruction instruction = testing_lib.create_instruction( - PROJECT_ID, 'IMAGE', INSTRUCTION_GCS_URI) + PROJECT_ID, datalabeling.DataType.IMAGE, INSTRUCTION_GCS_URI + ) yield instruction @@ -64,7 +65,7 @@ def instruction(): testing_lib.delete_instruction(instruction.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def cleaner(): resource_names = [] @@ -76,18 +77,19 @@ def cleaner(): # Passing in dataset as the last argument in test_label_image since it needs # to be deleted before the annotation_spec_set can be deleted. -def test_label_image( - capsys, annotation_spec_set, instruction, dataset, cleaner): - +@pytest.mark.skip(reason="currently unavailable") +def test_label_image(capsys, annotation_spec_set, instruction, dataset, cleaner): @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): # Start labeling. 
return label_image.label_image( - dataset.name, instruction.name, annotation_spec_set.name) + dataset.name, instruction.name, annotation_spec_set.name + ) response = run_sample() cleaner.append(response.operation.name) out, _ = capsys.readouterr() - assert 'Label_image operation name: ' in out + assert "Label_image operation name: " in out diff --git a/samples/snippets/label_text.py b/samples/snippets/label_text.py index 6b86590..c85ad5f 100644 --- a/samples/snippets/label_text.py +++ b/samples/snippets/label_text.py @@ -21,66 +21,71 @@ # [START datalabeling_label_text_beta] -def label_text(dataset_resource_name, instruction_resource_name, - annotation_spec_set_resource_name): +def label_text( + dataset_resource_name, instruction_resource_name, annotation_spec_set_resource_name +): """Labels a text dataset.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_label_text_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_label_text_beta] - basic_config = datalabeling.types.HumanAnnotationConfig( + basic_config = datalabeling.HumanAnnotationConfig( instruction=instruction_resource_name, - annotated_dataset_display_name='YOUR_ANNOTATED_DATASET_DISPLAY_NAME', - label_group='YOUR_LABEL_GROUP', - replica_count=1 + annotated_dataset_display_name="YOUR_ANNOTATED_DATASET_DISPLAY_NAME", + label_group="YOUR_LABEL_GROUP", + replica_count=1, ) - feature = (datalabeling.enums.LabelTextRequest. 
- Feature.TEXT_ENTITY_EXTRACTION) + feature = datalabeling.LabelTextRequest.Feature.TEXT_ENTITY_EXTRACTION - config = datalabeling.types.TextEntityExtractionConfig( - annotation_spec_set=annotation_spec_set_resource_name) + config = datalabeling.TextEntityExtractionConfig( + annotation_spec_set=annotation_spec_set_resource_name + ) response = client.label_text( - parent=dataset_resource_name, - basic_config=basic_config, - feature=feature, - text_entity_extraction_config=config + request={ + "parent": dataset_resource_name, + "basic_config": basic_config, + "feature": feature, + "text_entity_extraction_config": config, + } ) - print('Label_text operation name: {}'.format(response.operation.name)) + print("Label_text operation name: {}".format(response.operation.name)) return response + + # [END datalabeling_label_text_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( - '--dataset-resource-name', - help='Dataset resource name. Required.', - required=True + "--dataset-resource-name", + help="Dataset resource name. Required.", + required=True, ) parser.add_argument( - '--instruction-resource-name', - help='Instruction resource name. Required.', - required=True + "--instruction-resource-name", + help="Instruction resource name. Required.", + required=True, ) parser.add_argument( - '--annotation-spec-set-resource-name', - help='Annotation spec set resource name. 
Required.", + required=True, ) args = parser.parse_args() @@ -88,5 +93,5 @@ def label_text(dataset_resource_name, instruction_resource_name, label_text( args.dataset_resource_name, args.instruction_resource_name, - args.annotation_spec_set_resource_name + args.annotation_spec_set_resource_name, ) diff --git a/samples/snippets/label_text_test.py b/samples/snippets/label_text_test.py index c90024b..c34b028 100644 --- a/samples/snippets/label_text_test.py +++ b/samples/snippets/label_text_test.py @@ -18,23 +18,23 @@ import backoff from google.api_core.exceptions import ServerError +from google.cloud import datalabeling import pytest import label_text import testing_lib -PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') -INPUT_GCS_URI = 'gs://cloud-samples-data/datalabeling/text/input.csv' -INSTRUCTION_GCS_URI = ('gs://cloud-samples-data/datalabeling' - '/instruction/test.pdf') +PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") +INPUT_GCS_URI = "gs://cloud-samples-data/datalabeling/text/input.csv" +INSTRUCTION_GCS_URI = "gs://cloud-samples-data/datalabeling" "/instruction/test.pdf" -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def dataset(): # create a temporary dataset dataset = testing_lib.create_dataset(PROJECT_ID) - testing_lib.import_data(dataset.name, 'TEXT', INPUT_GCS_URI) + testing_lib.import_data(dataset.name, "TEXT", INPUT_GCS_URI) yield dataset @@ -42,7 +42,7 @@ def dataset(): testing_lib.delete_dataset(dataset.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def annotation_spec_set(): # create a temporary annotation_spec_set response = testing_lib.create_annotation_spec_set(PROJECT_ID) @@ -52,11 +52,12 @@ def annotation_spec_set(): testing_lib.delete_annotation_spec_set(response.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def instruction(): # create a temporary instruction instruction = testing_lib.create_instruction( - PROJECT_ID, 'IMAGE', INSTRUCTION_GCS_URI) + PROJECT_ID, 
datalabeling.DataType.IMAGE, INSTRUCTION_GCS_URI + ) yield instruction @@ -64,7 +65,7 @@ def instruction(): testing_lib.delete_instruction(instruction.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def cleaner(): resource_names = [] @@ -78,19 +79,20 @@ def cleaner(): # to be deleted before the annotation_spec_set can be deleted. @pytest.mark.skip("Constantly failing") def test_label_text(capsys, annotation_spec_set, instruction, dataset, cleaner): - @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): # Start labeling. return label_text.label_text( - dataset.name, instruction.name, annotation_spec_set.name) + dataset.name, instruction.name, annotation_spec_set.name + ) response = run_sample() cleaner.append(response.operation.name) out, _ = capsys.readouterr() - assert 'Label_text operation name: ' in out + assert "Label_text operation name: " in out # Cancels the labeling operation. 
response.cancel() diff --git a/samples/snippets/label_video.py b/samples/snippets/label_video.py index a3425b4..09c46bb 100644 --- a/samples/snippets/label_video.py +++ b/samples/snippets/label_video.py @@ -21,66 +21,71 @@ # [START datalabeling_label_video_beta] -def label_video(dataset_resource_name, instruction_resource_name, - annotation_spec_set_resource_name): +def label_video( + dataset_resource_name, instruction_resource_name, annotation_spec_set_resource_name +): """Labels a video dataset.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_label_video_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_label_video_beta] - basic_config = datalabeling.types.HumanAnnotationConfig( + basic_config = datalabeling.HumanAnnotationConfig( instruction=instruction_resource_name, - annotated_dataset_display_name='YOUR_ANNOTATED_DATASET_DISPLAY_NAME', - label_group='YOUR_LABEL_GROUP', - replica_count=1 + annotated_dataset_display_name="YOUR_ANNOTATED_DATASET_DISPLAY_NAME", + label_group="YOUR_LABEL_GROUP", + replica_count=1, ) - feature = datalabeling.enums.LabelVideoRequest.Feature.OBJECT_TRACKING + feature = datalabeling.LabelVideoRequest.Feature.OBJECT_TRACKING - config = datalabeling.types.ObjectTrackingConfig( + config = datalabeling.ObjectTrackingConfig( annotation_spec_set=annotation_spec_set_resource_name ) response = client.label_video( - dataset_resource_name, - basic_config, - feature, - object_tracking_config=config + request={ + "parent": dataset_resource_name, + 
"basic_config": basic_config, + "feature": feature, + "object_tracking_config": config, + } ) - print('Label_video operation name: {}'.format(response.operation.name)) + print("Label_video operation name: {}".format(response.operation.name)) return response + + # [END datalabeling_label_video_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) parser.add_argument( - '--dataset-resource-name', - help='Dataset resource name. Required.', - required=True + "--dataset-resource-name", + help="Dataset resource name. Required.", + required=True, ) parser.add_argument( - '--instruction-resource-name', - help='Instruction resource name. Required.', - required=True + "--instruction-resource-name", + help="Instruction resource name. Required.", + required=True, ) parser.add_argument( - '--annotation-spec-set-resource-name', - help='Annotation spec set resource name. Required.', - required=True + "--annotation-spec-set-resource-name", + help="Annotation spec set resource name. 
Required.", + required=True, ) args = parser.parse_args() @@ -88,5 +93,5 @@ def label_video(dataset_resource_name, instruction_resource_name, label_video( args.dataset_resource_name, args.instruction_resource_name, - args.annotation_spec_set_resource_name + args.annotation_spec_set_resource_name, ) diff --git a/samples/snippets/label_video_test.py b/samples/snippets/label_video_test.py index 05d3c5a..02fd531 100644 --- a/samples/snippets/label_video_test.py +++ b/samples/snippets/label_video_test.py @@ -18,23 +18,23 @@ import backoff from google.api_core.exceptions import ServerError +from google.cloud import datalabeling import pytest import label_video import testing_lib -PROJECT_ID = os.getenv('GOOGLE_CLOUD_PROJECT') -INPUT_GCS_URI = 'gs://cloud-samples-data/datalabeling/videos/video_dataset.csv' -INSTRUCTION_GCS_URI = ('gs://cloud-samples-data/datalabeling' - '/instruction/test.pdf') +PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") +INPUT_GCS_URI = "gs://cloud-samples-data/datalabeling/videos/video_dataset.csv" +INSTRUCTION_GCS_URI = "gs://cloud-samples-data/datalabeling" "/instruction/test.pdf" -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def dataset(): # create a temporary dataset dataset = testing_lib.create_dataset(PROJECT_ID) - testing_lib.import_data(dataset.name, 'VIDEO', INPUT_GCS_URI) + testing_lib.import_data(dataset.name, datalabeling.DataType.VIDEO, INPUT_GCS_URI) yield dataset @@ -42,7 +42,7 @@ def dataset(): testing_lib.delete_dataset(dataset.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def annotation_spec_set(): # create a temporary annotation_spec_set response = testing_lib.create_annotation_spec_set(PROJECT_ID) @@ -52,11 +52,12 @@ def annotation_spec_set(): testing_lib.delete_annotation_spec_set(response.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def instruction(): # create a temporary instruction instruction = testing_lib.create_instruction( - PROJECT_ID, 'VIDEO', 
INSTRUCTION_GCS_URI) + PROJECT_ID, datalabeling.DataType.VIDEO, INSTRUCTION_GCS_URI + ) yield instruction @@ -64,7 +65,7 @@ def instruction(): testing_lib.delete_instruction(instruction.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def cleaner(): resource_names = [] @@ -76,21 +77,22 @@ def cleaner(): # Passing in dataset as the last argument in test_label_image since it needs # to be deleted before the annotation_spec_set can be deleted. -def test_label_video( - capsys, annotation_spec_set, instruction, dataset, cleaner): - +@pytest.mark.skip(reason="currently unavailable") +def test_label_video(capsys, annotation_spec_set, instruction, dataset, cleaner): @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): # Start labeling. return label_video.label_video( - dataset.name, instruction.name, annotation_spec_set.name) + dataset.name, instruction.name, annotation_spec_set.name + ) response = run_sample() cleaner.append(response.operation.name) out, _ = capsys.readouterr() - assert 'Label_video operation name: ' in out + assert "Label_video operation name: " in out # Cancels the labeling operation. 
response.cancel() diff --git a/samples/snippets/manage_dataset.py b/samples/snippets/manage_dataset.py index a100bf4..2bc3730 100644 --- a/samples/snippets/manage_dataset.py +++ b/samples/snippets/manage_dataset.py @@ -24,34 +24,38 @@ def create_dataset(project_id): """Creates a dataset for the given Google Cloud project.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_create_dataset_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_create_dataset_beta] - formatted_project_name = client.project_path(project_id) + formatted_project_name = f"projects/{project_id}" - dataset = datalabeling.types.Dataset( - display_name='YOUR_DATASET_SET_DISPLAY_NAME', - description='YOUR_DESCRIPTION' + dataset = datalabeling.Dataset( + display_name="YOUR_DATASET_SET_DISPLAY_NAME", description="YOUR_DESCRIPTION" ) - response = client.create_dataset(formatted_project_name, dataset) + response = client.create_dataset( + request={"parent": formatted_project_name, "dataset": dataset} + ) # The format of resource name: # project_id/{project_id}/datasets/{dataset_id} - print('The dataset resource name: {}'.format(response.name)) - print('Display name: {}'.format(response.display_name)) - print('Description: {}'.format(response.description)) - print('Create time:') - print('\tseconds: {}'.format(response.create_time.seconds)) - print('\tnanos: {}\n'.format(response.create_time.nanos)) + print("The dataset resource name: {}".format(response.name)) + print("Display name: {}".format(response.display_name)) 
+ print("Description: {}".format(response.description)) + print("Create time:") + print("\tseconds: {}".format(response.create_time.timestamp_pb().seconds)) + print("\tnanos: {}\n".format(response.create_time.timestamp_pb().nanos)) return response + + # [END datalabeling_create_dataset_beta] @@ -59,27 +63,30 @@ def create_dataset(project_id): def list_datasets(project_id): """Lists datasets for the given Google Cloud project.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_list_datasets_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_list_datasets_beta] - formatted_project_name = client.project_path(project_id) + formatted_project_name = f"projects/{project_id}" - response = client.list_datasets(formatted_project_name) + response = client.list_datasets(request={"parent": formatted_project_name}) for element in response: # The format of resource name: # project_id/{project_id}/datasets/{dataset_id} - print('The dataset resource name: {}\n'.format(element.name)) - print('Display name: {}'.format(element.display_name)) - print('Description: {}'.format(element.description)) - print('Create time:') - print('\tseconds: {}'.format(element.create_time.seconds)) - print('\tnanos: {}'.format(element.create_time.nanos)) + print("The dataset resource name: {}\n".format(element.name)) + print("Display name: {}".format(element.display_name)) + print("Description: {}".format(element.description)) + print("Create time:") + print("\tseconds: 
{}".format(element.create_time.timestamp_pb().seconds)) + print("\tnanos: {}".format(element.create_time.timestamp_pb().nanos)) + + # [END datalabeling_list_datasets_beta] @@ -87,23 +94,26 @@ def list_datasets(project_id): def get_dataset(dataset_resource_name): """Gets a dataset for the given Google Cloud project.""" from google.cloud import datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_get_dataset_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_get_dataset_beta] - response = client.get_dataset(dataset_resource_name) + response = client.get_dataset(request={"name": dataset_resource_name}) + + print("The dataset resource name: {}\n".format(response.name)) + print("Display name: {}".format(response.display_name)) + print("Description: {}".format(response.description)) + print("Create time:") + print("\tseconds: {}".format(response.create_time.timestamp_pb().seconds)) + print("\tnanos: {}".format(response.create_time.timestamp_pb().nanos)) + - print('The dataset resource name: {}\n'.format(response.name)) - print('Display name: {}'.format(response.display_name)) - print('Description: {}'.format(response.description)) - print('Create time:') - print('\tseconds: {}'.format(response.create_time.seconds)) - print('\tnanos: {}'.format(response.create_time.nanos)) # [END datalabeling_get_dataset_beta] @@ -111,67 +121,66 @@ def get_dataset(dataset_resource_name): def delete_dataset(dataset_resource_name): """Deletes a dataset for the given Google Cloud project.""" from google.cloud import 
datalabeling_v1beta1 as datalabeling + client = datalabeling.DataLabelingServiceClient() # [END datalabeling_delete_dataset_beta] # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) # [START datalabeling_delete_dataset_beta] - response = client.delete_dataset(dataset_resource_name) + response = client.delete_dataset(request={"name": dataset_resource_name}) + + print("Dataset deleted. {}\n".format(response)) + - print('Dataset deleted. {}\n'.format(response)) # [END datalabeling_delete_dataset_beta] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) - subparsers = parser.add_subparsers(dest='command') + subparsers = parser.add_subparsers(dest="command") - create_parser = subparsers.add_parser( - 'create', help='Create a new dataset.') + create_parser = subparsers.add_parser("create", help="Create a new dataset.") create_parser.add_argument( - '--project-id', - help='Project ID. Required.', - required=True + "--project-id", help="Project ID. Required.", required=True ) - list_parser = subparsers.add_parser('list', help='List all datasets.') + list_parser = subparsers.add_parser("list", help="List all datasets.") list_parser.add_argument( - '--project-id', - help='Project ID. Required.', - required=True + "--project-id", help="Project ID. 
Required.", required=True ) get_parser = subparsers.add_parser( - 'get', help='Get a dataset by the dataset resource name.') + "get", help="Get a dataset by the dataset resource name." + ) get_parser.add_argument( - '--dataset-resource-name', - help='The dataset resource name. Used in the get or delete operation.', - required=True + "--dataset-resource-name", + help="The dataset resource name. Used in the get or delete operation.", + required=True, ) delete_parser = subparsers.add_parser( - 'delete', help='Delete a dataset by the dataset resource name.') + "delete", help="Delete a dataset by the dataset resource name." + ) delete_parser.add_argument( - '--dataset-resource-name', - help='The dataset resource name. Used in the get or delete operation.', - required=True + "--dataset-resource-name", + help="The dataset resource name. Used in the get or delete operation.", + required=True, ) args = parser.parse_args() - if args.command == 'create': + if args.command == "create": create_dataset(args.project_id) - elif args.command == 'list': + elif args.command == "list": list_datasets(args.project_id) - elif args.command == 'get': + elif args.command == "get": get_dataset(args.dataset_resource_name) - elif args.command == 'delete': + elif args.command == "delete": delete_dataset(args.dataset_resource_name) diff --git a/samples/snippets/manage_dataset_test.py b/samples/snippets/manage_dataset_test.py index b6abc25..3ecc175 100644 --- a/samples/snippets/manage_dataset_test.py +++ b/samples/snippets/manage_dataset_test.py @@ -28,7 +28,7 @@ PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT") -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def dataset(): # create a temporary dataset dataset = testing_lib.create_dataset(PROJECT_ID) @@ -39,7 +39,7 @@ def dataset(): testing_lib.delete_dataset(dataset.name) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def cleaner(): # First delete old datasets. 
try: @@ -58,9 +58,9 @@ def cleaner(): def test_create_dataset(cleaner, capsys): - @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): return manage_dataset.create_dataset(PROJECT_ID) @@ -73,9 +73,9 @@ def run_sample(): @pytest.mark.skip("Constantly failing") def test_list_dataset(capsys, dataset): - @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): manage_dataset.list_datasets(PROJECT_ID) @@ -86,7 +86,8 @@ def run_sample(): def test_get_dataset(capsys, dataset): @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): manage_dataset.get_dataset(dataset.name) @@ -97,7 +98,8 @@ def run_sample(): def test_delete_dataset(capsys, dataset): @backoff.on_exception( - backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE) + backoff.expo, ServerError, max_time=testing_lib.RETRY_DEADLINE + ) def run_sample(): manage_dataset.delete_dataset(dataset.name) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index ba55d7c..5660f08 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -37,24 +37,22 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # A dictionary you want to inject into your test. 
Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -69,12 +67,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -83,7 +81,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -138,7 +136,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) @@ -182,9 +180,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/samples/snippets/testing_lib.py b/samples/snippets/testing_lib.py index c9674a9..7c39d82 100644 --- a/samples/snippets/testing_lib.py +++ b/samples/snippets/testing_lib.py @@ -33,8 +33,8 @@ def create_client(): # If provided, use a provided test endpoint - this will prevent tests on # this snippet from triggering any action by a real human - if 'DATALABELING_ENDPOINT' in os.environ: - opts = ClientOptions(api_endpoint=os.getenv('DATALABELING_ENDPOINT')) + if "DATALABELING_ENDPOINT" in os.environ: + opts = ClientOptions(api_endpoint=os.getenv("DATALABELING_ENDPOINT")) client = datalabeling.DataLabelingServiceClient(client_options=opts) else: client = datalabeling.DataLabelingServiceClient() @@ -53,13 +53,13 @@ def delete_dataset(name): def delete_old_datasets(project_id): client = create_client() - formatted_project_name = client.project_path(project_id) + formatted_project_name = f"projects/{project_id}" - response = client.list_datasets(formatted_project_name) + response = client.list_datasets(request={"parent": formatted_project_name}) # It will delete datasets created more than 2 hours ago cutoff_time = time.time() - 7200 for element in response: - if element.create_time.seconds < cutoff_time: + if element.create_time.timestamp_pb().seconds < cutoff_time: print("Deleting {}".format(element.name)) try: dataset_sample.delete_dataset(element.name) @@ -80,7 +80,7 @@ def create_annotation_spec_set(project_id): @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=RETRY_DEADLINE) def delete_annotation_spec_set(name): client = create_client() - client.delete_annotation_spec_set(name) + 
client.delete_annotation_spec_set(request={"name": name}) @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=RETRY_DEADLINE) @@ -91,13 +91,13 @@ def create_instruction(project_id, data_type, gcs_uri): @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=RETRY_DEADLINE) def delete_instruction(name): client = create_client() - client.delete_instruction(name) + client.delete_instruction(request={"name": name}) @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=RETRY_DEADLINE) def cancel_operation(name): client = create_client() - client.transport._operations_client.cancel_operation(name) + client._transport.operations_client.cancel_operation(name) @backoff.on_exception(backoff.expo, DeadlineExceeded, max_time=RETRY_DEADLINE) diff --git a/scripts/fixup_datalabeling_v1beta1_keywords.py b/scripts/fixup_datalabeling_v1beta1_keywords.py new file mode 100644 index 0000000..9a49fac --- /dev/null +++ b/scripts/fixup_datalabeling_v1beta1_keywords.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class datalabelingCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_annotation_spec_set': ('parent', 'annotation_spec_set', ), + 'create_dataset': ('parent', 'dataset', ), + 'create_evaluation_job': ('parent', 'job', ), + 'create_instruction': ('parent', 'instruction', ), + 'delete_annotated_dataset': ('name', ), + 'delete_annotation_spec_set': ('name', ), + 'delete_dataset': ('name', ), + 'delete_evaluation_job': ('name', ), + 'delete_instruction': ('name', ), + 'export_data': ('name', 'annotated_dataset', 'output_config', 'filter', 'user_email_address', ), + 'get_annotated_dataset': ('name', ), + 'get_annotation_spec_set': ('name', ), + 'get_data_item': ('name', ), + 'get_dataset': ('name', ), + 'get_evaluation': ('name', ), + 'get_evaluation_job': ('name', ), + 'get_example': ('name', 'filter', ), + 'get_instruction': ('name', ), + 'import_data': ('name', 'input_config', 'user_email_address', ), + 'label_image': ('parent', 'basic_config', 'feature', 'image_classification_config', 'bounding_poly_config', 'polyline_config', 'segmentation_config', ), + 'label_text': ('parent', 'basic_config', 'feature', 'text_classification_config', 'text_entity_extraction_config', ), + 'label_video': ('parent', 'basic_config', 'feature', 'video_classification_config', 'object_detection_config', 'object_tracking_config', 'event_config', ), + 'list_annotated_datasets': ('parent', 'filter', 'page_size', 'page_token', ), + 
'list_annotation_spec_sets': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_data_items': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_datasets': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_evaluation_jobs': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_examples': ('parent', 'filter', 'page_size', 'page_token', ), + 'list_instructions': ('parent', 'filter', 'page_size', 'page_token', ), + 'pause_evaluation_job': ('name', ), + 'resume_evaluation_job': ('name', ), + 'search_evaluations': ('parent', 'filter', 'page_size', 'page_token', ), + 'search_example_comparisons': ('parent', 'page_size', 'page_token', ), + 'update_evaluation_job': ('evaluation_job', 'update_mask', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=datalabelingCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the datalabeling client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index c260056..6d9515a 100644 --- a/setup.py +++ b/setup.py @@ -24,8 +24,9 @@ version = "0.4.1" release_status = "Development Status :: 4 - Beta" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - 'enum34; python_version < "3.4"', + "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "libcst >= 0.2.5", + "proto-plus >= 1.4.0", ] package_root = os.path.abspath(os.path.dirname(__file__)) @@ -35,7 +36,9 @@ readme = readme_file.read() packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] namespaces = ["google"] @@ -56,17 +59,17 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python 
:: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], platforms="Posix; MacOS X; Windows", packages=packages, + python_requires=">=3.6", + scripts=["scripts/fixup_datalabeling_v1beta1_keywords.py"], namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, diff --git a/synth.metadata b/synth.metadata index 5431552..746af20 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,21 +4,21 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-datalabeling.git", - "sha": "aa2ef07908ba9585fc93e263d4eb169cd171e9e9" + "sha": "448682cf313c58ff9c61303d13d72a04b3dc34af" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3" + "sha": "bd0deaa1113b588d70449535ab9cbf0f2bd0e72f" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3" + "sha": "bd0deaa1113b588d70449535ab9cbf0f2bd0e72f" } } ], diff --git a/synth.py b/synth.py index 4364145..346e602 100644 --- a/synth.py +++ b/synth.py @@ -30,53 +30,7 @@ include_protos=True, ) -s.move( - library, - excludes=[ - 'docs/conf.py', - 'docs/index.rst', - 'google/cloud/datalabeling_v1beta1/__init__.py', - 'README.rst', - 'nox*.py', - 'setup.py', - 'setup.cfg', - ], -) - -# Fixup issues in generated code -s.replace( - "./**/gapic/**/*client.py", - r"operations_pb2.ImportDataOperationResponse", - "proto_operations_pb2.ImportDataOperationResponse", -) - -s.replace( - "google/**/*client.py", - r"=operations_pb2", - "=proto_operations_pb2", -) - -s.replace( - "tests/**/test*_client*.py", - r"operations_pb2.Operation\(", - "longrunning_operations_pb2.Operation(", -) - -# Fix docstrings with no summary line -s.replace( - "google/cloud/**/proto/*_pb2.py", - '''['"]__doc__['"]: """Attributes:''', - '''"__doc__": """ - Attributes:''', -) - -# Escape '_' at the end 
of the line in pb2 docstrings -s.replace( -"google/cloud/**/*_pb2.py", -"""\_$""", -"""\_""", -) - +s.move(library, excludes=["docs/index.rst", "nox.py", "README.rst", "setup.py"]) # TODO(busunkim): Use latest sphinx after microgenerator transition s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') @@ -84,8 +38,12 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=79, samples=True) -s.move(templated_files) +templated_files = common.py_library( + samples=True, # set to True only if there are samples + microgenerator=True, +) +s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file + # ---------------------------------------------------------------------------- # Samples templates diff --git a/tests/unit/gapic/datalabeling_v1beta1/__init__.py b/tests/unit/gapic/datalabeling_v1beta1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/unit/gapic/datalabeling_v1beta1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py b/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py new file mode 100644 index 0000000..7d9aada --- /dev/null +++ b/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py @@ -0,0 +1,9667 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async +from google.api_core import operations_v1 +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.datalabeling_v1beta1.services.data_labeling_service import ( + DataLabelingServiceAsyncClient, +) +from google.cloud.datalabeling_v1beta1.services.data_labeling_service import ( + DataLabelingServiceClient, +) +from google.cloud.datalabeling_v1beta1.services.data_labeling_service import pagers +from google.cloud.datalabeling_v1beta1.services.data_labeling_service import transports +from google.cloud.datalabeling_v1beta1.types import annotation +from google.cloud.datalabeling_v1beta1.types import annotation_spec_set +from google.cloud.datalabeling_v1beta1.types import ( + annotation_spec_set as gcd_annotation_spec_set, +) +from google.cloud.datalabeling_v1beta1.types import data_labeling_service +from google.cloud.datalabeling_v1beta1.types import data_payloads +from google.cloud.datalabeling_v1beta1.types import dataset +from google.cloud.datalabeling_v1beta1.types import dataset as gcd_dataset +from google.cloud.datalabeling_v1beta1.types import evaluation +from google.cloud.datalabeling_v1beta1.types import evaluation_job +from google.cloud.datalabeling_v1beta1.types import evaluation_job as gcd_evaluation_job +from google.cloud.datalabeling_v1beta1.types import human_annotation_config +from 
google.cloud.datalabeling_v1beta1.types import instruction +from google.cloud.datalabeling_v1beta1.types import instruction as gcd_instruction +from google.cloud.datalabeling_v1beta1.types import operations +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataLabelingServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DataLabelingServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataLabelingServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataLabelingServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataLabelingServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataLabelingServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi 
+ ) + + +@pytest.mark.parametrize( + "client_class", [DataLabelingServiceClient, DataLabelingServiceAsyncClient] +) +def test_data_labeling_service_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "datalabeling.googleapis.com:443" + + +def test_data_labeling_service_client_get_transport_class(): + transport = DataLabelingServiceClient.get_transport_class() + assert transport == transports.DataLabelingServiceGrpcTransport + + transport = DataLabelingServiceClient.get_transport_class("grpc") + assert transport == transports.DataLabelingServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataLabelingServiceClient, + transports.DataLabelingServiceGrpcTransport, + "grpc", + ), + ( + DataLabelingServiceAsyncClient, + transports.DataLabelingServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DataLabelingServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataLabelingServiceClient), +) +@mock.patch.object( + DataLabelingServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataLabelingServiceAsyncClient), +) +def test_data_labeling_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(DataLabelingServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataLabelingServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataLabelingServiceClient, + transports.DataLabelingServiceGrpcTransport, + "grpc", + ), + ( + DataLabelingServiceAsyncClient, + transports.DataLabelingServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_data_labeling_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataLabelingServiceClient, + transports.DataLabelingServiceGrpcTransport, + "grpc", + ), + ( + DataLabelingServiceAsyncClient, + transports.DataLabelingServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_data_labeling_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + +def test_data_labeling_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.datalabeling_v1beta1.services.data_labeling_service.transports.DataLabelingServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataLabelingServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + ) + + +def 
test_create_dataset( + transport: str = "grpc", request_type=data_labeling_service.CreateDatasetRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + data_item_count=1584, + ) + + response = client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.CreateDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_dataset.Dataset) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + assert response.data_item_count == 1584 + + +def test_create_dataset_from_dict(): + test_create_dataset(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_dataset_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = data_labeling_service.CreateDatasetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + data_item_count=1584, + ) + ) + + response = await client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_dataset.Dataset) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + assert response.data_item_count == 1584 + + +def test_create_dataset_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateDatasetRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_dataset), "__call__") as call: + call.return_value = gcd_dataset.Dataset() + + client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dataset_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateDatasetRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_dataset.Dataset()) + + await client.create_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_dataset_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_dataset.Dataset() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dataset( + parent="parent_value", dataset=gcd_dataset.Dataset(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].dataset == gcd_dataset.Dataset(name="name_value") + + +def test_create_dataset_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dataset( + data_labeling_service.CreateDatasetRequest(), + parent="parent_value", + dataset=gcd_dataset.Dataset(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_dataset_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_dataset.Dataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcd_dataset.Dataset()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dataset( + parent="parent_value", dataset=gcd_dataset.Dataset(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].dataset == gcd_dataset.Dataset(name="name_value") + + +@pytest.mark.asyncio +async def test_create_dataset_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_dataset( + data_labeling_service.CreateDatasetRequest(), + parent="parent_value", + dataset=gcd_dataset.Dataset(name="name_value"), + ) + + +def test_get_dataset( + transport: str = "grpc", request_type=data_labeling_service.GetDatasetRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + data_item_count=1584, + ) + + response = client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetDatasetRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dataset.Dataset) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + assert response.data_item_count == 1584 + + +def test_get_dataset_from_dict(): + test_get_dataset(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_dataset_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.GetDatasetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.Dataset( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + data_item_count=1584, + ) + ) + + response = await client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dataset.Dataset) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + assert response.data_item_count == 1584 + + +def test_get_dataset_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dataset), "__call__") as call: + call.return_value = dataset.Dataset() + + client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dataset_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) + + await client.get_dataset(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_dataset_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dataset(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_dataset_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dataset( + data_labeling_service.GetDatasetRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_dataset_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Dataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_dataset(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_dataset_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dataset( + data_labeling_service.GetDatasetRequest(), name="name_value", + ) + + +def test_list_datasets( + transport: str = "grpc", request_type=data_labeling_service.ListDatasetsRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_datasets), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ListDatasetsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDatasetsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_datasets_from_dict(): + test_list_datasets(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_datasets_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ListDatasetsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListDatasetsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDatasetsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_datasets_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListDatasetsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_datasets), "__call__") as call: + call.return_value = data_labeling_service.ListDatasetsResponse() + + client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_datasets_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListDatasetsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_datasets), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListDatasetsResponse() + ) + + await client.list_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_datasets_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_datasets), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = data_labeling_service.ListDatasetsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_datasets( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_datasets_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_datasets( + data_labeling_service.ListDatasetsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_datasets_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListDatasetsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListDatasetsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_datasets( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+    assert len(call.mock_calls)
+    _, args, _ = call.mock_calls[0]
+
+    assert args[0].parent == "parent_value"
+
+    assert args[0].filter == "filter_value"
+
+
+@pytest.mark.asyncio
+async def test_list_datasets_flattened_error_async():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_datasets(
+            data_labeling_service.ListDatasetsRequest(),
+            parent="parent_value",
+            filter="filter_value",
+        )
+
+
+def test_list_datasets_pager():
+    client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_datasets), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[], next_page_token="def",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(), dataset.Dataset(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_datasets(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, dataset.Dataset) for i in results)
+
+
+def test_list_datasets_pages():
+    client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_datasets), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[], next_page_token="def",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(), dataset.Dataset(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_datasets(request={}).pages)
+        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_datasets_async_pager():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_datasets),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[], next_page_token="def",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListDatasetsResponse(
+                datasets=[dataset.Dataset(), dataset.Dataset(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_datasets(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dataset.Dataset) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_datasets_async_pages():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_datasets),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + data_labeling_service.ListDatasetsResponse( + datasets=[dataset.Dataset(), dataset.Dataset(), dataset.Dataset(),], + next_page_token="abc", + ), + data_labeling_service.ListDatasetsResponse( + datasets=[], next_page_token="def", + ), + data_labeling_service.ListDatasetsResponse( + datasets=[dataset.Dataset(),], next_page_token="ghi", + ), + data_labeling_service.ListDatasetsResponse( + datasets=[dataset.Dataset(), dataset.Dataset(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_datasets(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_dataset( + transport: str = "grpc", request_type=data_labeling_service.DeleteDatasetRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.DeleteDatasetRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dataset_from_dict(): + test_delete_dataset(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_dataset_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.DeleteDatasetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dataset_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_dataset), "__call__") as call: + call.return_value = None + + client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dataset_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_dataset_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_dataset), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dataset(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_dataset_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dataset( + data_labeling_service.DeleteDatasetRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dataset_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dataset(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_dataset_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_dataset( + data_labeling_service.DeleteDatasetRequest(), name="name_value", + ) + + +def test_import_data( + transport: str = "grpc", request_type=data_labeling_service.ImportDataRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ImportDataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_data_from_dict(): + test_import_data(request_type=dict) + + +@pytest.mark.asyncio +async def test_import_data_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ImportDataRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.import_data), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_data_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ImportDataRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.import_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_data_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ImportDataRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.import_data), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.import_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_import_data_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.import_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.import_data( + name="name_value", + input_config=dataset.InputConfig( + text_metadata=dataset.TextMetadata(language_code="language_code_value") + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].input_config == dataset.InputConfig( + text_metadata=dataset.TextMetadata(language_code="language_code_value") + ) + + +def test_import_data_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.import_data( + data_labeling_service.ImportDataRequest(), + name="name_value", + input_config=dataset.InputConfig( + text_metadata=dataset.TextMetadata(language_code="language_code_value") + ), + ) + + +@pytest.mark.asyncio +async def test_import_data_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.import_data), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.import_data( + name="name_value", + input_config=dataset.InputConfig( + text_metadata=dataset.TextMetadata(language_code="language_code_value") + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].input_config == dataset.InputConfig( + text_metadata=dataset.TextMetadata(language_code="language_code_value") + ) + + +@pytest.mark.asyncio +async def test_import_data_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.import_data( + data_labeling_service.ImportDataRequest(), + name="name_value", + input_config=dataset.InputConfig( + text_metadata=dataset.TextMetadata(language_code="language_code_value") + ), + ) + + +def test_export_data( + transport: str = "grpc", request_type=data_labeling_service.ExportDataRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ExportDataRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_data_from_dict(): + test_export_data(request_type=dict) + + +@pytest.mark.asyncio +async def test_export_data_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ExportDataRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.export_data), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_data_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ExportDataRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.export_data), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_data_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ExportDataRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.export_data), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.export_data(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_export_data_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.export_data), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_data( + name="name_value", + annotated_dataset="annotated_dataset_value", + filter="filter_value", + output_config=dataset.OutputConfig( + gcs_destination=dataset.GcsDestination(output_uri="output_uri_value") + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].annotated_dataset == "annotated_dataset_value" + + assert args[0].filter == "filter_value" + + assert args[0].output_config == dataset.OutputConfig( + gcs_destination=dataset.GcsDestination(output_uri="output_uri_value") + ) + + +def test_export_data_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.export_data( + data_labeling_service.ExportDataRequest(), + name="name_value", + annotated_dataset="annotated_dataset_value", + filter="filter_value", + output_config=dataset.OutputConfig( + gcs_destination=dataset.GcsDestination(output_uri="output_uri_value") + ), + ) + + +@pytest.mark.asyncio +async def test_export_data_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.export_data), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.export_data( + name="name_value", + annotated_dataset="annotated_dataset_value", + filter="filter_value", + output_config=dataset.OutputConfig( + gcs_destination=dataset.GcsDestination(output_uri="output_uri_value") + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].annotated_dataset == "annotated_dataset_value" + + assert args[0].filter == "filter_value" + + assert args[0].output_config == dataset.OutputConfig( + gcs_destination=dataset.GcsDestination(output_uri="output_uri_value") + ) + + +@pytest.mark.asyncio +async def test_export_data_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.export_data( + data_labeling_service.ExportDataRequest(), + name="name_value", + annotated_dataset="annotated_dataset_value", + filter="filter_value", + output_config=dataset.OutputConfig( + gcs_destination=dataset.GcsDestination(output_uri="output_uri_value") + ), + ) + + +def test_get_data_item( + transport: str = "grpc", request_type=data_labeling_service.GetDataItemRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_data_item), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.DataItem( + name="name_value", + image_payload=data_payloads.ImagePayload(mime_type="mime_type_value"), + ) + + response = client.get_data_item(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetDataItemRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.DataItem) + + assert response.name == "name_value" + + +def test_get_data_item_from_dict(): + test_get_data_item(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_data_item_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = data_labeling_service.GetDataItemRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_data_item), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.DataItem(name="name_value",) + ) + + response = await client.get_data_item(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.DataItem) + + assert response.name == "name_value" + + +def test_get_data_item_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetDataItemRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_data_item), "__call__") as call: + call.return_value = dataset.DataItem() + + client.get_data_item(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_item_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_labeling_service.GetDataItemRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_data_item), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.DataItem()) + + await client.get_data_item(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_data_item_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_data_item), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.DataItem() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_item(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_data_item_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_item( + data_labeling_service.GetDataItemRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_item_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_data_item), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.DataItem() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.DataItem()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_item(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_data_item_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_item( + data_labeling_service.GetDataItemRequest(), name="name_value", + ) + + +def test_list_data_items( + transport: str = "grpc", request_type=data_labeling_service.ListDataItemsRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_data_items), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListDataItemsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_data_items(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ListDataItemsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataItemsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_items_from_dict(): + test_list_data_items(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_data_items_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ListDataItemsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_items), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListDataItemsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_data_items(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDataItemsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_items_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListDataItemsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_data_items), "__call__") as call: + call.return_value = data_labeling_service.ListDataItemsResponse() + + client.list_data_items(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_items_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListDataItemsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_items), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListDataItemsResponse() + ) + + await client.list_data_items(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_data_items_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_data_items), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListDataItemsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_items( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_data_items_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_items( + data_labeling_service.ListDataItemsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_items_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_items), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = data_labeling_service.ListDataItemsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListDataItemsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_items( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_list_data_items_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_items( + data_labeling_service.ListDataItemsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +def test_list_data_items_pager(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_data_items), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListDataItemsResponse( + data_items=[ + dataset.DataItem(), + dataset.DataItem(), + dataset.DataItem(), + ], + next_page_token="abc", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[], next_page_token="def", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(),], next_page_token="ghi", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(), dataset.DataItem(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_items(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, dataset.DataItem) for i in results) + + +def test_list_data_items_pages(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_data_items), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListDataItemsResponse( + data_items=[ + dataset.DataItem(), + dataset.DataItem(), + dataset.DataItem(), + ], + next_page_token="abc", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[], next_page_token="def", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(),], next_page_token="ghi", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(), dataset.DataItem(),], + ), + RuntimeError, + ) + pages = list(client.list_data_items(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_items_async_pager(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_items), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListDataItemsResponse( + data_items=[ + dataset.DataItem(), + dataset.DataItem(), + dataset.DataItem(), + ], + next_page_token="abc", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[], next_page_token="def", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(),], next_page_token="ghi", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(), dataset.DataItem(),], + ), + RuntimeError, + ) + async_pager = await client.list_data_items(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dataset.DataItem) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_items_async_pages(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_items), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListDataItemsResponse( + data_items=[ + dataset.DataItem(), + dataset.DataItem(), + dataset.DataItem(), + ], + next_page_token="abc", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[], next_page_token="def", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(),], next_page_token="ghi", + ), + data_labeling_service.ListDataItemsResponse( + data_items=[dataset.DataItem(), dataset.DataItem(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_data_items(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_get_annotated_dataset( + transport: str = "grpc", + request_type=data_labeling_service.GetAnnotatedDatasetRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_annotated_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.AnnotatedDataset( + name="name_value", + display_name="display_name_value", + description="description_value", + annotation_source=annotation.AnnotationSource.OPERATOR, + annotation_type=annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION, + example_count=1396, + completed_example_count=2448, + blocking_resources=["blocking_resources_value"], + ) + + response = client.get_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetAnnotatedDatasetRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.AnnotatedDataset) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.annotation_source == annotation.AnnotationSource.OPERATOR + + assert ( + response.annotation_type + == annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION + ) + + assert response.example_count == 1396 + + assert response.completed_example_count == 2448 + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_get_annotated_dataset_from_dict(): + test_get_annotated_dataset(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_annotated_dataset_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.GetAnnotatedDatasetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_annotated_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.AnnotatedDataset( + name="name_value", + display_name="display_name_value", + description="description_value", + annotation_source=annotation.AnnotationSource.OPERATOR, + annotation_type=annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION, + example_count=1396, + completed_example_count=2448, + blocking_resources=["blocking_resources_value"], + ) + ) + + response = await client.get_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.AnnotatedDataset) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.annotation_source == annotation.AnnotationSource.OPERATOR + + assert ( + response.annotation_type + == annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION + ) + + assert response.example_count == 1396 + + assert response.completed_example_count == 2448 + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_get_annotated_dataset_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetAnnotatedDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_annotated_dataset), "__call__" + ) as call: + call.return_value = dataset.AnnotatedDataset() + + client.get_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_annotated_dataset_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetAnnotatedDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_annotated_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.AnnotatedDataset() + ) + + await client.get_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_annotated_dataset_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_annotated_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.AnnotatedDataset() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_annotated_dataset(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_annotated_dataset_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_annotated_dataset( + data_labeling_service.GetAnnotatedDatasetRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_annotated_dataset_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_annotated_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.AnnotatedDataset() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.AnnotatedDataset() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_annotated_dataset(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_annotated_dataset_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_annotated_dataset( + data_labeling_service.GetAnnotatedDatasetRequest(), name="name_value", + ) + + +def test_list_annotated_datasets( + transport: str = "grpc", + request_type=data_labeling_service.ListAnnotatedDatasetsRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_annotated_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListAnnotatedDatasetsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_annotated_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ListAnnotatedDatasetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAnnotatedDatasetsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_annotated_datasets_from_dict(): + test_list_annotated_datasets(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_annotated_datasets_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ListAnnotatedDatasetsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_annotated_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListAnnotatedDatasetsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_annotated_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAnnotatedDatasetsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_annotated_datasets_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListAnnotatedDatasetsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_annotated_datasets), "__call__" + ) as call: + call.return_value = data_labeling_service.ListAnnotatedDatasetsResponse() + + client.list_annotated_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_annotated_datasets_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = data_labeling_service.ListAnnotatedDatasetsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_annotated_datasets), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListAnnotatedDatasetsResponse() + ) + + await client.list_annotated_datasets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_annotated_datasets_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_annotated_datasets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListAnnotatedDatasetsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_annotated_datasets( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_annotated_datasets_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.list_annotated_datasets(
+            data_labeling_service.ListAnnotatedDatasetsRequest(),
+            parent="parent_value",
+            filter="filter_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_annotated_datasets_flattened_async():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_annotated_datasets), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = data_labeling_service.ListAnnotatedDatasetsResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            data_labeling_service.ListAnnotatedDatasetsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_annotated_datasets(
+            parent="parent_value", filter="filter_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].filter == "filter_value"
+
+
+@pytest.mark.asyncio
+async def test_list_annotated_datasets_flattened_error_async():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_annotated_datasets(
+            data_labeling_service.ListAnnotatedDatasetsRequest(),
+            parent="parent_value",
+            filter="filter_value",
+        )
+
+
+def test_list_annotated_datasets_pager():
+    client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.list_annotated_datasets), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                ],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[], next_page_token="def",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[dataset.AnnotatedDataset(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_annotated_datasets(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, dataset.AnnotatedDataset) for i in results)
+
+
+def test_list_annotated_datasets_pages():
+    client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._transport.list_annotated_datasets), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                ],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[], next_page_token="def",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[dataset.AnnotatedDataset(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_annotated_datasets(request={}).pages)
+        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_annotated_datasets_async_pager():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_annotated_datasets),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                ],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[], next_page_token="def",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[dataset.AnnotatedDataset(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListAnnotatedDatasetsResponse(
+                annotated_datasets=[
+                    dataset.AnnotatedDataset(),
+                    dataset.AnnotatedDataset(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_annotated_datasets(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dataset.AnnotatedDataset) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_annotated_datasets_async_pages():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_annotated_datasets),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + data_labeling_service.ListAnnotatedDatasetsResponse( + annotated_datasets=[ + dataset.AnnotatedDataset(), + dataset.AnnotatedDataset(), + dataset.AnnotatedDataset(), + ], + next_page_token="abc", + ), + data_labeling_service.ListAnnotatedDatasetsResponse( + annotated_datasets=[], next_page_token="def", + ), + data_labeling_service.ListAnnotatedDatasetsResponse( + annotated_datasets=[dataset.AnnotatedDataset(),], next_page_token="ghi", + ), + data_labeling_service.ListAnnotatedDatasetsResponse( + annotated_datasets=[ + dataset.AnnotatedDataset(), + dataset.AnnotatedDataset(), + ], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_annotated_datasets(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_annotated_dataset( + transport: str = "grpc", + request_type=data_labeling_service.DeleteAnnotatedDatasetRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_annotated_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.DeleteAnnotatedDatasetRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_annotated_dataset_from_dict(): + test_delete_annotated_dataset(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_annotated_dataset_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.DeleteAnnotatedDatasetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_annotated_dataset), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_annotated_dataset_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteAnnotatedDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_annotated_dataset), "__call__" + ) as call: + call.return_value = None + + client.delete_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_annotated_dataset_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteAnnotatedDatasetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_annotated_dataset), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_annotated_dataset(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_label_image( + transport: str = "grpc", request_type=data_labeling_service.LabelImageRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_image), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.label_image(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.LabelImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_label_image_from_dict(): + test_label_image(request_type=dict) + + +@pytest.mark.asyncio +async def test_label_image_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.LabelImageRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.label_image), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.label_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_label_image_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.LabelImageRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.label_image), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.label_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_label_image_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.LabelImageRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.label_image), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.label_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_label_image_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_image), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.label_image( + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelImageRequest.Feature.CLASSIFICATION, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].basic_config == human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ) + + assert ( + args[0].feature + == data_labeling_service.LabelImageRequest.Feature.CLASSIFICATION + ) + + +def test_label_image_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.label_image( + data_labeling_service.LabelImageRequest(), + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelImageRequest.Feature.CLASSIFICATION, + ) + + +@pytest.mark.asyncio +async def test_label_image_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.label_image), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.label_image( + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelImageRequest.Feature.CLASSIFICATION, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].basic_config == human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ) + + assert ( + args[0].feature + == data_labeling_service.LabelImageRequest.Feature.CLASSIFICATION + ) + + +@pytest.mark.asyncio +async def test_label_image_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.label_image( + data_labeling_service.LabelImageRequest(), + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelImageRequest.Feature.CLASSIFICATION, + ) + + +def test_label_video( + transport: str = "grpc", request_type=data_labeling_service.LabelVideoRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.label_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.LabelVideoRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_label_video_from_dict(): + test_label_video(request_type=dict) + + +@pytest.mark.asyncio +async def test_label_video_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.LabelVideoRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.label_video), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.label_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_label_video_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.LabelVideoRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_video), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.label_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_label_video_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.LabelVideoRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.label_video), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.label_video(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_label_video_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_video), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.label_video( + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelVideoRequest.Feature.CLASSIFICATION, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].basic_config == human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ) + + assert ( + args[0].feature + == data_labeling_service.LabelVideoRequest.Feature.CLASSIFICATION + ) + + +def test_label_video_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.label_video( + data_labeling_service.LabelVideoRequest(), + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelVideoRequest.Feature.CLASSIFICATION, + ) + + +@pytest.mark.asyncio +async def test_label_video_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.label_video), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.label_video( + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelVideoRequest.Feature.CLASSIFICATION, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].basic_config == human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ) + + assert ( + args[0].feature + == data_labeling_service.LabelVideoRequest.Feature.CLASSIFICATION + ) + + +@pytest.mark.asyncio +async def test_label_video_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.label_video( + data_labeling_service.LabelVideoRequest(), + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelVideoRequest.Feature.CLASSIFICATION, + ) + + +def test_label_text( + transport: str = "grpc", request_type=data_labeling_service.LabelTextRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.label_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.LabelTextRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_label_text_from_dict(): + test_label_text(request_type=dict) + + +@pytest.mark.asyncio +async def test_label_text_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.LabelTextRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.label_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.label_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_label_text_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.LabelTextRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_text), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.label_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_label_text_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.LabelTextRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.label_text), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.label_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_label_text_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.label_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.label_text( + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelTextRequest.Feature.TEXT_CLASSIFICATION, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].basic_config == human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ) + + assert ( + args[0].feature + == data_labeling_service.LabelTextRequest.Feature.TEXT_CLASSIFICATION + ) + + +def test_label_text_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.label_text( + data_labeling_service.LabelTextRequest(), + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelTextRequest.Feature.TEXT_CLASSIFICATION, + ) + + +@pytest.mark.asyncio +async def test_label_text_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.label_text), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.label_text( + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelTextRequest.Feature.TEXT_CLASSIFICATION, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].basic_config == human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ) + + assert ( + args[0].feature + == data_labeling_service.LabelTextRequest.Feature.TEXT_CLASSIFICATION + ) + + +@pytest.mark.asyncio +async def test_label_text_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.label_text( + data_labeling_service.LabelTextRequest(), + parent="parent_value", + basic_config=human_annotation_config.HumanAnnotationConfig( + instruction="instruction_value" + ), + feature=data_labeling_service.LabelTextRequest.Feature.TEXT_CLASSIFICATION, + ) + + +def test_get_example( + transport: str = "grpc", request_type=data_labeling_service.GetExampleRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_example), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Example( + name="name_value", + image_payload=data_payloads.ImagePayload(mime_type="mime_type_value"), + ) + + response = client.get_example(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetExampleRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.Example) + + assert response.name == "name_value" + + +def test_get_example_from_dict(): + test_get_example(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_example_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = data_labeling_service.GetExampleRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_example), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dataset.Example(name="name_value",) + ) + + response = await client.get_example(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dataset.Example) + + assert response.name == "name_value" + + +def test_get_example_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetExampleRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_example), "__call__") as call: + call.return_value = dataset.Example() + + client.get_example(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_example_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_labeling_service.GetExampleRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_example), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Example()) + + await client.get_example(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_example_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_example), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Example() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_example( + name="name_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].filter == "filter_value" + + +def test_get_example_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_example( + data_labeling_service.GetExampleRequest(), + name="name_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_get_example_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_example), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dataset.Example() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Example()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_example(name="name_value", filter="filter_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_get_example_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_example( + data_labeling_service.GetExampleRequest(), + name="name_value", + filter="filter_value", + ) + + +def test_list_examples( + transport: str = "grpc", request_type=data_labeling_service.ListExamplesRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_examples), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListExamplesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_examples(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ListExamplesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExamplesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_examples_from_dict(): + test_list_examples(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_examples_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ListExamplesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_examples), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListExamplesResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_examples(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListExamplesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_examples_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListExamplesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_examples), "__call__") as call: + call.return_value = data_labeling_service.ListExamplesResponse() + + client.list_examples(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_examples_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListExamplesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_examples), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListExamplesResponse() + ) + + await client.list_examples(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_examples_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_examples), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListExamplesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_examples( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_examples_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_examples( + data_labeling_service.ListExamplesRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_examples_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_examples), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+        call.return_value = data_labeling_service.ListExamplesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            data_labeling_service.ListExamplesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_examples(
+            parent="parent_value", filter="filter_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].filter == "filter_value"
+
+
+@pytest.mark.asyncio
+async def test_list_examples_flattened_error_async():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_examples(
+            data_labeling_service.ListExamplesRequest(),
+            parent="parent_value",
+            filter="filter_value",
+        )
+
+
+def test_list_examples_pager():
+    client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_examples), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(), dataset.Example(), dataset.Example(),],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[], next_page_token="def",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(), dataset.Example(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_examples(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, dataset.Example) for i in results)
+
+
+def test_list_examples_pages():
+    client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client._transport.list_examples), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(), dataset.Example(), dataset.Example(),],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[], next_page_token="def",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(), dataset.Example(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_examples(request={}).pages)
+        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_examples_async_pager():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_examples),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(), dataset.Example(), dataset.Example(),],
+                next_page_token="abc",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[], next_page_token="def",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(),], next_page_token="ghi",
+            ),
+            data_labeling_service.ListExamplesResponse(
+                examples=[dataset.Example(), dataset.Example(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_examples(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dataset.Example) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_examples_async_pages():
+    client = DataLabelingServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client._client._transport.list_examples),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + data_labeling_service.ListExamplesResponse( + examples=[dataset.Example(), dataset.Example(), dataset.Example(),], + next_page_token="abc", + ), + data_labeling_service.ListExamplesResponse( + examples=[], next_page_token="def", + ), + data_labeling_service.ListExamplesResponse( + examples=[dataset.Example(),], next_page_token="ghi", + ), + data_labeling_service.ListExamplesResponse( + examples=[dataset.Example(), dataset.Example(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_examples(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_create_annotation_spec_set( + transport: str = "grpc", + request_type=data_labeling_service.CreateAnnotationSpecSetRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + ) + + response = client.create_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.CreateAnnotationSpecSetRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_annotation_spec_set.AnnotationSpecSet) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_create_annotation_spec_set_from_dict(): + test_create_annotation_spec_set(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_annotation_spec_set_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.CreateAnnotationSpecSetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + ) + ) + + response = await client.create_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_annotation_spec_set.AnnotationSpecSet) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_create_annotation_spec_set_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateAnnotationSpecSetRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_annotation_spec_set), "__call__" + ) as call: + call.return_value = gcd_annotation_spec_set.AnnotationSpecSet() + + client.create_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_annotation_spec_set_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateAnnotationSpecSetRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.create_annotation_spec_set), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_annotation_spec_set.AnnotationSpecSet() + ) + + await client.create_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_annotation_spec_set_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_annotation_spec_set.AnnotationSpecSet() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_annotation_spec_set( + parent="parent_value", + annotation_spec_set=gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].annotation_spec_set == gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value" + ) + + +def test_create_annotation_spec_set_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_annotation_spec_set( + data_labeling_service.CreateAnnotationSpecSetRequest(), + parent="parent_value", + annotation_spec_set=gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value" + ), + ) + + +@pytest.mark.asyncio +async def test_create_annotation_spec_set_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_annotation_spec_set.AnnotationSpecSet() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_annotation_spec_set.AnnotationSpecSet() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_annotation_spec_set( + parent="parent_value", + annotation_spec_set=gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].annotation_spec_set == gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value" + ) + + +@pytest.mark.asyncio +async def test_create_annotation_spec_set_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_annotation_spec_set( + data_labeling_service.CreateAnnotationSpecSetRequest(), + parent="parent_value", + annotation_spec_set=gcd_annotation_spec_set.AnnotationSpecSet( + name="name_value" + ), + ) + + +def test_get_annotation_spec_set( + transport: str = "grpc", + request_type=data_labeling_service.GetAnnotationSpecSetRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = annotation_spec_set.AnnotationSpecSet( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + ) + + response = client.get_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetAnnotationSpecSetRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, annotation_spec_set.AnnotationSpecSet) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_get_annotation_spec_set_from_dict(): + test_get_annotation_spec_set(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_annotation_spec_set_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.GetAnnotationSpecSetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + annotation_spec_set.AnnotationSpecSet( + name="name_value", + display_name="display_name_value", + description="description_value", + blocking_resources=["blocking_resources_value"], + ) + ) + + response = await client.get_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, annotation_spec_set.AnnotationSpecSet) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_get_annotation_spec_set_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetAnnotationSpecSetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_annotation_spec_set), "__call__" + ) as call: + call.return_value = annotation_spec_set.AnnotationSpecSet() + + client.get_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_annotation_spec_set_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetAnnotationSpecSetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_annotation_spec_set), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + annotation_spec_set.AnnotationSpecSet() + ) + + await client.get_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_annotation_spec_set_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = annotation_spec_set.AnnotationSpecSet() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_annotation_spec_set(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_annotation_spec_set_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_annotation_spec_set( + data_labeling_service.GetAnnotationSpecSetRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_annotation_spec_set_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = annotation_spec_set.AnnotationSpecSet() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + annotation_spec_set.AnnotationSpecSet() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_annotation_spec_set(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_annotation_spec_set_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_annotation_spec_set( + data_labeling_service.GetAnnotationSpecSetRequest(), name="name_value", + ) + + +def test_list_annotation_spec_sets( + transport: str = "grpc", + request_type=data_labeling_service.ListAnnotationSpecSetsRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_annotation_spec_sets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListAnnotationSpecSetsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_annotation_spec_sets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ListAnnotationSpecSetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAnnotationSpecSetsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_annotation_spec_sets_from_dict(): + test_list_annotation_spec_sets(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_annotation_spec_sets_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ListAnnotationSpecSetsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_annotation_spec_sets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListAnnotationSpecSetsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_annotation_spec_sets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAnnotationSpecSetsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_annotation_spec_sets_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListAnnotationSpecSetsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_annotation_spec_sets), "__call__" + ) as call: + call.return_value = data_labeling_service.ListAnnotationSpecSetsResponse() + + client.list_annotation_spec_sets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_annotation_spec_sets_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListAnnotationSpecSetsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_annotation_spec_sets), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListAnnotationSpecSetsResponse() + ) + + await client.list_annotation_spec_sets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_annotation_spec_sets_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_annotation_spec_sets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListAnnotationSpecSetsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_annotation_spec_sets( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_annotation_spec_sets_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_annotation_spec_sets( + data_labeling_service.ListAnnotationSpecSetsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_annotation_spec_sets_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_annotation_spec_sets), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListAnnotationSpecSetsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListAnnotationSpecSetsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_annotation_spec_sets( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_list_annotation_spec_sets_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_annotation_spec_sets( + data_labeling_service.ListAnnotationSpecSetsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +def test_list_annotation_spec_sets_pager(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_annotation_spec_sets), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + next_page_token="abc", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[], next_page_token="def", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[annotation_spec_set.AnnotationSpecSet(),], + next_page_token="ghi", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_annotation_spec_sets(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all( + isinstance(i, annotation_spec_set.AnnotationSpecSet) for i in results + ) + + +def test_list_annotation_spec_sets_pages(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_annotation_spec_sets), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + next_page_token="abc", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[], next_page_token="def", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[annotation_spec_set.AnnotationSpecSet(),], + next_page_token="ghi", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + ), + RuntimeError, + ) + pages = list(client.list_annotation_spec_sets(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_annotation_spec_sets_async_pager(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_annotation_spec_sets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + next_page_token="abc", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[], next_page_token="def", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[annotation_spec_set.AnnotationSpecSet(),], + next_page_token="ghi", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_annotation_spec_sets(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, annotation_spec_set.AnnotationSpecSet) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_annotation_spec_sets_async_pages(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_annotation_spec_sets), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + next_page_token="abc", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[], next_page_token="def", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[annotation_spec_set.AnnotationSpecSet(),], + next_page_token="ghi", + ), + data_labeling_service.ListAnnotationSpecSetsResponse( + annotation_spec_sets=[ + annotation_spec_set.AnnotationSpecSet(), + annotation_spec_set.AnnotationSpecSet(), + ], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_annotation_spec_sets(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_annotation_spec_set( + transport: str = "grpc", + request_type=data_labeling_service.DeleteAnnotationSpecSetRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.DeleteAnnotationSpecSetRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_annotation_spec_set_from_dict(): + test_delete_annotation_spec_set(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_annotation_spec_set_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.DeleteAnnotationSpecSetRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_annotation_spec_set_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteAnnotationSpecSetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_annotation_spec_set), "__call__" + ) as call: + call.return_value = None + + client.delete_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_annotation_spec_set_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteAnnotationSpecSetRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_annotation_spec_set), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_annotation_spec_set(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_annotation_spec_set_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_annotation_spec_set(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_annotation_spec_set_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_annotation_spec_set( + data_labeling_service.DeleteAnnotationSpecSetRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_annotation_spec_set_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_annotation_spec_set), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_annotation_spec_set(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_annotation_spec_set_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_annotation_spec_set( + data_labeling_service.DeleteAnnotationSpecSetRequest(), name="name_value", + ) + + +def test_create_instruction( + transport: str = "grpc", request_type=data_labeling_service.CreateInstructionRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + + response = client.create_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.CreateInstructionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instruction_from_dict(): + test_create_instruction(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_instruction_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.CreateInstructionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + + response = await client.create_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instruction_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateInstructionRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_instruction), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + + client.create_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_instruction_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateInstructionRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.create_instruction), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + + await client.create_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_instruction_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instruction( + parent="parent_value", + instruction=gcd_instruction.Instruction(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instruction == gcd_instruction.Instruction(name="name_value") + + +def test_create_instruction_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instruction( + data_labeling_service.CreateInstructionRequest(), + parent="parent_value", + instruction=gcd_instruction.Instruction(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_instruction_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_instruction( + parent="parent_value", + instruction=gcd_instruction.Instruction(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].instruction == gcd_instruction.Instruction(name="name_value") + + +@pytest.mark.asyncio +async def test_create_instruction_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_instruction( + data_labeling_service.CreateInstructionRequest(), + parent="parent_value", + instruction=gcd_instruction.Instruction(name="name_value"), + ) + + +def test_get_instruction( + transport: str = "grpc", request_type=data_labeling_service.GetInstructionRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_instruction), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = instruction.Instruction( + name="name_value", + display_name="display_name_value", + description="description_value", + data_type=dataset.DataType.IMAGE, + blocking_resources=["blocking_resources_value"], + ) + + response = client.get_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetInstructionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, instruction.Instruction) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.data_type == dataset.DataType.IMAGE + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_get_instruction_from_dict(): + test_get_instruction(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_instruction_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.GetInstructionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instruction.Instruction( + name="name_value", + display_name="display_name_value", + description="description_value", + data_type=dataset.DataType.IMAGE, + blocking_resources=["blocking_resources_value"], + ) + ) + + response = await client.get_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, instruction.Instruction) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.data_type == dataset.DataType.IMAGE + + assert response.blocking_resources == ["blocking_resources_value"] + + +def test_get_instruction_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetInstructionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_instruction), "__call__") as call: + call.return_value = instruction.Instruction() + + client.get_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_instruction_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetInstructionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_instruction), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instruction.Instruction() + ) + + await client.get_instruction(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_instruction_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_instruction), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = instruction.Instruction() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instruction(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_instruction_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instruction( + data_labeling_service.GetInstructionRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_instruction_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = instruction.Instruction() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + instruction.Instruction() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instruction(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_instruction_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instruction( + data_labeling_service.GetInstructionRequest(), name="name_value", + ) + + +def test_list_instructions( + transport: str = "grpc", request_type=data_labeling_service.ListInstructionsRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_instructions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListInstructionsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_instructions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ListInstructionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstructionsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_instructions_from_dict(): + test_list_instructions(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_instructions_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ListInstructionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_instructions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListInstructionsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_instructions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstructionsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_instructions_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListInstructionsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_instructions), "__call__" + ) as call: + call.return_value = data_labeling_service.ListInstructionsResponse() + + client.list_instructions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_instructions_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListInstructionsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_instructions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListInstructionsResponse() + ) + + await client.list_instructions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_instructions_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_instructions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = data_labeling_service.ListInstructionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instructions( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_instructions_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instructions( + data_labeling_service.ListInstructionsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_instructions_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_instructions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListInstructionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListInstructionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instructions( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_list_instructions_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_instructions( + data_labeling_service.ListInstructionsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +def test_list_instructions_pager(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_instructions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.ListInstructionsResponse( + instructions=[ + instruction.Instruction(), + instruction.Instruction(), + instruction.Instruction(), + ], + next_page_token="abc", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[], next_page_token="def", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(),], next_page_token="ghi", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(), instruction.Instruction(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_instructions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, instruction.Instruction) for i in results) + + +def test_list_instructions_pages(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual 
call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_instructions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.ListInstructionsResponse( + instructions=[ + instruction.Instruction(), + instruction.Instruction(), + instruction.Instruction(), + ], + next_page_token="abc", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[], next_page_token="def", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(),], next_page_token="ghi", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(), instruction.Instruction(),], + ), + RuntimeError, + ) + pages = list(client.list_instructions(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_instructions_async_pager(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_instructions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListInstructionsResponse( + instructions=[ + instruction.Instruction(), + instruction.Instruction(), + instruction.Instruction(), + ], + next_page_token="abc", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[], next_page_token="def", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(),], next_page_token="ghi", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(), instruction.Instruction(),], + ), + RuntimeError, + ) + async_pager = await client.list_instructions(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, instruction.Instruction) for i in responses) + + +@pytest.mark.asyncio +async def test_list_instructions_async_pages(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_instructions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListInstructionsResponse( + instructions=[ + instruction.Instruction(), + instruction.Instruction(), + instruction.Instruction(), + ], + next_page_token="abc", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[], next_page_token="def", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(),], next_page_token="ghi", + ), + data_labeling_service.ListInstructionsResponse( + instructions=[instruction.Instruction(), instruction.Instruction(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_instructions(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_instruction( + transport: str = "grpc", request_type=data_labeling_service.DeleteInstructionRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.DeleteInstructionRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_instruction_from_dict(): + test_delete_instruction(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_instruction_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.DeleteInstructionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_instruction_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteInstructionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_instruction), "__call__" + ) as call: + call.return_value = None + + client.delete_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_instruction_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteInstructionRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_instruction), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_instruction(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_instruction_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instruction(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_instruction_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instruction( + data_labeling_service.DeleteInstructionRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_instruction_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_instruction), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instruction(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_instruction_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_instruction( + data_labeling_service.DeleteInstructionRequest(), name="name_value", + ) + + +def test_get_evaluation( + transport: str = "grpc", request_type=data_labeling_service.GetEvaluationRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_evaluation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation.Evaluation( + name="name_value", + annotation_type=annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION, + evaluated_item_count=2129, + ) + + response = client.get_evaluation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetEvaluationRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, evaluation.Evaluation) + + assert response.name == "name_value" + + assert ( + response.annotation_type + == annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION + ) + + assert response.evaluated_item_count == 2129 + + +def test_get_evaluation_from_dict(): + test_get_evaluation(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_evaluation_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = data_labeling_service.GetEvaluationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_evaluation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation.Evaluation( + name="name_value", + annotation_type=annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION, + evaluated_item_count=2129, + ) + ) + + response = await client.get_evaluation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, evaluation.Evaluation) + + assert response.name == "name_value" + + assert ( + response.annotation_type + == annotation.AnnotationType.IMAGE_CLASSIFICATION_ANNOTATION + ) + + assert response.evaluated_item_count == 2129 + + +def test_get_evaluation_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetEvaluationRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_evaluation), "__call__") as call: + call.return_value = evaluation.Evaluation() + + client.get_evaluation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_evaluation_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetEvaluationRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_evaluation), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation.Evaluation() + ) + + await client.get_evaluation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_evaluation_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_evaluation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation.Evaluation() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_evaluation(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_evaluation_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_evaluation( + data_labeling_service.GetEvaluationRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_evaluation_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_evaluation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation.Evaluation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation.Evaluation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_evaluation(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_evaluation_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_evaluation( + data_labeling_service.GetEvaluationRequest(), name="name_value", + ) + + +def test_search_evaluations( + transport: str = "grpc", request_type=data_labeling_service.SearchEvaluationsRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.search_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.SearchEvaluationsResponse( + next_page_token="next_page_token_value", + ) + + response = client.search_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.SearchEvaluationsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchEvaluationsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_search_evaluations_from_dict(): + test_search_evaluations(request_type=dict) + + +@pytest.mark.asyncio +async def test_search_evaluations_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.SearchEvaluationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.search_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.SearchEvaluationsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.search_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchEvaluationsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_search_evaluations_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.SearchEvaluationsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.search_evaluations), "__call__" + ) as call: + call.return_value = data_labeling_service.SearchEvaluationsResponse() + + client.search_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_evaluations_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_labeling_service.SearchEvaluationsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_evaluations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.SearchEvaluationsResponse() + ) + + await client.search_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_search_evaluations_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.search_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.SearchEvaluationsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_evaluations( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_search_evaluations_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.search_evaluations( + data_labeling_service.SearchEvaluationsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_search_evaluations_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.SearchEvaluationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.SearchEvaluationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_evaluations( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_search_evaluations_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.search_evaluations( + data_labeling_service.SearchEvaluationsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +def test_search_evaluations_pager(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.search_evaluations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.SearchEvaluationsResponse( + evaluations=[ + evaluation.Evaluation(), + evaluation.Evaluation(), + evaluation.Evaluation(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[], next_page_token="def", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(),], next_page_token="ghi", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(), evaluation.Evaluation(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.search_evaluations(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, evaluation.Evaluation) for i in results) + + +def test_search_evaluations_pages(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.search_evaluations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.SearchEvaluationsResponse( + evaluations=[ + evaluation.Evaluation(), + evaluation.Evaluation(), + evaluation.Evaluation(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[], next_page_token="def", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(),], next_page_token="ghi", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(), evaluation.Evaluation(),], + ), + RuntimeError, + ) + pages = list(client.search_evaluations(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_evaluations_async_pager(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_evaluations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.SearchEvaluationsResponse( + evaluations=[ + evaluation.Evaluation(), + evaluation.Evaluation(), + evaluation.Evaluation(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[], next_page_token="def", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(),], next_page_token="ghi", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(), evaluation.Evaluation(),], + ), + RuntimeError, + ) + async_pager = await client.search_evaluations(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, evaluation.Evaluation) for i in responses) + + +@pytest.mark.asyncio +async def test_search_evaluations_async_pages(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_evaluations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.SearchEvaluationsResponse( + evaluations=[ + evaluation.Evaluation(), + evaluation.Evaluation(), + evaluation.Evaluation(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[], next_page_token="def", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(),], next_page_token="ghi", + ), + data_labeling_service.SearchEvaluationsResponse( + evaluations=[evaluation.Evaluation(), evaluation.Evaluation(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.search_evaluations(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_search_example_comparisons( + transport: str = "grpc", + request_type=data_labeling_service.SearchExampleComparisonsRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.search_example_comparisons), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.SearchExampleComparisonsResponse( + next_page_token="next_page_token_value", + ) + + response = client.search_example_comparisons(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.SearchExampleComparisonsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchExampleComparisonsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_search_example_comparisons_from_dict(): + test_search_example_comparisons(request_type=dict) + + +@pytest.mark.asyncio +async def test_search_example_comparisons_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.SearchExampleComparisonsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_example_comparisons), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.SearchExampleComparisonsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.search_example_comparisons(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchExampleComparisonsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_search_example_comparisons_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.SearchExampleComparisonsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.search_example_comparisons), "__call__" + ) as call: + call.return_value = data_labeling_service.SearchExampleComparisonsResponse() + + client.search_example_comparisons(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_example_comparisons_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.SearchExampleComparisonsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_example_comparisons), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.SearchExampleComparisonsResponse() + ) + + await client.search_example_comparisons(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_search_example_comparisons_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.search_example_comparisons), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.SearchExampleComparisonsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_example_comparisons(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_search_example_comparisons_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_example_comparisons( + data_labeling_service.SearchExampleComparisonsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_search_example_comparisons_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_example_comparisons), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.SearchExampleComparisonsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.SearchExampleComparisonsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_example_comparisons(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_search_example_comparisons_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.search_example_comparisons( + data_labeling_service.SearchExampleComparisonsRequest(), + parent="parent_value", + ) + + +def test_search_example_comparisons_pager(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.search_example_comparisons), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[], next_page_token="def", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="ghi", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = 
client.search_example_comparisons(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all( + isinstance( + i, + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison, + ) + for i in results + ) + + +def test_search_example_comparisons_pages(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.search_example_comparisons), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[], next_page_token="def", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="ghi", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + ), + RuntimeError, + ) + pages = list(client.search_example_comparisons(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_example_comparisons_async_pager(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the 
request. + with mock.patch.object( + type(client._client._transport.search_example_comparisons), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[], next_page_token="def", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="ghi", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_example_comparisons(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance( + i, + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison, + ) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_search_example_comparisons_async_pages(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.search_example_comparisons), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="abc", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[], next_page_token="def", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + next_page_token="ghi", + ), + data_labeling_service.SearchExampleComparisonsResponse( + example_comparisons=[ + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + data_labeling_service.SearchExampleComparisonsResponse.ExampleComparison(), + ], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.search_example_comparisons(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_create_evaluation_job( + transport: str = "grpc", + request_type=data_labeling_service.CreateEvaluationJobRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = evaluation_job.EvaluationJob( + name="name_value", + description="description_value", + state=evaluation_job.EvaluationJob.State.SCHEDULED, + schedule="schedule_value", + model_version="model_version_value", + annotation_spec_set="annotation_spec_set_value", + label_missing_ground_truth=True, + ) + + response = client.create_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.CreateEvaluationJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, evaluation_job.EvaluationJob) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == evaluation_job.EvaluationJob.State.SCHEDULED + + assert response.schedule == "schedule_value" + + assert response.model_version == "model_version_value" + + assert response.annotation_spec_set == "annotation_spec_set_value" + + assert response.label_missing_ground_truth is True + + +def test_create_evaluation_job_from_dict(): + test_create_evaluation_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_evaluation_job_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.CreateEvaluationJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation_job.EvaluationJob( + name="name_value", + description="description_value", + state=evaluation_job.EvaluationJob.State.SCHEDULED, + schedule="schedule_value", + model_version="model_version_value", + annotation_spec_set="annotation_spec_set_value", + label_missing_ground_truth=True, + ) + ) + + response = await client.create_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, evaluation_job.EvaluationJob) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == evaluation_job.EvaluationJob.State.SCHEDULED + + assert response.schedule == "schedule_value" + + assert response.model_version == "model_version_value" + + assert response.annotation_spec_set == "annotation_spec_set_value" + + assert response.label_missing_ground_truth is True + + +def test_create_evaluation_job_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateEvaluationJobRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_evaluation_job), "__call__" + ) as call: + call.return_value = evaluation_job.EvaluationJob() + + client.create_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_evaluation_job_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.CreateEvaluationJobRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_evaluation_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation_job.EvaluationJob() + ) + + await client.create_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_evaluation_job_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation_job.EvaluationJob() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_evaluation_job( + parent="parent_value", job=evaluation_job.EvaluationJob(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].job == evaluation_job.EvaluationJob(name="name_value") + + +def test_create_evaluation_job_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_evaluation_job( + data_labeling_service.CreateEvaluationJobRequest(), + parent="parent_value", + job=evaluation_job.EvaluationJob(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_evaluation_job_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation_job.EvaluationJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation_job.EvaluationJob() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_evaluation_job( + parent="parent_value", job=evaluation_job.EvaluationJob(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].job == evaluation_job.EvaluationJob(name="name_value") + + +@pytest.mark.asyncio +async def test_create_evaluation_job_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_evaluation_job( + data_labeling_service.CreateEvaluationJobRequest(), + parent="parent_value", + job=evaluation_job.EvaluationJob(name="name_value"), + ) + + +def test_update_evaluation_job( + transport: str = "grpc", + request_type=data_labeling_service.UpdateEvaluationJobRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_evaluation_job.EvaluationJob( + name="name_value", + description="description_value", + state=gcd_evaluation_job.EvaluationJob.State.SCHEDULED, + schedule="schedule_value", + model_version="model_version_value", + annotation_spec_set="annotation_spec_set_value", + label_missing_ground_truth=True, + ) + + response = client.update_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.UpdateEvaluationJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_evaluation_job.EvaluationJob) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == gcd_evaluation_job.EvaluationJob.State.SCHEDULED + + assert response.schedule == "schedule_value" + + assert response.model_version == "model_version_value" + + assert response.annotation_spec_set == "annotation_spec_set_value" + + assert response.label_missing_ground_truth is True + + +def test_update_evaluation_job_from_dict(): + test_update_evaluation_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_evaluation_job_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.UpdateEvaluationJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_evaluation_job.EvaluationJob( + name="name_value", + description="description_value", + state=gcd_evaluation_job.EvaluationJob.State.SCHEDULED, + schedule="schedule_value", + model_version="model_version_value", + annotation_spec_set="annotation_spec_set_value", + label_missing_ground_truth=True, + ) + ) + + response = await client.update_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcd_evaluation_job.EvaluationJob) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == gcd_evaluation_job.EvaluationJob.State.SCHEDULED + + assert response.schedule == "schedule_value" + + assert response.model_version == "model_version_value" + + assert response.annotation_spec_set == "annotation_spec_set_value" + + assert response.label_missing_ground_truth is True + + +def test_update_evaluation_job_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.UpdateEvaluationJobRequest() + request.evaluation_job.name = "evaluation_job.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_evaluation_job), "__call__" + ) as call: + call.return_value = gcd_evaluation_job.EvaluationJob() + + client.update_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "evaluation_job.name=evaluation_job.name/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_evaluation_job_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.UpdateEvaluationJobRequest() + request.evaluation_job.name = "evaluation_job.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.update_evaluation_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_evaluation_job.EvaluationJob() + ) + + await client.update_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "evaluation_job.name=evaluation_job.name/value", + ) in kw["metadata"] + + +def test_update_evaluation_job_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_evaluation_job.EvaluationJob() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_evaluation_job( + evaluation_job=gcd_evaluation_job.EvaluationJob(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].evaluation_job == gcd_evaluation_job.EvaluationJob( + name="name_value" + ) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_evaluation_job_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_evaluation_job( + data_labeling_service.UpdateEvaluationJobRequest(), + evaluation_job=gcd_evaluation_job.EvaluationJob(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_evaluation_job_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_evaluation_job.EvaluationJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_evaluation_job.EvaluationJob() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_evaluation_job( + evaluation_job=gcd_evaluation_job.EvaluationJob(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].evaluation_job == gcd_evaluation_job.EvaluationJob( + name="name_value" + ) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_evaluation_job_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_evaluation_job( + data_labeling_service.UpdateEvaluationJobRequest(), + evaluation_job=gcd_evaluation_job.EvaluationJob(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_get_evaluation_job( + transport: str = "grpc", request_type=data_labeling_service.GetEvaluationJobRequest +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation_job.EvaluationJob( + name="name_value", + description="description_value", + state=evaluation_job.EvaluationJob.State.SCHEDULED, + schedule="schedule_value", + model_version="model_version_value", + annotation_spec_set="annotation_spec_set_value", + label_missing_ground_truth=True, + ) + + response = client.get_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.GetEvaluationJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, evaluation_job.EvaluationJob) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == evaluation_job.EvaluationJob.State.SCHEDULED + + assert response.schedule == "schedule_value" + + assert response.model_version == "model_version_value" + + assert response.annotation_spec_set == "annotation_spec_set_value" + + assert response.label_missing_ground_truth is True + + +def test_get_evaluation_job_from_dict(): + test_get_evaluation_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_evaluation_job_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.GetEvaluationJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation_job.EvaluationJob( + name="name_value", + description="description_value", + state=evaluation_job.EvaluationJob.State.SCHEDULED, + schedule="schedule_value", + model_version="model_version_value", + annotation_spec_set="annotation_spec_set_value", + label_missing_ground_truth=True, + ) + ) + + response = await client.get_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, evaluation_job.EvaluationJob) + + assert response.name == "name_value" + + assert response.description == "description_value" + + assert response.state == evaluation_job.EvaluationJob.State.SCHEDULED + + assert response.schedule == "schedule_value" + + assert response.model_version == "model_version_value" + + assert response.annotation_spec_set == "annotation_spec_set_value" + + assert response.label_missing_ground_truth is True + + +def test_get_evaluation_job_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_evaluation_job), "__call__" + ) as call: + call.return_value = evaluation_job.EvaluationJob() + + client.get_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_evaluation_job_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.GetEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_evaluation_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation_job.EvaluationJob() + ) + + await client.get_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_evaluation_job_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation_job.EvaluationJob() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_evaluation_job_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_evaluation_job( + data_labeling_service.GetEvaluationJobRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_evaluation_job_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = evaluation_job.EvaluationJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + evaluation_job.EvaluationJob() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_evaluation_job_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_evaluation_job( + data_labeling_service.GetEvaluationJobRequest(), name="name_value", + ) + + +def test_pause_evaluation_job( + transport: str = "grpc", + request_type=data_labeling_service.PauseEvaluationJobRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.pause_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.pause_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.PauseEvaluationJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_pause_evaluation_job_from_dict(): + test_pause_evaluation_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_pause_evaluation_job_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.PauseEvaluationJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.pause_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_pause_evaluation_job_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.PauseEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.pause_evaluation_job), "__call__" + ) as call: + call.return_value = None + + client.pause_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pause_evaluation_job_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.PauseEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_evaluation_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.pause_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_pause_evaluation_job_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.pause_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.pause_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_pause_evaluation_job_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pause_evaluation_job( + data_labeling_service.PauseEvaluationJobRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_pause_evaluation_job_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.pause_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pause_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_pause_evaluation_job_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.pause_evaluation_job( + data_labeling_service.PauseEvaluationJobRequest(), name="name_value", + ) + + +def test_resume_evaluation_job( + transport: str = "grpc", + request_type=data_labeling_service.ResumeEvaluationJobRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.resume_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.resume_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ResumeEvaluationJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_resume_evaluation_job_from_dict(): + test_resume_evaluation_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_resume_evaluation_job_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ResumeEvaluationJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.resume_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.resume_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_resume_evaluation_job_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ResumeEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.resume_evaluation_job), "__call__" + ) as call: + call.return_value = None + + client.resume_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_resume_evaluation_job_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ResumeEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.resume_evaluation_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.resume_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_resume_evaluation_job_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.resume_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.resume_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_resume_evaluation_job_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.resume_evaluation_job( + data_labeling_service.ResumeEvaluationJobRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_resume_evaluation_job_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.resume_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.resume_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_resume_evaluation_job_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.resume_evaluation_job( + data_labeling_service.ResumeEvaluationJobRequest(), name="name_value", + ) + + +def test_delete_evaluation_job( + transport: str = "grpc", + request_type=data_labeling_service.DeleteEvaluationJobRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.DeleteEvaluationJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_evaluation_job_from_dict(): + test_delete_evaluation_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_evaluation_job_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.DeleteEvaluationJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_evaluation_job_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.delete_evaluation_job), "__call__" + ) as call: + call.return_value = None + + client.delete_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_evaluation_job_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.DeleteEvaluationJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_evaluation_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_evaluation_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_evaluation_job_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_evaluation_job_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_evaluation_job( + data_labeling_service.DeleteEvaluationJobRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_evaluation_job_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_evaluation_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_evaluation_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_evaluation_job_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_evaluation_job( + data_labeling_service.DeleteEvaluationJobRequest(), name="name_value", + ) + + +def test_list_evaluation_jobs( + transport: str = "grpc", + request_type=data_labeling_service.ListEvaluationJobsRequest, +): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_evaluation_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListEvaluationJobsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_evaluation_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == data_labeling_service.ListEvaluationJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEvaluationJobsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_evaluation_jobs_from_dict(): + test_list_evaluation_jobs(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_evaluation_jobs_async(transport: str = "grpc_asyncio"): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = data_labeling_service.ListEvaluationJobsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_evaluation_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListEvaluationJobsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_evaluation_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEvaluationJobsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_evaluation_jobs_field_headers(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_labeling_service.ListEvaluationJobsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_evaluation_jobs), "__call__" + ) as call: + call.return_value = data_labeling_service.ListEvaluationJobsResponse() + + client.list_evaluation_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_evaluation_jobs_field_headers_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_labeling_service.ListEvaluationJobsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_evaluation_jobs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListEvaluationJobsResponse() + ) + + await client.list_evaluation_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_evaluation_jobs_flattened(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_evaluation_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListEvaluationJobsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_evaluation_jobs( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +def test_list_evaluation_jobs_flattened_error(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_evaluation_jobs( + data_labeling_service.ListEvaluationJobsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_list_evaluation_jobs_flattened_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_evaluation_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_labeling_service.ListEvaluationJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_labeling_service.ListEvaluationJobsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_evaluation_jobs( + parent="parent_value", filter="filter_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].filter == "filter_value" + + +@pytest.mark.asyncio +async def test_list_evaluation_jobs_flattened_error_async(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_evaluation_jobs( + data_labeling_service.ListEvaluationJobsRequest(), + parent="parent_value", + filter="filter_value", + ) + + +def test_list_evaluation_jobs_pager(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_evaluation_jobs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + next_page_token="abc", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[], next_page_token="def", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[evaluation_job.EvaluationJob(),], + next_page_token="ghi", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_evaluation_jobs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, evaluation_job.EvaluationJob) for i in results) + + +def test_list_evaluation_jobs_pages(): + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_evaluation_jobs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + next_page_token="abc", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[], next_page_token="def", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[evaluation_job.EvaluationJob(),], + next_page_token="ghi", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + ), + RuntimeError, + ) + pages = list(client.list_evaluation_jobs(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_evaluation_jobs_async_pager(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_evaluation_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + next_page_token="abc", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[], next_page_token="def", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[evaluation_job.EvaluationJob(),], + next_page_token="ghi", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_evaluation_jobs(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, evaluation_job.EvaluationJob) for i in responses) + + +@pytest.mark.asyncio +async def test_list_evaluation_jobs_async_pages(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_evaluation_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + next_page_token="abc", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[], next_page_token="def", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[evaluation_job.EvaluationJob(),], + next_page_token="ghi", + ), + data_labeling_service.ListEvaluationJobsResponse( + evaluation_jobs=[ + evaluation_job.EvaluationJob(), + evaluation_job.EvaluationJob(), + ], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_evaluation_jobs(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataLabelingServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataLabelingServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataLabelingServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataLabelingServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataLabelingServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DataLabelingServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = DataLabelingServiceClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataLabelingServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataLabelingServiceGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataLabelingServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.DataLabelingServiceGrpcTransport,) + + +def test_data_labeling_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.DataLabelingServiceTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_labeling_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.datalabeling_v1beta1.services.data_labeling_service.transports.DataLabelingServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataLabelingServiceTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_dataset", + "get_dataset", + "list_datasets", + "delete_dataset", + "import_data", + "export_data", + "get_data_item", + "list_data_items", + "get_annotated_dataset", + "list_annotated_datasets", + "delete_annotated_dataset", + "label_image", + "label_video", + "label_text", + "get_example", + "list_examples", + "create_annotation_spec_set", + "get_annotation_spec_set", + "list_annotation_spec_sets", + "delete_annotation_spec_set", + "create_instruction", + "get_instruction", + "list_instructions", + "delete_instruction", + "get_evaluation", + "search_evaluations", + "search_example_comparisons", + "create_evaluation_job", + "update_evaluation_job", + "get_evaluation_job", + "pause_evaluation_job", + "resume_evaluation_job", + "delete_evaluation_job", + "list_evaluation_jobs", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +def test_data_labeling_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.datalabeling_v1beta1.services.data_labeling_service.transports.DataLabelingServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DataLabelingServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_labeling_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + DataLabelingServiceClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_data_labeling_service_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.DataLabelingServiceGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_labeling_service_host_no_port(): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datalabeling.googleapis.com" + ), + ) + assert client._transport._host == "datalabeling.googleapis.com:443" + + +def test_data_labeling_service_host_with_port(): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datalabeling.googleapis.com:8000" + ), + ) + assert client._transport._host == "datalabeling.googleapis.com:8000" + + +def test_data_labeling_service_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. 
+ callback = mock.MagicMock() + transport = transports.DataLabelingServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_data_labeling_service_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.DataLabelingServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_data_labeling_service_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.DataLabelingServiceGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_data_labeling_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.DataLabelingServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_data_labeling_service_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.DataLabelingServiceGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_data_labeling_service_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.DataLabelingServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_labeling_service_grpc_lro_client(): + client = DataLabelingServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + transport = client._transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_data_labeling_service_grpc_lro_async_client(): + client = DataLabelingServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + transport = client._client._transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_instruction_path(): + project = "squid" + instruction = "clam" + + expected = "projects/{project}/instructions/{instruction}".format( + project=project, instruction=instruction, + ) + actual = DataLabelingServiceClient.instruction_path(project, instruction) + assert expected == actual + + +def test_parse_instruction_path(): + expected = { + "project": "whelk", + "instruction": "octopus", + } + path = DataLabelingServiceClient.instruction_path(**expected) + + # Check that the path construction is reversible. + actual = DataLabelingServiceClient.parse_instruction_path(path) + assert expected == actual + + +def test_annotation_spec_set_path(): + project = "squid" + annotation_spec_set = "clam" + + expected = "projects/{project}/annotationSpecSets/{annotation_spec_set}".format( + project=project, annotation_spec_set=annotation_spec_set, + ) + actual = DataLabelingServiceClient.annotation_spec_set_path( + project, annotation_spec_set + ) + assert expected == actual + + +def test_parse_annotation_spec_set_path(): + expected = { + "project": "whelk", + "annotation_spec_set": "octopus", + } + path = DataLabelingServiceClient.annotation_spec_set_path(**expected) + + # Check that the path construction is reversible. + actual = DataLabelingServiceClient.parse_annotation_spec_set_path(path) + assert expected == actual + + +def test_evaluation_job_path(): + project = "squid" + evaluation_job = "clam" + + expected = "projects/{project}/evaluationJobs/{evaluation_job}".format( + project=project, evaluation_job=evaluation_job, + ) + actual = DataLabelingServiceClient.evaluation_job_path(project, evaluation_job) + assert expected == actual + + +def test_parse_evaluation_job_path(): + expected = { + "project": "whelk", + "evaluation_job": "octopus", + } + path = DataLabelingServiceClient.evaluation_job_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataLabelingServiceClient.parse_evaluation_job_path(path) + assert expected == actual + + +def test_dataset_path(): + project = "squid" + dataset = "clam" + + expected = "projects/{project}/datasets/{dataset}".format( + project=project, dataset=dataset, + ) + actual = DataLabelingServiceClient.dataset_path(project, dataset) + assert expected == actual + + +def test_parse_dataset_path(): + expected = { + "project": "whelk", + "dataset": "octopus", + } + path = DataLabelingServiceClient.dataset_path(**expected) + + # Check that the path construction is reversible. + actual = DataLabelingServiceClient.parse_dataset_path(path) + assert expected == actual diff --git a/tests/unit/gapic/v1beta1/test_data_labeling_service_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_data_labeling_service_client_v1beta1.py deleted file mode 100644 index 48ffdc6..0000000 --- a/tests/unit/gapic/v1beta1/test_data_labeling_service_client_v1beta1.py +++ /dev/null @@ -1,1712 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.rpc import status_pb2 - -from google.cloud import datalabeling_v1beta1 -from google.cloud.datalabeling_v1beta1 import enums -from google.cloud.datalabeling_v1beta1.proto import annotation_spec_set_pb2 -from google.cloud.datalabeling_v1beta1.proto import data_labeling_service_pb2 -from google.cloud.datalabeling_v1beta1.proto import dataset_pb2 -from google.cloud.datalabeling_v1beta1.proto import evaluation_job_pb2 -from google.cloud.datalabeling_v1beta1.proto import evaluation_pb2 -from google.cloud.datalabeling_v1beta1.proto import human_annotation_config_pb2 -from google.cloud.datalabeling_v1beta1.proto import instruction_pb2 -from google.cloud.datalabeling_v1beta1.proto import ( - operations_pb2 as proto_operations_pb2, -) -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestDataLabelingServiceClient(object): - def test_create_dataset(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = 
"description-1724546052" - data_item_count = 2014260376 - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - "data_item_count": data_item_count, - } - expected_response = dataset_pb2.Dataset(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - dataset = {} - - response = client.create_dataset(parent, dataset) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.CreateDatasetRequest( - parent=parent, dataset=dataset - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_dataset_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - dataset = {} - - with pytest.raises(CustomException): - client.create_dataset(parent, dataset) - - def test_get_dataset(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - data_item_count = 2014260376 - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - "data_item_count": data_item_count, - } - expected_response = dataset_pb2.Dataset(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - 
with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.dataset_path("[PROJECT]", "[DATASET]") - - response = client.get_dataset(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.GetDatasetRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_dataset_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.dataset_path("[PROJECT]", "[DATASET]") - - with pytest.raises(CustomException): - client.get_dataset(name) - - def test_list_datasets(self): - # Setup Expected Response - next_page_token = "" - datasets_element = {} - datasets = [datasets_element] - expected_response = {"next_page_token": next_page_token, "datasets": datasets} - expected_response = data_labeling_service_pb2.ListDatasetsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_datasets(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.datasets[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ListDatasetsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == 
actual_request - - def test_list_datasets_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_datasets(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_dataset(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.dataset_path("[PROJECT]", "[DATASET]") - - client.delete_dataset(name) - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.DeleteDatasetRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_dataset_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.dataset_path("[PROJECT]", "[DATASET]") - - with pytest.raises(CustomException): - client.delete_dataset(name) - - def test_import_data(self): - # Setup Expected Response - dataset = "dataset1443214456" - total_count = 407761836 - import_count = 1721296907 - expected_response = { - "dataset": dataset, - "total_count": total_count, - "import_count": import_count, - } - expected_response = proto_operations_pb2.ImportDataOperationResponse( - **expected_response - ) - operation = longrunning_operations_pb2.Operation( - name="operations/test_import_data", done=True 
- ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.dataset_path("[PROJECT]", "[DATASET]") - input_config = {} - - response = client.import_data(name, input_config) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ImportDataRequest( - name=name, input_config=input_config - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_import_data_exception(self): - # Setup Response - error = status_pb2.Status() - operation = longrunning_operations_pb2.Operation( - name="operations/test_import_data_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.dataset_path("[PROJECT]", "[DATASET]") - input_config = {} - - response = client.import_data(name, input_config) - exception = response.exception() - assert exception.errors[0] == error - - def test_export_data(self): - # Setup Expected Response - dataset = "dataset1443214456" - total_count = 407761836 - export_count = 529256252 - expected_response = { - "dataset": dataset, - "total_count": total_count, - "export_count": export_count, - } - expected_response = proto_operations_pb2.ExportDataOperationResponse( - **expected_response - ) - operation = longrunning_operations_pb2.Operation( - name="operations/test_export_data", done=True - ) - 
operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.dataset_path("[PROJECT]", "[DATASET]") - annotated_dataset = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - output_config = {} - - response = client.export_data(name, annotated_dataset, output_config) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ExportDataRequest( - name=name, annotated_dataset=annotated_dataset, output_config=output_config - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_export_data_exception(self): - # Setup Response - error = status_pb2.Status() - operation = longrunning_operations_pb2.Operation( - name="operations/test_export_data_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.dataset_path("[PROJECT]", "[DATASET]") - annotated_dataset = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - output_config = {} - - response = client.export_data(name, annotated_dataset, output_config) - exception = response.exception() - assert exception.errors[0] == error - - def test_get_data_item(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = dataset_pb2.DataItem(**expected_response) - - # 
Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.data_item_path("[PROJECT]", "[DATASET]", "[DATA_ITEM]") - - response = client.get_data_item(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.GetDataItemRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_data_item_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.data_item_path("[PROJECT]", "[DATASET]", "[DATA_ITEM]") - - with pytest.raises(CustomException): - client.get_data_item(name) - - def test_list_data_items(self): - # Setup Expected Response - next_page_token = "" - data_items_element = {} - data_items = [data_items_element] - expected_response = { - "next_page_token": next_page_token, - "data_items": data_items, - } - expected_response = data_labeling_service_pb2.ListDataItemsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - - paged_list_response = client.list_data_items(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert 
expected_response.data_items[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ListDataItemsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_data_items_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - - paged_list_response = client.list_data_items(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_annotated_dataset(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - example_count = 1517063674 - completed_example_count = 612567290 - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - "example_count": example_count, - "completed_example_count": completed_example_count, - } - expected_response = dataset_pb2.AnnotatedDataset(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - - response = client.get_annotated_dataset(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.GetAnnotatedDatasetRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def 
test_get_annotated_dataset_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - - with pytest.raises(CustomException): - client.get_annotated_dataset(name) - - def test_list_annotated_datasets(self): - # Setup Expected Response - next_page_token = "" - annotated_datasets_element = {} - annotated_datasets = [annotated_datasets_element] - expected_response = { - "next_page_token": next_page_token, - "annotated_datasets": annotated_datasets, - } - expected_response = data_labeling_service_pb2.ListAnnotatedDatasetsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - - paged_list_response = client.list_annotated_datasets(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.annotated_datasets[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ListAnnotatedDatasetsRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_annotated_datasets_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - - paged_list_response = client.list_annotated_datasets(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_annotated_dataset(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - - client.delete_annotated_dataset(name) - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.DeleteAnnotatedDatasetRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_annotated_dataset_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - - with pytest.raises(CustomException): - client.delete_annotated_dataset(name) - - def test_label_image(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - example_count = 1517063674 - completed_example_count = 612567290 - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - "example_count": example_count, - "completed_example_count": completed_example_count, - } - expected_response = dataset_pb2.AnnotatedDataset(**expected_response) - operation = longrunning_operations_pb2.Operation( - 
name="operations/test_label_image", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - basic_config = {} - feature = enums.LabelImageRequest.Feature.FEATURE_UNSPECIFIED - - response = client.label_image(parent, basic_config, feature) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.LabelImageRequest( - parent=parent, basic_config=basic_config, feature=feature - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_label_image_exception(self): - # Setup Response - error = status_pb2.Status() - operation = longrunning_operations_pb2.Operation( - name="operations/test_label_image_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - basic_config = {} - feature = enums.LabelImageRequest.Feature.FEATURE_UNSPECIFIED - - response = client.label_image(parent, basic_config, feature) - exception = response.exception() - assert exception.errors[0] == error - - def test_label_video(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - example_count = 1517063674 - completed_example_count = 612567290 - expected_response = { - "name": 
name, - "display_name": display_name, - "description": description, - "example_count": example_count, - "completed_example_count": completed_example_count, - } - expected_response = dataset_pb2.AnnotatedDataset(**expected_response) - operation = longrunning_operations_pb2.Operation( - name="operations/test_label_video", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - basic_config = {} - feature = enums.LabelVideoRequest.Feature.FEATURE_UNSPECIFIED - - response = client.label_video(parent, basic_config, feature) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.LabelVideoRequest( - parent=parent, basic_config=basic_config, feature=feature - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_label_video_exception(self): - # Setup Response - error = status_pb2.Status() - operation = longrunning_operations_pb2.Operation( - name="operations/test_label_video_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - basic_config = {} - feature = enums.LabelVideoRequest.Feature.FEATURE_UNSPECIFIED - - response = client.label_video(parent, basic_config, feature) - exception = response.exception() - assert 
exception.errors[0] == error - - def test_label_text(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - example_count = 1517063674 - completed_example_count = 612567290 - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - "example_count": example_count, - "completed_example_count": completed_example_count, - } - expected_response = dataset_pb2.AnnotatedDataset(**expected_response) - operation = longrunning_operations_pb2.Operation( - name="operations/test_label_text", done=True - ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - basic_config = {} - feature = enums.LabelTextRequest.Feature.FEATURE_UNSPECIFIED - - response = client.label_text(parent, basic_config, feature) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.LabelTextRequest( - parent=parent, basic_config=basic_config, feature=feature - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_label_text_exception(self): - # Setup Response - error = status_pb2.Status() - operation = longrunning_operations_pb2.Operation( - name="operations/test_label_text_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.dataset_path("[PROJECT]", "[DATASET]") - basic_config = {} - feature = enums.LabelTextRequest.Feature.FEATURE_UNSPECIFIED - - response = client.label_text(parent, basic_config, feature) - exception = response.exception() - assert exception.errors[0] == error - - def test_get_example(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = dataset_pb2.Example(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.example_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]", "[EXAMPLE]" - ) - - response = client.get_example(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.GetExampleRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_example_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.example_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]", "[EXAMPLE]" - ) - - with pytest.raises(CustomException): - client.get_example(name) - - def test_list_examples(self): - # Setup Expected Response - next_page_token = "" - examples_element = {} - examples = [examples_element] - expected_response = {"next_page_token": next_page_token, "examples": examples} - expected_response = 
data_labeling_service_pb2.ListExamplesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - - paged_list_response = client.list_examples(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.examples[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ListExamplesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_examples_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.annotated_dataset_path( - "[PROJECT]", "[DATASET]", "[ANNOTATED_DATASET]" - ) - - paged_list_response = client.list_examples(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_create_annotation_spec_set(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - } - expected_response = annotation_spec_set_pb2.AnnotationSpecSet( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - 
client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - annotation_spec_set = {} - - response = client.create_annotation_spec_set(parent, annotation_spec_set) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.CreateAnnotationSpecSetRequest( - parent=parent, annotation_spec_set=annotation_spec_set - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_annotation_spec_set_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - annotation_spec_set = {} - - with pytest.raises(CustomException): - client.create_annotation_spec_set(parent, annotation_spec_set) - - def test_get_annotation_spec_set(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - } - expected_response = annotation_spec_set_pb2.AnnotationSpecSet( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]") - - response = client.get_annotation_spec_set(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
data_labeling_service_pb2.GetAnnotationSpecSetRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_annotation_spec_set_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]") - - with pytest.raises(CustomException): - client.get_annotation_spec_set(name) - - def test_list_annotation_spec_sets(self): - # Setup Expected Response - next_page_token = "" - annotation_spec_sets_element = {} - annotation_spec_sets = [annotation_spec_sets_element] - expected_response = { - "next_page_token": next_page_token, - "annotation_spec_sets": annotation_spec_sets, - } - expected_response = data_labeling_service_pb2.ListAnnotationSpecSetsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_annotation_spec_sets(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.annotation_spec_sets[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ListAnnotationSpecSetsRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_annotation_spec_sets_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_annotation_spec_sets(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_annotation_spec_set(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]") - - client.delete_annotation_spec_set(name) - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.DeleteAnnotationSpecSetRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_annotation_spec_set_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.annotation_spec_set_path("[PROJECT]", "[ANNOTATION_SPEC_SET]") - - with pytest.raises(CustomException): - client.delete_annotation_spec_set(name) - - def test_create_instruction(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - } - expected_response = instruction_pb2.Instruction(**expected_response) - operation = longrunning_operations_pb2.Operation( - name="operations/test_create_instruction", done=True 
- ) - operation.response.Pack(expected_response) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - instruction = {} - - response = client.create_instruction(parent, instruction) - result = response.result() - assert expected_response == result - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.CreateInstructionRequest( - parent=parent, instruction=instruction - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_instruction_exception(self): - # Setup Response - error = status_pb2.Status() - operation = longrunning_operations_pb2.Operation( - name="operations/test_create_instruction_exception", done=True - ) - operation.error.CopyFrom(error) - - # Mock the API response - channel = ChannelStub(responses=[operation]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - instruction = {} - - response = client.create_instruction(parent, instruction) - exception = response.exception() - assert exception.errors[0] == error - - def test_get_instruction(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - } - expected_response = instruction_pb2.Instruction(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.instruction_path("[PROJECT]", "[INSTRUCTION]") - - response = client.get_instruction(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.GetInstructionRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_instruction_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.instruction_path("[PROJECT]", "[INSTRUCTION]") - - with pytest.raises(CustomException): - client.get_instruction(name) - - def test_list_instructions(self): - # Setup Expected Response - next_page_token = "" - instructions_element = {} - instructions = [instructions_element] - expected_response = { - "next_page_token": next_page_token, - "instructions": instructions, - } - expected_response = data_labeling_service_pb2.ListInstructionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_instructions(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.instructions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = 
data_labeling_service_pb2.ListInstructionsRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_instructions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_instructions(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_instruction(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.instruction_path("[PROJECT]", "[INSTRUCTION]") - - client.delete_instruction(name) - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.DeleteInstructionRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_instruction_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.instruction_path("[PROJECT]", "[INSTRUCTION]") - - with pytest.raises(CustomException): - client.delete_instruction(name) - - def test_get_evaluation(self): - # Setup Expected Response - name_2 = "name2-1052831874" - evaluated_item_count = 358077111 - expected_response = { - "name": name_2, - "evaluated_item_count": evaluated_item_count, - } - expected_response = 
evaluation_pb2.Evaluation(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]") - - response = client.get_evaluation(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.GetEvaluationRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_evaluation_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]") - - with pytest.raises(CustomException): - client.get_evaluation(name) - - def test_search_evaluations(self): - # Setup Expected Response - next_page_token = "" - evaluations_element = {} - evaluations = [evaluations_element] - expected_response = { - "next_page_token": next_page_token, - "evaluations": evaluations, - } - expected_response = data_labeling_service_pb2.SearchEvaluationsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]") - - paged_list_response = client.search_evaluations(parent) - 
resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.evaluations[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.SearchEvaluationsRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_search_evaluations_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]") - - paged_list_response = client.search_evaluations(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_search_example_comparisons(self): - # Setup Expected Response - next_page_token = "" - example_comparisons_element = {} - example_comparisons = [example_comparisons_element] - expected_response = { - "next_page_token": next_page_token, - "example_comparisons": example_comparisons, - } - expected_response = data_labeling_service_pb2.SearchExampleComparisonsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]") - - paged_list_response = client.search_example_comparisons(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.example_comparisons[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.SearchExampleComparisonsRequest( - parent=parent - ) 
- actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_search_example_comparisons_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.evaluation_path("[PROJECT]", "[DATASET]", "[EVALUATION]") - - paged_list_response = client.search_example_comparisons(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_create_evaluation_job(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - schedule = "schedule-697920873" - model_version = "modelVersion-1669102142" - annotation_spec_set = "annotationSpecSet1881405678" - label_missing_ground_truth = False - expected_response = { - "name": name, - "description": description, - "schedule": schedule, - "model_version": model_version, - "annotation_spec_set": annotation_spec_set, - "label_missing_ground_truth": label_missing_ground_truth, - } - expected_response = evaluation_job_pb2.EvaluationJob(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - job = {} - - response = client.create_evaluation_job(parent, job) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.CreateEvaluationJobRequest( - parent=parent, job=job - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_evaluation_job_exception(self): - # Mock the API 
response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - job = {} - - with pytest.raises(CustomException): - client.create_evaluation_job(parent, job) - - def test_update_evaluation_job(self): - # Setup Expected Response - name = "name3373707" - description = "description-1724546052" - schedule = "schedule-697920873" - model_version = "modelVersion-1669102142" - annotation_spec_set = "annotationSpecSet1881405678" - label_missing_ground_truth = False - expected_response = { - "name": name, - "description": description, - "schedule": schedule, - "model_version": model_version, - "annotation_spec_set": annotation_spec_set, - "label_missing_ground_truth": label_missing_ground_truth, - } - expected_response = evaluation_job_pb2.EvaluationJob(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - evaluation_job = {} - - response = client.update_evaluation_job(evaluation_job) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.UpdateEvaluationJobRequest( - evaluation_job=evaluation_job - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_evaluation_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - evaluation_job = {} - - with pytest.raises(CustomException): - client.update_evaluation_job(evaluation_job) - - def test_get_evaluation_job(self): - # Setup Expected Response - name_2 = "name2-1052831874" - description = "description-1724546052" - schedule = "schedule-697920873" - model_version = "modelVersion-1669102142" - annotation_spec_set = "annotationSpecSet1881405678" - label_missing_ground_truth = False - expected_response = { - "name": name_2, - "description": description, - "schedule": schedule, - "model_version": model_version, - "annotation_spec_set": annotation_spec_set, - "label_missing_ground_truth": label_missing_ground_truth, - } - expected_response = evaluation_job_pb2.EvaluationJob(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - response = client.get_evaluation_job(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.GetEvaluationJobRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_evaluation_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - with pytest.raises(CustomException): - client.get_evaluation_job(name) - - def 
test_pause_evaluation_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - client.pause_evaluation_job(name) - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.PauseEvaluationJobRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_pause_evaluation_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - with pytest.raises(CustomException): - client.pause_evaluation_job(name) - - def test_resume_evaluation_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - client.resume_evaluation_job(name) - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ResumeEvaluationJobRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_resume_evaluation_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - with pytest.raises(CustomException): - client.resume_evaluation_job(name) - - def test_delete_evaluation_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup Request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - client.delete_evaluation_job(name) - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.DeleteEvaluationJobRequest( - name=name - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_evaluation_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - name = client.evaluation_job_path("[PROJECT]", "[EVALUATION_JOB]") - - with pytest.raises(CustomException): - client.delete_evaluation_job(name) - - def test_list_evaluation_jobs(self): - # Setup Expected Response - next_page_token = "" - evaluation_jobs_element = {} - evaluation_jobs = [evaluation_jobs_element] - expected_response = { - "next_page_token": next_page_token, - "evaluation_jobs": evaluation_jobs, - } - expected_response = data_labeling_service_pb2.ListEvaluationJobsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup 
Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_evaluation_jobs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.evaluation_jobs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = data_labeling_service_pb2.ListEvaluationJobsRequest( - parent=parent - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_evaluation_jobs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = datalabeling_v1beta1.DataLabelingServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_evaluation_jobs(parent) - with pytest.raises(CustomException): - list(paged_list_response)