diff --git a/google/cloud/dataproc_v1/types/jobs.py b/google/cloud/dataproc_v1/types/jobs.py
index c666f5ac..723d95a1 100644
--- a/google/cloud/dataproc_v1/types/jobs.py
+++ b/google/cloud/dataproc_v1/types/jobs.py
@@ -65,8 +65,9 @@ class LoggingConfig(proto.Message):
             This may include "root" package name to
             configure rootLogger.
             Examples:
-            'com.google = FATAL', 'root = INFO',
-            'org.apache = DEBUG'
+            - 'com.google = FATAL'
+            - 'root = INFO'
+            - 'org.apache = DEBUG'
    """

    class Level(proto.Enum):
@@ -165,7 +166,7 @@ class HadoopJob(proto.Message):
             Optional. A mapping of property names to values, used to
             configure Hadoop. Properties that conflict with values set
             by the Dataproc API may be overwritten. Can include
-            properties set in /etc/hadoop/conf/*-site and classes in
+            properties set in ``/etc/hadoop/conf/*-site`` and classes in
             user code.
         logging_config (google.cloud.dataproc_v1.types.LoggingConfig):
             Optional. The runtime log config for job
@@ -443,7 +444,7 @@ class HiveJob(proto.Message):
             Optional. A mapping of property names and values, used to
             configure Hive. Properties that conflict with values set by
             the Dataproc API may be overwritten. Can include properties
-            set in /etc/hadoop/conf/*-site.xml,
+            set in ``/etc/hadoop/conf/*-site.xml``,
             /etc/hive/conf/hive-site.xml, and classes in user code.
         jar_file_uris (MutableSequence[str]):
             Optional. HCFS URIs of jar files to add to
@@ -585,7 +586,7 @@ class PigJob(proto.Message):
             Optional. A mapping of property names to values, used to
             configure Pig. Properties that conflict with values set by
             the Dataproc API may be overwritten. Can include properties
-            set in /etc/hadoop/conf/*-site.xml,
+            set in ``/etc/hadoop/conf/*-site.xml``,
             /etc/pig/conf/pig.properties, and classes in user code.
         jar_file_uris (MutableSequence[str]):
             Optional. HCFS URIs of jar files to add to
diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc
deleted file mode 100644
index d5ee82ac..00000000
--- a/owl-bot-staging/v1/.coveragerc
+++ /dev/null
@@ -1,13 +0,0 @@
-[run]
-branch = True
-
-[report]
-show_missing = True
-omit =
-    google/cloud/dataproc/__init__.py
-    google/cloud/dataproc/gapic_version.py
-exclude_lines =
-    # Re-enable the standard pragma
-    pragma: NO COVER
-    # Ignore debug-only repr
-    def __repr__
diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8
deleted file mode 100644
index 29227d4c..00000000
--- a/owl-bot-staging/v1/.flake8
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[flake8]
-ignore = E203, E266, E501, W503
-exclude =
-  # Exclude generated code.
-  **/proto/**
-  **/gapic/**
-  **/services/**
-  **/types/**
-  *_pb2.py
-
-  # Standard linting exemptions.
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in
deleted file mode 100644
index 425f6657..00000000
--- a/owl-bot-staging/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/dataproc *.py
-recursive-include google/cloud/dataproc_v1 *.py
diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst
deleted file mode 100644
index b751dfd9..00000000
--- a/owl-bot-staging/v1/README.rst
+++ /dev/null
@@ -1,49 +0,0 @@
-Python Client for Google Cloud Dataproc API
-=================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Dataproc API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py
deleted file mode 100644
index ed02c50d..00000000
--- a/owl-bot-staging/v1/docs/conf.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-dataproc documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dataproc" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. 
-# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dataproc-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dataproc.tex", - u"google-cloud-dataproc Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dataproc", - u"Google Cloud Dataproc Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dataproc", - u"google-cloud-dataproc Documentation", - author, - "google-cloud-dataproc", - "GAPIC library for Google Cloud Dataproc API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/dataproc_v1/autoscaling_policy_service.rst b/owl-bot-staging/v1/docs/dataproc_v1/autoscaling_policy_service.rst deleted file mode 100644 index 9b885c57..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/autoscaling_policy_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AutoscalingPolicyService ------------------------------------------- - -.. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/dataproc_v1/batch_controller.rst b/owl-bot-staging/v1/docs/dataproc_v1/batch_controller.rst deleted file mode 100644 index e28563d2..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/batch_controller.rst +++ /dev/null @@ -1,10 +0,0 @@ -BatchController ---------------------------------- - -.. automodule:: google.cloud.dataproc_v1.services.batch_controller - :members: - :inherited-members: - -.. automodule:: google.cloud.dataproc_v1.services.batch_controller.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/dataproc_v1/cluster_controller.rst b/owl-bot-staging/v1/docs/dataproc_v1/cluster_controller.rst deleted file mode 100644 index d9b7f2ad..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/cluster_controller.rst +++ /dev/null @@ -1,10 +0,0 @@ -ClusterController ------------------------------------ - -.. automodule:: google.cloud.dataproc_v1.services.cluster_controller - :members: - :inherited-members: - -.. automodule:: google.cloud.dataproc_v1.services.cluster_controller.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/dataproc_v1/job_controller.rst b/owl-bot-staging/v1/docs/dataproc_v1/job_controller.rst deleted file mode 100644 index 5f14863b..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/job_controller.rst +++ /dev/null @@ -1,10 +0,0 @@ -JobController -------------------------------- - -.. automodule:: google.cloud.dataproc_v1.services.job_controller - :members: - :inherited-members: - -.. 
automodule:: google.cloud.dataproc_v1.services.job_controller.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/dataproc_v1/node_group_controller.rst b/owl-bot-staging/v1/docs/dataproc_v1/node_group_controller.rst deleted file mode 100644 index 55d67f48..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/node_group_controller.rst +++ /dev/null @@ -1,6 +0,0 @@ -NodeGroupController -------------------------------------- - -.. automodule:: google.cloud.dataproc_v1.services.node_group_controller - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/dataproc_v1/services.rst b/owl-bot-staging/v1/docs/dataproc_v1/services.rst deleted file mode 100644 index aee63982..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/services.rst +++ /dev/null @@ -1,11 +0,0 @@ -Services for Google Cloud Dataproc v1 API -========================================= -.. toctree:: - :maxdepth: 2 - - autoscaling_policy_service - batch_controller - cluster_controller - job_controller - node_group_controller - workflow_template_service diff --git a/owl-bot-staging/v1/docs/dataproc_v1/types.rst b/owl-bot-staging/v1/docs/dataproc_v1/types.rst deleted file mode 100644 index 5dde0cd6..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Dataproc v1 API -====================================== - -.. automodule:: google.cloud.dataproc_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1/docs/dataproc_v1/workflow_template_service.rst b/owl-bot-staging/v1/docs/dataproc_v1/workflow_template_service.rst deleted file mode 100644 index 0f301cee..00000000 --- a/owl-bot-staging/v1/docs/dataproc_v1/workflow_template_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -WorkflowTemplateService ------------------------------------------ - -.. automodule:: google.cloud.dataproc_v1.services.workflow_template_service - :members: - :inherited-members: - -.. automodule:: google.cloud.dataproc_v1.services.workflow_template_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 3bf4df8b..00000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dataproc_v1/services - dataproc_v1/types diff --git a/owl-bot-staging/v1/google/cloud/dataproc/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc/__init__.py deleted file mode 100644 index 73dd3886..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc/__init__.py +++ /dev/null @@ -1,301 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.dataproc import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dataproc_v1.services.autoscaling_policy_service.client import AutoscalingPolicyServiceClient -from google.cloud.dataproc_v1.services.autoscaling_policy_service.async_client import AutoscalingPolicyServiceAsyncClient -from google.cloud.dataproc_v1.services.batch_controller.client import BatchControllerClient -from google.cloud.dataproc_v1.services.batch_controller.async_client import BatchControllerAsyncClient -from google.cloud.dataproc_v1.services.cluster_controller.client import ClusterControllerClient -from google.cloud.dataproc_v1.services.cluster_controller.async_client import ClusterControllerAsyncClient -from google.cloud.dataproc_v1.services.job_controller.client import JobControllerClient -from google.cloud.dataproc_v1.services.job_controller.async_client import JobControllerAsyncClient -from google.cloud.dataproc_v1.services.node_group_controller.client import NodeGroupControllerClient -from google.cloud.dataproc_v1.services.node_group_controller.async_client import NodeGroupControllerAsyncClient -from google.cloud.dataproc_v1.services.workflow_template_service.client import WorkflowTemplateServiceClient -from google.cloud.dataproc_v1.services.workflow_template_service.async_client import WorkflowTemplateServiceAsyncClient - -from google.cloud.dataproc_v1.types.autoscaling_policies import AutoscalingPolicy -from google.cloud.dataproc_v1.types.autoscaling_policies import BasicAutoscalingAlgorithm -from google.cloud.dataproc_v1.types.autoscaling_policies import BasicYarnAutoscalingConfig -from google.cloud.dataproc_v1.types.autoscaling_policies import CreateAutoscalingPolicyRequest -from google.cloud.dataproc_v1.types.autoscaling_policies import DeleteAutoscalingPolicyRequest -from google.cloud.dataproc_v1.types.autoscaling_policies import GetAutoscalingPolicyRequest -from google.cloud.dataproc_v1.types.autoscaling_policies import InstanceGroupAutoscalingPolicyConfig -from google.cloud.dataproc_v1.types.autoscaling_policies import ListAutoscalingPoliciesRequest -from google.cloud.dataproc_v1.types.autoscaling_policies import ListAutoscalingPoliciesResponse -from google.cloud.dataproc_v1.types.autoscaling_policies import UpdateAutoscalingPolicyRequest -from google.cloud.dataproc_v1.types.batches import Batch -from google.cloud.dataproc_v1.types.batches import CreateBatchRequest -from google.cloud.dataproc_v1.types.batches import DeleteBatchRequest -from google.cloud.dataproc_v1.types.batches import GetBatchRequest -from google.cloud.dataproc_v1.types.batches import ListBatchesRequest -from google.cloud.dataproc_v1.types.batches import ListBatchesResponse -from google.cloud.dataproc_v1.types.batches import PySparkBatch -from google.cloud.dataproc_v1.types.batches import SparkBatch -from google.cloud.dataproc_v1.types.batches import SparkRBatch -from google.cloud.dataproc_v1.types.batches import SparkSqlBatch -from google.cloud.dataproc_v1.types.clusters import AcceleratorConfig -from google.cloud.dataproc_v1.types.clusters import AutoscalingConfig -from google.cloud.dataproc_v1.types.clusters import AuxiliaryNodeGroup -from google.cloud.dataproc_v1.types.clusters import AuxiliaryServicesConfig -from google.cloud.dataproc_v1.types.clusters import Cluster -from google.cloud.dataproc_v1.types.clusters import ClusterConfig -from google.cloud.dataproc_v1.types.clusters import ClusterMetrics -from google.cloud.dataproc_v1.types.clusters import 
ClusterStatus -from google.cloud.dataproc_v1.types.clusters import ConfidentialInstanceConfig -from google.cloud.dataproc_v1.types.clusters import CreateClusterRequest -from google.cloud.dataproc_v1.types.clusters import DataprocMetricConfig -from google.cloud.dataproc_v1.types.clusters import DeleteClusterRequest -from google.cloud.dataproc_v1.types.clusters import DiagnoseClusterRequest -from google.cloud.dataproc_v1.types.clusters import DiagnoseClusterResults -from google.cloud.dataproc_v1.types.clusters import DiskConfig -from google.cloud.dataproc_v1.types.clusters import EncryptionConfig -from google.cloud.dataproc_v1.types.clusters import EndpointConfig -from google.cloud.dataproc_v1.types.clusters import GceClusterConfig -from google.cloud.dataproc_v1.types.clusters import GetClusterRequest -from google.cloud.dataproc_v1.types.clusters import IdentityConfig -from google.cloud.dataproc_v1.types.clusters import InstanceGroupConfig -from google.cloud.dataproc_v1.types.clusters import KerberosConfig -from google.cloud.dataproc_v1.types.clusters import LifecycleConfig -from google.cloud.dataproc_v1.types.clusters import ListClustersRequest -from google.cloud.dataproc_v1.types.clusters import ListClustersResponse -from google.cloud.dataproc_v1.types.clusters import ManagedGroupConfig -from google.cloud.dataproc_v1.types.clusters import MetastoreConfig -from google.cloud.dataproc_v1.types.clusters import NodeGroup -from google.cloud.dataproc_v1.types.clusters import NodeGroupAffinity -from google.cloud.dataproc_v1.types.clusters import NodeInitializationAction -from google.cloud.dataproc_v1.types.clusters import ReservationAffinity -from google.cloud.dataproc_v1.types.clusters import SecurityConfig -from google.cloud.dataproc_v1.types.clusters import ShieldedInstanceConfig -from google.cloud.dataproc_v1.types.clusters import SoftwareConfig -from google.cloud.dataproc_v1.types.clusters import StartClusterRequest -from google.cloud.dataproc_v1.types.clusters import StopClusterRequest -from google.cloud.dataproc_v1.types.clusters import UpdateClusterRequest -from google.cloud.dataproc_v1.types.clusters import VirtualClusterConfig -from google.cloud.dataproc_v1.types.jobs import CancelJobRequest -from google.cloud.dataproc_v1.types.jobs import DeleteJobRequest -from google.cloud.dataproc_v1.types.jobs import DriverSchedulingConfig -from google.cloud.dataproc_v1.types.jobs import GetJobRequest -from google.cloud.dataproc_v1.types.jobs import HadoopJob -from google.cloud.dataproc_v1.types.jobs import HiveJob -from google.cloud.dataproc_v1.types.jobs import Job -from google.cloud.dataproc_v1.types.jobs import JobMetadata -from google.cloud.dataproc_v1.types.jobs import JobPlacement -from google.cloud.dataproc_v1.types.jobs import JobReference -from google.cloud.dataproc_v1.types.jobs import JobScheduling -from google.cloud.dataproc_v1.types.jobs import JobStatus -from google.cloud.dataproc_v1.types.jobs import ListJobsRequest -from google.cloud.dataproc_v1.types.jobs import ListJobsResponse -from google.cloud.dataproc_v1.types.jobs import LoggingConfig -from google.cloud.dataproc_v1.types.jobs import PigJob -from google.cloud.dataproc_v1.types.jobs import PrestoJob -from google.cloud.dataproc_v1.types.jobs import PySparkJob -from google.cloud.dataproc_v1.types.jobs import QueryList -from google.cloud.dataproc_v1.types.jobs import SparkJob -from google.cloud.dataproc_v1.types.jobs import SparkRJob -from google.cloud.dataproc_v1.types.jobs import SparkSqlJob -from 
google.cloud.dataproc_v1.types.jobs import SubmitJobRequest -from google.cloud.dataproc_v1.types.jobs import TrinoJob -from google.cloud.dataproc_v1.types.jobs import UpdateJobRequest -from google.cloud.dataproc_v1.types.jobs import YarnApplication -from google.cloud.dataproc_v1.types.node_groups import CreateNodeGroupRequest -from google.cloud.dataproc_v1.types.node_groups import GetNodeGroupRequest -from google.cloud.dataproc_v1.types.node_groups import ResizeNodeGroupRequest -from google.cloud.dataproc_v1.types.operations import BatchOperationMetadata -from google.cloud.dataproc_v1.types.operations import ClusterOperationMetadata -from google.cloud.dataproc_v1.types.operations import ClusterOperationStatus -from google.cloud.dataproc_v1.types.operations import NodeGroupOperationMetadata -from google.cloud.dataproc_v1.types.shared import EnvironmentConfig -from google.cloud.dataproc_v1.types.shared import ExecutionConfig -from google.cloud.dataproc_v1.types.shared import GkeClusterConfig -from google.cloud.dataproc_v1.types.shared import GkeNodePoolConfig -from google.cloud.dataproc_v1.types.shared import GkeNodePoolTarget -from google.cloud.dataproc_v1.types.shared import KubernetesClusterConfig -from google.cloud.dataproc_v1.types.shared import KubernetesSoftwareConfig -from google.cloud.dataproc_v1.types.shared import PeripheralsConfig -from google.cloud.dataproc_v1.types.shared import RuntimeConfig -from google.cloud.dataproc_v1.types.shared import RuntimeInfo -from google.cloud.dataproc_v1.types.shared import SparkHistoryServerConfig -from google.cloud.dataproc_v1.types.shared import UsageMetrics -from google.cloud.dataproc_v1.types.shared import UsageSnapshot -from google.cloud.dataproc_v1.types.shared import Component -from google.cloud.dataproc_v1.types.shared import FailureAction -from google.cloud.dataproc_v1.types.workflow_templates import ClusterOperation -from google.cloud.dataproc_v1.types.workflow_templates import ClusterSelector -from google.cloud.dataproc_v1.types.workflow_templates import CreateWorkflowTemplateRequest -from google.cloud.dataproc_v1.types.workflow_templates import DeleteWorkflowTemplateRequest -from google.cloud.dataproc_v1.types.workflow_templates import GetWorkflowTemplateRequest -from google.cloud.dataproc_v1.types.workflow_templates import InstantiateInlineWorkflowTemplateRequest -from google.cloud.dataproc_v1.types.workflow_templates import InstantiateWorkflowTemplateRequest -from google.cloud.dataproc_v1.types.workflow_templates import ListWorkflowTemplatesRequest -from google.cloud.dataproc_v1.types.workflow_templates import ListWorkflowTemplatesResponse -from google.cloud.dataproc_v1.types.workflow_templates import ManagedCluster -from google.cloud.dataproc_v1.types.workflow_templates import OrderedJob -from google.cloud.dataproc_v1.types.workflow_templates import ParameterValidation -from google.cloud.dataproc_v1.types.workflow_templates import RegexValidation -from google.cloud.dataproc_v1.types.workflow_templates import TemplateParameter -from google.cloud.dataproc_v1.types.workflow_templates import UpdateWorkflowTemplateRequest -from google.cloud.dataproc_v1.types.workflow_templates import ValueValidation -from google.cloud.dataproc_v1.types.workflow_templates import WorkflowGraph -from google.cloud.dataproc_v1.types.workflow_templates import WorkflowMetadata -from google.cloud.dataproc_v1.types.workflow_templates import WorkflowNode -from google.cloud.dataproc_v1.types.workflow_templates import WorkflowTemplate -from 
google.cloud.dataproc_v1.types.workflow_templates import WorkflowTemplatePlacement - -__all__ = ('AutoscalingPolicyServiceClient', - 'AutoscalingPolicyServiceAsyncClient', - 'BatchControllerClient', - 'BatchControllerAsyncClient', - 'ClusterControllerClient', - 'ClusterControllerAsyncClient', - 'JobControllerClient', - 'JobControllerAsyncClient', - 'NodeGroupControllerClient', - 'NodeGroupControllerAsyncClient', - 'WorkflowTemplateServiceClient', - 'WorkflowTemplateServiceAsyncClient', - 'AutoscalingPolicy', - 'BasicAutoscalingAlgorithm', - 'BasicYarnAutoscalingConfig', - 'CreateAutoscalingPolicyRequest', - 'DeleteAutoscalingPolicyRequest', - 'GetAutoscalingPolicyRequest', - 'InstanceGroupAutoscalingPolicyConfig', - 'ListAutoscalingPoliciesRequest', - 'ListAutoscalingPoliciesResponse', - 'UpdateAutoscalingPolicyRequest', - 'Batch', - 'CreateBatchRequest', - 'DeleteBatchRequest', - 'GetBatchRequest', - 'ListBatchesRequest', - 'ListBatchesResponse', - 'PySparkBatch', - 'SparkBatch', - 'SparkRBatch', - 'SparkSqlBatch', - 'AcceleratorConfig', - 'AutoscalingConfig', - 'AuxiliaryNodeGroup', - 'AuxiliaryServicesConfig', - 'Cluster', - 'ClusterConfig', - 'ClusterMetrics', - 'ClusterStatus', - 'ConfidentialInstanceConfig', - 'CreateClusterRequest', - 'DataprocMetricConfig', - 'DeleteClusterRequest', - 'DiagnoseClusterRequest', - 'DiagnoseClusterResults', - 'DiskConfig', - 'EncryptionConfig', - 'EndpointConfig', - 'GceClusterConfig', - 'GetClusterRequest', - 'IdentityConfig', - 'InstanceGroupConfig', - 'KerberosConfig', - 'LifecycleConfig', - 'ListClustersRequest', - 'ListClustersResponse', - 'ManagedGroupConfig', - 'MetastoreConfig', - 'NodeGroup', - 'NodeGroupAffinity', - 'NodeInitializationAction', - 'ReservationAffinity', - 'SecurityConfig', - 'ShieldedInstanceConfig', - 'SoftwareConfig', - 'StartClusterRequest', - 'StopClusterRequest', - 'UpdateClusterRequest', - 'VirtualClusterConfig', - 'CancelJobRequest', - 'DeleteJobRequest', - 'DriverSchedulingConfig', - 'GetJobRequest', - 'HadoopJob', - 'HiveJob', - 'Job', - 'JobMetadata', - 'JobPlacement', - 'JobReference', - 'JobScheduling', - 'JobStatus', - 'ListJobsRequest', - 'ListJobsResponse', - 'LoggingConfig', - 'PigJob', - 'PrestoJob', - 'PySparkJob', - 'QueryList', - 'SparkJob', - 'SparkRJob', - 'SparkSqlJob', - 'SubmitJobRequest', - 'TrinoJob', - 'UpdateJobRequest', - 'YarnApplication', - 'CreateNodeGroupRequest', - 'GetNodeGroupRequest', - 'ResizeNodeGroupRequest', - 'BatchOperationMetadata', - 'ClusterOperationMetadata', - 'ClusterOperationStatus', - 'NodeGroupOperationMetadata', - 'EnvironmentConfig', - 'ExecutionConfig', - 'GkeClusterConfig', - 'GkeNodePoolConfig', - 'GkeNodePoolTarget', - 'KubernetesClusterConfig', - 'KubernetesSoftwareConfig', - 'PeripheralsConfig', - 'RuntimeConfig', - 'RuntimeInfo', - 'SparkHistoryServerConfig', - 'UsageMetrics', - 'UsageSnapshot', - 'Component', - 'FailureAction', - 'ClusterOperation', - 'ClusterSelector', - 'CreateWorkflowTemplateRequest', - 'DeleteWorkflowTemplateRequest', - 'GetWorkflowTemplateRequest', - 'InstantiateInlineWorkflowTemplateRequest', - 'InstantiateWorkflowTemplateRequest', - 'ListWorkflowTemplatesRequest', - 'ListWorkflowTemplatesResponse', - 'ManagedCluster', - 'OrderedJob', - 'ParameterValidation', - 'RegexValidation', - 'TemplateParameter', - 'UpdateWorkflowTemplateRequest', - 'ValueValidation', - 'WorkflowGraph', - 'WorkflowMetadata', - 'WorkflowNode', - 'WorkflowTemplate', - 'WorkflowTemplatePlacement', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc/gapic_version.py 
b/owl-bot-staging/v1/google/cloud/dataproc/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/dataproc/py.typed b/owl-bot-staging/v1/google/cloud/dataproc/py.typed deleted file mode 100644 index aac99cba..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataproc package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/__init__.py deleted file mode 100644 index fb4a0c1c..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/__init__.py +++ /dev/null @@ -1,302 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.dataproc_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.autoscaling_policy_service import AutoscalingPolicyServiceClient -from .services.autoscaling_policy_service import AutoscalingPolicyServiceAsyncClient -from .services.batch_controller import BatchControllerClient -from .services.batch_controller import BatchControllerAsyncClient -from .services.cluster_controller import ClusterControllerClient -from .services.cluster_controller import ClusterControllerAsyncClient -from .services.job_controller import JobControllerClient -from .services.job_controller import JobControllerAsyncClient -from .services.node_group_controller import NodeGroupControllerClient -from .services.node_group_controller import NodeGroupControllerAsyncClient -from .services.workflow_template_service import WorkflowTemplateServiceClient -from .services.workflow_template_service import WorkflowTemplateServiceAsyncClient - -from .types.autoscaling_policies import AutoscalingPolicy -from .types.autoscaling_policies import BasicAutoscalingAlgorithm -from .types.autoscaling_policies import BasicYarnAutoscalingConfig -from .types.autoscaling_policies import CreateAutoscalingPolicyRequest -from .types.autoscaling_policies import DeleteAutoscalingPolicyRequest -from .types.autoscaling_policies import GetAutoscalingPolicyRequest -from .types.autoscaling_policies import InstanceGroupAutoscalingPolicyConfig -from .types.autoscaling_policies import ListAutoscalingPoliciesRequest -from .types.autoscaling_policies import ListAutoscalingPoliciesResponse -from .types.autoscaling_policies import UpdateAutoscalingPolicyRequest -from .types.batches import Batch -from .types.batches import CreateBatchRequest -from .types.batches import DeleteBatchRequest -from .types.batches import GetBatchRequest -from .types.batches import ListBatchesRequest -from .types.batches import ListBatchesResponse -from .types.batches import PySparkBatch -from .types.batches import SparkBatch -from .types.batches import SparkRBatch -from .types.batches import SparkSqlBatch -from .types.clusters import AcceleratorConfig -from .types.clusters import AutoscalingConfig -from .types.clusters import AuxiliaryNodeGroup -from .types.clusters import AuxiliaryServicesConfig -from .types.clusters import Cluster -from .types.clusters import ClusterConfig -from .types.clusters import ClusterMetrics -from .types.clusters import ClusterStatus -from .types.clusters import ConfidentialInstanceConfig -from .types.clusters import CreateClusterRequest -from .types.clusters import DataprocMetricConfig -from .types.clusters import DeleteClusterRequest -from .types.clusters import DiagnoseClusterRequest -from .types.clusters import DiagnoseClusterResults -from .types.clusters import DiskConfig -from .types.clusters import EncryptionConfig -from .types.clusters import EndpointConfig -from .types.clusters import GceClusterConfig -from .types.clusters import GetClusterRequest -from .types.clusters import IdentityConfig -from .types.clusters import InstanceGroupConfig -from .types.clusters import KerberosConfig -from .types.clusters import LifecycleConfig -from .types.clusters import ListClustersRequest -from .types.clusters import ListClustersResponse -from .types.clusters import ManagedGroupConfig -from .types.clusters import MetastoreConfig -from .types.clusters import NodeGroup -from .types.clusters import NodeGroupAffinity -from .types.clusters import NodeInitializationAction -from .types.clusters 
import ReservationAffinity -from .types.clusters import SecurityConfig -from .types.clusters import ShieldedInstanceConfig -from .types.clusters import SoftwareConfig -from .types.clusters import StartClusterRequest -from .types.clusters import StopClusterRequest -from .types.clusters import UpdateClusterRequest -from .types.clusters import VirtualClusterConfig -from .types.jobs import CancelJobRequest -from .types.jobs import DeleteJobRequest -from .types.jobs import DriverSchedulingConfig -from .types.jobs import GetJobRequest -from .types.jobs import HadoopJob -from .types.jobs import HiveJob -from .types.jobs import Job -from .types.jobs import JobMetadata -from .types.jobs import JobPlacement -from .types.jobs import JobReference -from .types.jobs import JobScheduling -from .types.jobs import JobStatus -from .types.jobs import ListJobsRequest -from .types.jobs import ListJobsResponse -from .types.jobs import LoggingConfig -from .types.jobs import PigJob -from .types.jobs import PrestoJob -from .types.jobs import PySparkJob -from .types.jobs import QueryList -from .types.jobs import SparkJob -from .types.jobs import SparkRJob -from .types.jobs import SparkSqlJob -from .types.jobs import SubmitJobRequest -from .types.jobs import TrinoJob -from .types.jobs import UpdateJobRequest -from .types.jobs import YarnApplication -from .types.node_groups import CreateNodeGroupRequest -from .types.node_groups import GetNodeGroupRequest -from .types.node_groups import ResizeNodeGroupRequest -from .types.operations import BatchOperationMetadata -from .types.operations import ClusterOperationMetadata -from .types.operations import ClusterOperationStatus -from .types.operations import NodeGroupOperationMetadata -from .types.shared import EnvironmentConfig -from .types.shared import ExecutionConfig -from .types.shared import GkeClusterConfig -from .types.shared import GkeNodePoolConfig -from .types.shared import GkeNodePoolTarget -from .types.shared import KubernetesClusterConfig -from .types.shared import KubernetesSoftwareConfig -from .types.shared import PeripheralsConfig -from .types.shared import RuntimeConfig -from .types.shared import RuntimeInfo -from .types.shared import SparkHistoryServerConfig -from .types.shared import UsageMetrics -from .types.shared import UsageSnapshot -from .types.shared import Component -from .types.shared import FailureAction -from .types.workflow_templates import ClusterOperation -from .types.workflow_templates import ClusterSelector -from .types.workflow_templates import CreateWorkflowTemplateRequest -from .types.workflow_templates import DeleteWorkflowTemplateRequest -from .types.workflow_templates import GetWorkflowTemplateRequest -from .types.workflow_templates import InstantiateInlineWorkflowTemplateRequest -from .types.workflow_templates import InstantiateWorkflowTemplateRequest -from .types.workflow_templates import ListWorkflowTemplatesRequest -from .types.workflow_templates import ListWorkflowTemplatesResponse -from .types.workflow_templates import ManagedCluster -from .types.workflow_templates import OrderedJob -from .types.workflow_templates import ParameterValidation -from .types.workflow_templates import RegexValidation -from .types.workflow_templates import TemplateParameter -from .types.workflow_templates import UpdateWorkflowTemplateRequest -from .types.workflow_templates import ValueValidation -from .types.workflow_templates import WorkflowGraph -from .types.workflow_templates import WorkflowMetadata -from .types.workflow_templates import WorkflowNode 
-from .types.workflow_templates import WorkflowTemplate -from .types.workflow_templates import WorkflowTemplatePlacement - -__all__ = ( - 'AutoscalingPolicyServiceAsyncClient', - 'BatchControllerAsyncClient', - 'ClusterControllerAsyncClient', - 'JobControllerAsyncClient', - 'NodeGroupControllerAsyncClient', - 'WorkflowTemplateServiceAsyncClient', -'AcceleratorConfig', -'AutoscalingConfig', -'AutoscalingPolicy', -'AutoscalingPolicyServiceClient', -'AuxiliaryNodeGroup', -'AuxiliaryServicesConfig', -'BasicAutoscalingAlgorithm', -'BasicYarnAutoscalingConfig', -'Batch', -'BatchControllerClient', -'BatchOperationMetadata', -'CancelJobRequest', -'Cluster', -'ClusterConfig', -'ClusterControllerClient', -'ClusterMetrics', -'ClusterOperation', -'ClusterOperationMetadata', -'ClusterOperationStatus', -'ClusterSelector', -'ClusterStatus', -'Component', -'ConfidentialInstanceConfig', -'CreateAutoscalingPolicyRequest', -'CreateBatchRequest', -'CreateClusterRequest', -'CreateNodeGroupRequest', -'CreateWorkflowTemplateRequest', -'DataprocMetricConfig', -'DeleteAutoscalingPolicyRequest', -'DeleteBatchRequest', -'DeleteClusterRequest', -'DeleteJobRequest', -'DeleteWorkflowTemplateRequest', -'DiagnoseClusterRequest', -'DiagnoseClusterResults', -'DiskConfig', -'DriverSchedulingConfig', -'EncryptionConfig', -'EndpointConfig', -'EnvironmentConfig', -'ExecutionConfig', -'FailureAction', -'GceClusterConfig', -'GetAutoscalingPolicyRequest', -'GetBatchRequest', -'GetClusterRequest', -'GetJobRequest', -'GetNodeGroupRequest', -'GetWorkflowTemplateRequest', -'GkeClusterConfig', -'GkeNodePoolConfig', -'GkeNodePoolTarget', -'HadoopJob', -'HiveJob', -'IdentityConfig', -'InstanceGroupAutoscalingPolicyConfig', -'InstanceGroupConfig', -'InstantiateInlineWorkflowTemplateRequest', -'InstantiateWorkflowTemplateRequest', -'Job', -'JobControllerClient', -'JobMetadata', -'JobPlacement', -'JobReference', -'JobScheduling', -'JobStatus', -'KerberosConfig', -'KubernetesClusterConfig', -'KubernetesSoftwareConfig', -'LifecycleConfig', -'ListAutoscalingPoliciesRequest', -'ListAutoscalingPoliciesResponse', -'ListBatchesRequest', -'ListBatchesResponse', -'ListClustersRequest', -'ListClustersResponse', -'ListJobsRequest', -'ListJobsResponse', -'ListWorkflowTemplatesRequest', -'ListWorkflowTemplatesResponse', -'LoggingConfig', -'ManagedCluster', -'ManagedGroupConfig', -'MetastoreConfig', -'NodeGroup', -'NodeGroupAffinity', -'NodeGroupControllerClient', -'NodeGroupOperationMetadata', -'NodeInitializationAction', -'OrderedJob', -'ParameterValidation', -'PeripheralsConfig', -'PigJob', -'PrestoJob', -'PySparkBatch', -'PySparkJob', -'QueryList', -'RegexValidation', -'ReservationAffinity', -'ResizeNodeGroupRequest', -'RuntimeConfig', -'RuntimeInfo', -'SecurityConfig', -'ShieldedInstanceConfig', -'SoftwareConfig', -'SparkBatch', -'SparkHistoryServerConfig', -'SparkJob', -'SparkRBatch', -'SparkRJob', -'SparkSqlBatch', -'SparkSqlJob', -'StartClusterRequest', -'StopClusterRequest', -'SubmitJobRequest', -'TemplateParameter', -'TrinoJob', -'UpdateAutoscalingPolicyRequest', -'UpdateClusterRequest', -'UpdateJobRequest', -'UpdateWorkflowTemplateRequest', -'UsageMetrics', -'UsageSnapshot', -'ValueValidation', -'VirtualClusterConfig', -'WorkflowGraph', -'WorkflowMetadata', -'WorkflowNode', -'WorkflowTemplate', -'WorkflowTemplatePlacement', -'WorkflowTemplateServiceClient', -'YarnApplication', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/dataproc_v1/gapic_metadata.json deleted file mode 
100644 index f8a05276..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/gapic_metadata.json +++ /dev/null @@ -1,633 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dataproc_v1", - "protoPackage": "google.cloud.dataproc.v1", - "schema": "1.0", - "services": { - "AutoscalingPolicyService": { - "clients": { - "grpc": { - "libraryClient": "AutoscalingPolicyServiceClient", - "rpcs": { - "CreateAutoscalingPolicy": { - "methods": [ - "create_autoscaling_policy" - ] - }, - "DeleteAutoscalingPolicy": { - "methods": [ - "delete_autoscaling_policy" - ] - }, - "GetAutoscalingPolicy": { - "methods": [ - "get_autoscaling_policy" - ] - }, - "ListAutoscalingPolicies": { - "methods": [ - "list_autoscaling_policies" - ] - }, - "UpdateAutoscalingPolicy": { - "methods": [ - "update_autoscaling_policy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AutoscalingPolicyServiceAsyncClient", - "rpcs": { - "CreateAutoscalingPolicy": { - "methods": [ - "create_autoscaling_policy" - ] - }, - "DeleteAutoscalingPolicy": { - "methods": [ - "delete_autoscaling_policy" - ] - }, - "GetAutoscalingPolicy": { - "methods": [ - "get_autoscaling_policy" - ] - }, - "ListAutoscalingPolicies": { - "methods": [ - "list_autoscaling_policies" - ] - }, - "UpdateAutoscalingPolicy": { - "methods": [ - "update_autoscaling_policy" - ] - } - } - }, - "rest": { - "libraryClient": "AutoscalingPolicyServiceClient", - "rpcs": { - "CreateAutoscalingPolicy": { - "methods": [ - "create_autoscaling_policy" - ] - }, - "DeleteAutoscalingPolicy": { - "methods": [ - "delete_autoscaling_policy" - ] - }, - "GetAutoscalingPolicy": { - "methods": [ - "get_autoscaling_policy" - ] - }, - "ListAutoscalingPolicies": { - "methods": [ - "list_autoscaling_policies" - ] - }, - "UpdateAutoscalingPolicy": { - "methods": [ - "update_autoscaling_policy" - ] - } - } - } - } - }, - "BatchController": { - "clients": { - "grpc": { - "libraryClient": "BatchControllerClient", - "rpcs": { - "CreateBatch": { - "methods": [ - "create_batch" - ] - }, - "DeleteBatch": { - "methods": [ - "delete_batch" - ] - }, - "GetBatch": { - "methods": [ - "get_batch" - ] - }, - "ListBatches": { - "methods": [ - "list_batches" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BatchControllerAsyncClient", - "rpcs": { - "CreateBatch": { - "methods": [ - "create_batch" - ] - }, - "DeleteBatch": { - "methods": [ - "delete_batch" - ] - }, - "GetBatch": { - "methods": [ - "get_batch" - ] - }, - "ListBatches": { - "methods": [ - "list_batches" - ] - } - } - }, - "rest": { - "libraryClient": "BatchControllerClient", - "rpcs": { - "CreateBatch": { - "methods": [ - "create_batch" - ] - }, - "DeleteBatch": { - "methods": [ - "delete_batch" - ] - }, - "GetBatch": { - "methods": [ - "get_batch" - ] - }, - "ListBatches": { - "methods": [ - "list_batches" - ] - } - } - } - } - }, - "ClusterController": { - "clients": { - "grpc": { - "libraryClient": "ClusterControllerClient", - "rpcs": { - "CreateCluster": { - "methods": [ - "create_cluster" - ] - }, - "DeleteCluster": { - "methods": [ - "delete_cluster" - ] - }, - "DiagnoseCluster": { - "methods": [ - "diagnose_cluster" - ] - }, - "GetCluster": { - "methods": [ - "get_cluster" - ] - }, - "ListClusters": { - "methods": [ - "list_clusters" - ] - }, - "StartCluster": { - "methods": [ - "start_cluster" - ] - }, - "StopCluster": { - "methods": [ - "stop_cluster" - ] - }, - "UpdateCluster": { - "methods": [ - "update_cluster" 
- ] - } - } - }, - "grpc-async": { - "libraryClient": "ClusterControllerAsyncClient", - "rpcs": { - "CreateCluster": { - "methods": [ - "create_cluster" - ] - }, - "DeleteCluster": { - "methods": [ - "delete_cluster" - ] - }, - "DiagnoseCluster": { - "methods": [ - "diagnose_cluster" - ] - }, - "GetCluster": { - "methods": [ - "get_cluster" - ] - }, - "ListClusters": { - "methods": [ - "list_clusters" - ] - }, - "StartCluster": { - "methods": [ - "start_cluster" - ] - }, - "StopCluster": { - "methods": [ - "stop_cluster" - ] - }, - "UpdateCluster": { - "methods": [ - "update_cluster" - ] - } - } - }, - "rest": { - "libraryClient": "ClusterControllerClient", - "rpcs": { - "CreateCluster": { - "methods": [ - "create_cluster" - ] - }, - "DeleteCluster": { - "methods": [ - "delete_cluster" - ] - }, - "DiagnoseCluster": { - "methods": [ - "diagnose_cluster" - ] - }, - "GetCluster": { - "methods": [ - "get_cluster" - ] - }, - "ListClusters": { - "methods": [ - "list_clusters" - ] - }, - "StartCluster": { - "methods": [ - "start_cluster" - ] - }, - "StopCluster": { - "methods": [ - "stop_cluster" - ] - }, - "UpdateCluster": { - "methods": [ - "update_cluster" - ] - } - } - } - } - }, - "JobController": { - "clients": { - "grpc": { - "libraryClient": "JobControllerClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "SubmitJob": { - "methods": [ - "submit_job" - ] - }, - "SubmitJobAsOperation": { - "methods": [ - "submit_job_as_operation" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - } - } - }, - "grpc-async": { - "libraryClient": "JobControllerAsyncClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "SubmitJob": { - "methods": [ - "submit_job" - ] - }, - "SubmitJobAsOperation": { - "methods": [ - "submit_job_as_operation" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - } - } - }, - "rest": { - "libraryClient": "JobControllerClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "SubmitJob": { - "methods": [ - "submit_job" - ] - }, - "SubmitJobAsOperation": { - "methods": [ - "submit_job_as_operation" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - } - } - } - } - }, - "NodeGroupController": { - "clients": { - "grpc": { - "libraryClient": "NodeGroupControllerClient", - "rpcs": { - "CreateNodeGroup": { - "methods": [ - "create_node_group" - ] - }, - "GetNodeGroup": { - "methods": [ - "get_node_group" - ] - }, - "ResizeNodeGroup": { - "methods": [ - "resize_node_group" - ] - } - } - }, - "grpc-async": { - "libraryClient": "NodeGroupControllerAsyncClient", - "rpcs": { - "CreateNodeGroup": { - "methods": [ - "create_node_group" - ] - }, - "GetNodeGroup": { - "methods": [ - "get_node_group" - ] - }, - "ResizeNodeGroup": { - "methods": [ - "resize_node_group" - ] - } - } - }, - "rest": { - "libraryClient": "NodeGroupControllerClient", - "rpcs": { - "CreateNodeGroup": { - "methods": [ - "create_node_group" - ] - }, - "GetNodeGroup": { - "methods": [ - "get_node_group" - ] - }, - "ResizeNodeGroup": { - "methods": [ 
- "resize_node_group" - ] - } - } - } - } - }, - "WorkflowTemplateService": { - "clients": { - "grpc": { - "libraryClient": "WorkflowTemplateServiceClient", - "rpcs": { - "CreateWorkflowTemplate": { - "methods": [ - "create_workflow_template" - ] - }, - "DeleteWorkflowTemplate": { - "methods": [ - "delete_workflow_template" - ] - }, - "GetWorkflowTemplate": { - "methods": [ - "get_workflow_template" - ] - }, - "InstantiateInlineWorkflowTemplate": { - "methods": [ - "instantiate_inline_workflow_template" - ] - }, - "InstantiateWorkflowTemplate": { - "methods": [ - "instantiate_workflow_template" - ] - }, - "ListWorkflowTemplates": { - "methods": [ - "list_workflow_templates" - ] - }, - "UpdateWorkflowTemplate": { - "methods": [ - "update_workflow_template" - ] - } - } - }, - "grpc-async": { - "libraryClient": "WorkflowTemplateServiceAsyncClient", - "rpcs": { - "CreateWorkflowTemplate": { - "methods": [ - "create_workflow_template" - ] - }, - "DeleteWorkflowTemplate": { - "methods": [ - "delete_workflow_template" - ] - }, - "GetWorkflowTemplate": { - "methods": [ - "get_workflow_template" - ] - }, - "InstantiateInlineWorkflowTemplate": { - "methods": [ - "instantiate_inline_workflow_template" - ] - }, - "InstantiateWorkflowTemplate": { - "methods": [ - "instantiate_workflow_template" - ] - }, - "ListWorkflowTemplates": { - "methods": [ - "list_workflow_templates" - ] - }, - "UpdateWorkflowTemplate": { - "methods": [ - "update_workflow_template" - ] - } - } - }, - "rest": { - "libraryClient": "WorkflowTemplateServiceClient", - "rpcs": { - "CreateWorkflowTemplate": { - "methods": [ - "create_workflow_template" - ] - }, - "DeleteWorkflowTemplate": { - "methods": [ - "delete_workflow_template" - ] - }, - "GetWorkflowTemplate": { - "methods": [ - "get_workflow_template" - ] - }, - "InstantiateInlineWorkflowTemplate": { - "methods": [ - "instantiate_inline_workflow_template" - ] - }, - "InstantiateWorkflowTemplate": { - "methods": [ - "instantiate_workflow_template" - ] - }, - "ListWorkflowTemplates": { - "methods": [ - "list_workflow_templates" - ] - }, - "UpdateWorkflowTemplate": { - "methods": [ - "update_workflow_template" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/gapic_version.py deleted file mode 100644 index 360a0d13..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/py.typed b/owl-bot-staging/v1/google/cloud/dataproc_v1/py.typed deleted file mode 100644 index aac99cba..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataproc package uses inline types. 
diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/__init__.py deleted file mode 100644 index 89a37dc9..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py deleted file mode 100644 index 5f1165bb..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AutoscalingPolicyServiceClient -from .async_client import AutoscalingPolicyServiceAsyncClient - -__all__ = ( - 'AutoscalingPolicyServiceClient', - 'AutoscalingPolicyServiceAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py deleted file mode 100644 index 9b9f70bc..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py +++ /dev/null @@ -1,1295 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers -from google.cloud.dataproc_v1.types import autoscaling_policies -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport -from .client import AutoscalingPolicyServiceClient - - -class AutoscalingPolicyServiceAsyncClient: - """The API interface for managing autoscaling policies in the - Dataproc API. - """ - - _client: AutoscalingPolicyServiceClient - - DEFAULT_ENDPOINT = AutoscalingPolicyServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AutoscalingPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - - autoscaling_policy_path = staticmethod(AutoscalingPolicyServiceClient.autoscaling_policy_path) - parse_autoscaling_policy_path = staticmethod(AutoscalingPolicyServiceClient.parse_autoscaling_policy_path) - common_billing_account_path = staticmethod(AutoscalingPolicyServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AutoscalingPolicyServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AutoscalingPolicyServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AutoscalingPolicyServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AutoscalingPolicyServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AutoscalingPolicyServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AutoscalingPolicyServiceClient.common_project_path) - parse_common_project_path = staticmethod(AutoscalingPolicyServiceClient.parse_common_project_path) - common_location_path = staticmethod(AutoscalingPolicyServiceClient.common_location_path) - parse_common_location_path = staticmethod(AutoscalingPolicyServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoscalingPolicyServiceAsyncClient: The constructed client. 
- """ - return AutoscalingPolicyServiceClient.from_service_account_info.__func__(AutoscalingPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoscalingPolicyServiceAsyncClient: The constructed client. - """ - return AutoscalingPolicyServiceClient.from_service_account_file.__func__(AutoscalingPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AutoscalingPolicyServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AutoscalingPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AutoscalingPolicyServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(AutoscalingPolicyServiceClient).get_transport_class, type(AutoscalingPolicyServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, AutoscalingPolicyServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the autoscaling policy service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.AutoscalingPolicyServiceTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AutoscalingPolicyServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_autoscaling_policy(self, - request: Optional[Union[autoscaling_policies.CreateAutoscalingPolicyRequest, dict]] = None, - *, - parent: Optional[str] = None, - policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> autoscaling_policies.AutoscalingPolicy: - r"""Creates new autoscaling policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_create_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - policy = dataproc_v1.AutoscalingPolicy() - policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 - policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 - policy.worker_config.max_instances = 1389 - - request = dataproc_v1.CreateAutoscalingPolicyRequest( - parent="parent_value", - policy=policy, - ) - - # Make the request - response = await client.create_autoscaling_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest, dict]]): - The request object. A request to create an autoscaling - policy. - parent (:class:`str`): - Required. The "resource name" of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. 
- - - For ``projects.regions.autoscalingPolicies.create``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For - ``projects.locations.autoscalingPolicies.create``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - policy (:class:`google.cloud.dataproc_v1.types.AutoscalingPolicy`): - Required. The autoscaling policy to - create. - - This corresponds to the ``policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.AutoscalingPolicy: - Describes an autoscaling policy for - Dataproc cluster autoscaler. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, policy]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = autoscaling_policies.CreateAutoscalingPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if policy is not None: - request.policy = policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_autoscaling_policy, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_autoscaling_policy(self, - request: Optional[Union[autoscaling_policies.UpdateAutoscalingPolicyRequest, dict]] = None, - *, - policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> autoscaling_policies.AutoscalingPolicy: - r"""Updates (replaces) autoscaling policy. - - Disabled check for update_mask, because all updates will be full - replacements. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            async def sample_update_autoscaling_policy():
-                # Create a client
-                client = dataproc_v1.AutoscalingPolicyServiceAsyncClient()
-
-                # Initialize request argument(s)
-                policy = dataproc_v1.AutoscalingPolicy()
-                policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578
-                policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789
-                policy.worker_config.max_instances = 1389
-
-                request = dataproc_v1.UpdateAutoscalingPolicyRequest(
-                    policy=policy,
-                )
-
-                # Make the request
-                response = await client.update_autoscaling_policy(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest, dict]]):
-                The request object. A request to update an autoscaling
-                policy.
-            policy (:class:`google.cloud.dataproc_v1.types.AutoscalingPolicy`):
-                Required. The updated autoscaling
-                policy.
-
-                This corresponds to the ``policy`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataproc_v1.types.AutoscalingPolicy:
-                Describes an autoscaling policy for
-                Dataproc cluster autoscaler.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([policy])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = autoscaling_policies.UpdateAutoscalingPolicyRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if policy is not None:
-            request.policy = policy
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.update_autoscaling_policy,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("policy.name", request.policy.name),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_autoscaling_policy(self,
-            request: Optional[Union[autoscaling_policies.GetAutoscalingPolicyRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> autoscaling_policies.AutoscalingPolicy:
-        r"""Retrieves autoscaling policy.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            async def sample_get_autoscaling_policy():
-                # Create a client
-                client = dataproc_v1.AutoscalingPolicyServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataproc_v1.GetAutoscalingPolicyRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_autoscaling_policy(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest, dict]]):
-                The request object. A request to fetch an autoscaling
-                policy.
-            name (:class:`str`):
-                Required. The "resource name" of the autoscaling policy,
-                as described in
-                https://cloud.google.com/apis/design/resource_names.
-
-                -  For ``projects.regions.autoscalingPolicies.get``, the
-                   resource name of the policy has the following format:
-                   ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}``
-
-                -  For ``projects.locations.autoscalingPolicies.get``,
-                   the resource name of the policy has the following
-                   format:
-                   ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataproc_v1.types.AutoscalingPolicy:
-                Describes an autoscaling policy for
-                Dataproc cluster autoscaler.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = autoscaling_policies.GetAutoscalingPolicyRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.get_autoscaling_policy,
-            default_retry=retries.Retry(
-                initial=0.1,
-                maximum=60.0,
-                multiplier=1.3,
-                predicate=retries.if_exception_type(
-                    core_exceptions.DeadlineExceeded,
-                    core_exceptions.ServiceUnavailable,
-                ),
-                deadline=600.0,
-            ),
-            default_timeout=600.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
- return response - - async def list_autoscaling_policies(self, - request: Optional[Union[autoscaling_policies.ListAutoscalingPoliciesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutoscalingPoliciesAsyncPager: - r"""Lists autoscaling policies in the project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_list_autoscaling_policies(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ListAutoscalingPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_autoscaling_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest, dict]]): - The request object. A request to list autoscaling - policies in a project. - parent (:class:`str`): - Required. The "resource name" of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.list``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.autoscalingPolicies.list``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesAsyncPager: - A response to a request to list - autoscaling policies in a project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_autoscaling_policies, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAutoscalingPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_autoscaling_policy(self, - request: Optional[Union[autoscaling_policies.DeleteAutoscalingPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an autoscaling policy. It is an error to - delete an autoscaling policy that is in use by one or - more clusters. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_delete_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteAutoscalingPolicyRequest( - name="name_value", - ) - - # Make the request - await client.delete_autoscaling_policy(request=request) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]]): - The request object. A request to delete an autoscaling - policy. - Autoscaling policies in use by one or - more clusters will not be deleted. - name (:class:`str`): - Required. The "resource name" of the autoscaling policy, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.delete``, - the resource name of the policy has the following - format: - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` - - - For - ``projects.locations.autoscalingPolicies.delete``, - the resource name of the policy has the following - format: - ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = autoscaling_policies.DeleteAutoscalingPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_autoscaling_policy, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. 
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            None
-        """
-        # Create or coerce a protobuf request object.
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = operations_pb2.CancelOperationRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._client._transport.cancel_operation,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Send the request.
-        await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-    async def set_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Sets the IAM access control policy on the specified function.
-
-        Replaces any existing policy.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
-                The request object. Request message for `SetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
- """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. 
- - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "AutoscalingPolicyServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AutoscalingPolicyServiceAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py deleted file mode 100644 index 9d5525df..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ /dev/null @@ -1,1475 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers -from google.cloud.dataproc_v1.types import autoscaling_policies -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AutoscalingPolicyServiceGrpcTransport -from .transports.grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport -from .transports.rest import AutoscalingPolicyServiceRestTransport - - -class AutoscalingPolicyServiceClientMeta(type): - """Metaclass for the AutoscalingPolicyService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AutoscalingPolicyServiceTransport]] - _transport_registry["grpc"] = AutoscalingPolicyServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AutoscalingPolicyServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AutoscalingPolicyServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AutoscalingPolicyServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AutoscalingPolicyServiceClient(metaclass=AutoscalingPolicyServiceClientMeta): - """The API interface for managing autoscaling policies in the - Dataproc API. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataproc.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoscalingPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoscalingPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AutoscalingPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AutoscalingPolicyServiceTransport: The transport used by the client - instance. 
-        """
-        return self._transport
-
-    @staticmethod
-    def autoscaling_policy_path(project: str,location: str,autoscaling_policy: str,) -> str:
-        """Returns a fully-qualified autoscaling_policy string."""
-        return "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format(project=project, location=location, autoscaling_policy=autoscaling_policy, )
-
-    @staticmethod
-    def parse_autoscaling_policy_path(path: str) -> Dict[str,str]:
-        """Parses an autoscaling_policy path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autoscalingPolicies/(?P<autoscaling_policy>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
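The path helpers above compose and parse deterministically; a minimal sketch with placeholder identifiers (``my-project``, ``my-policy``):

.. code-block:: python

    path = AutoscalingPolicyServiceClient.autoscaling_policy_path(
        "my-project", "us-central1", "my-policy"
    )
    # "projects/my-project/locations/us-central1/autoscalingPolicies/my-policy"

    AutoscalingPolicyServiceClient.parse_autoscaling_policy_path(path)
    # {"project": "my-project", "location": "us-central1",
    #  "autoscaling_policy": "my-policy"}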
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, AutoscalingPolicyServiceTransport]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the autoscaling policy service client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, AutoscalingPolicyServiceTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
-                client. It won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
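A sketch of the resolution order just described, using only the documented knobs; no network calls are made, and the regional hostname in the second call is a placeholder:

.. code-block:: python

    import os

    from google.api_core.client_options import ClientOptions

    # With no client certificate configured and the default "auto" mode,
    # the regular endpoint is selected.
    os.environ.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None)
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"

    endpoint, cert_source = (
        AutoscalingPolicyServiceClient.get_mtls_endpoint_and_cert_source(
            ClientOptions()
        )
    )
    assert endpoint == "dataproc.googleapis.com" and cert_source is None

    # An explicit api_endpoint always wins over the environment variables.
    endpoint, _ = AutoscalingPolicyServiceClient.get_mtls_endpoint_and_cert_source(
        ClientOptions(api_endpoint="us-central1-dataproc.googleapis.com")
    )
    assert endpoint == "us-central1-dataproc.googleapis.com"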
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, AutoscalingPolicyServiceTransport): - # transport is a AutoscalingPolicyServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_autoscaling_policy(self, - request: Optional[Union[autoscaling_policies.CreateAutoscalingPolicyRequest, dict]] = None, - *, - parent: Optional[str] = None, - policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> autoscaling_policies.AutoscalingPolicy: - r"""Creates new autoscaling policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_create_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - policy = dataproc_v1.AutoscalingPolicy() - policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 - policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 - policy.worker_config.max_instances = 1389 - - request = dataproc_v1.CreateAutoscalingPolicyRequest( - parent="parent_value", - policy=policy, - ) - - # Make the request - response = client.create_autoscaling_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest, dict]): - The request object. A request to create an autoscaling - policy. - parent (str): - Required. The "resource name" of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.create``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For - ``projects.locations.autoscalingPolicies.create``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): - Required. The autoscaling policy to - create. - - This corresponds to the ``policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.AutoscalingPolicy: - Describes an autoscaling policy for - Dataproc cluster autoscaler. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, policy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a autoscaling_policies.CreateAutoscalingPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, autoscaling_policies.CreateAutoscalingPolicyRequest): - request = autoscaling_policies.CreateAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if policy is not None: - request.policy = policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_autoscaling_policy] - - # Certain fields should be provided within the metadata header; - # add these here. 
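The flattened ``parent`` and ``policy`` arguments in the signature can stand in for an explicit request object; a sketch with placeholder names (``my-project``, ``my-policy``), noting that mixing a request object with flattened fields raises ``ValueError``, as enforced below:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()

    policy = dataproc_v1.AutoscalingPolicy()
    policy.id = "my-policy"
    policy.basic_algorithm.yarn_config.scale_up_factor = 0.5
    policy.basic_algorithm.yarn_config.scale_down_factor = 0.5
    policy.worker_config.max_instances = 10

    # Flattened fields are copied onto a CreateAutoscalingPolicyRequest.
    response = client.create_autoscaling_policy(
        parent="projects/my-project/regions/us-central1",
        policy=policy,
    )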
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_autoscaling_policy(self, - request: Optional[Union[autoscaling_policies.UpdateAutoscalingPolicyRequest, dict]] = None, - *, - policy: Optional[autoscaling_policies.AutoscalingPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> autoscaling_policies.AutoscalingPolicy: - r"""Updates (replaces) autoscaling policy. - - Disabled check for update_mask, because all updates will be full - replacements. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_update_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - policy = dataproc_v1.AutoscalingPolicy() - policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 - policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 - policy.worker_config.max_instances = 1389 - - request = dataproc_v1.UpdateAutoscalingPolicyRequest( - policy=policy, - ) - - # Make the request - response = client.update_autoscaling_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest, dict]): - The request object. A request to update an autoscaling - policy. - policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): - Required. The updated autoscaling - policy. - - This corresponds to the ``policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.AutoscalingPolicy: - Describes an autoscaling policy for - Dataproc cluster autoscaler. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([policy]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a autoscaling_policies.UpdateAutoscalingPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, autoscaling_policies.UpdateAutoscalingPolicyRequest): - request = autoscaling_policies.UpdateAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if policy is not None: - request.policy = policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_autoscaling_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("policy.name", request.policy.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_autoscaling_policy(self, - request: Optional[Union[autoscaling_policies.GetAutoscalingPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> autoscaling_policies.AutoscalingPolicy: - r"""Retrieves autoscaling policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_get_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.GetAutoscalingPolicyRequest( - name="name_value", - ) - - # Make the request - response = client.get_autoscaling_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest, dict]): - The request object. A request to fetch an autoscaling - policy. - name (str): - Required. The "resource name" of the autoscaling policy, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.get``, the - resource name of the policy has the following format: - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` - - - For ``projects.locations.autoscalingPolicies.get``, - the resource name of the policy has the following - format: - ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.AutoscalingPolicy: - Describes an autoscaling policy for - Dataproc cluster autoscaler. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a autoscaling_policies.GetAutoscalingPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, autoscaling_policies.GetAutoscalingPolicyRequest): - request = autoscaling_policies.GetAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_autoscaling_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_autoscaling_policies(self, - request: Optional[Union[autoscaling_policies.ListAutoscalingPoliciesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListAutoscalingPoliciesPager: - r"""Lists autoscaling policies in the project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_list_autoscaling_policies(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.ListAutoscalingPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_autoscaling_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest, dict]): - The request object. A request to list autoscaling - policies in a project. - parent (str): - Required. The "resource name" of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.list``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.autoscalingPolicies.list``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesPager: - A response to a request to list - autoscaling policies in a project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a autoscaling_policies.ListAutoscalingPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, autoscaling_policies.ListAutoscalingPoliciesRequest): - request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_autoscaling_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAutoscalingPoliciesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_autoscaling_policy(self, - request: Optional[Union[autoscaling_policies.DeleteAutoscalingPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an autoscaling policy. It is an error to - delete an autoscaling policy that is in use by one or - more clusters. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_delete_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteAutoscalingPolicyRequest( - name="name_value", - ) - - # Make the request - client.delete_autoscaling_policy(request=request) - - Args: - request (Union[google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest, dict]): - The request object. A request to delete an autoscaling - policy. 
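Besides the flat iteration used in the sample above, the returned pager exposes a ``pages`` iterator when per-page access (for example, to page tokens) is needed; a brief sketch with a placeholder parent:

.. code-block:: python

    pager = client.list_autoscaling_policies(
        parent="projects/my-project/regions/us-central1"
    )
    for page in pager.pages:
        print("token:", page.next_page_token)
        for policy in page.policies:
            print(policy.name)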
- Autoscaling policies in use by one or - more clusters will not be deleted. - name (str): - Required. The "resource name" of the autoscaling policy, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.delete``, - the resource name of the policy has the following - format: - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` - - - For - ``projects.locations.autoscalingPolicies.delete``, - the resource name of the policy has the following - format: - ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a autoscaling_policies.DeleteAutoscalingPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, autoscaling_policies.DeleteAutoscalingPolicyRequest): - request = autoscaling_policies.DeleteAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_autoscaling_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "AutoscalingPolicyServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
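Given the ``__enter__``/``__exit__`` pair above, the client can scope its transport to a ``with`` block; a sketch with a placeholder policy name (per the warning, the transport must not be shared with other clients):

.. code-block:: python

    from google.cloud import dataproc_v1

    # The transport is closed on exit, so the client must not outlive the block.
    with dataproc_v1.AutoscalingPolicyServiceClient() as client:
        client.delete_autoscaling_policy(
            name=(
                "projects/my-project/regions/us-central1/"
                "autoscalingPolicies/my-policy"
            )
        )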
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
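Since these operation mixins take raw ``operations_pb2`` messages, a plain dict is coerced via keyword expansion; a sketch with a placeholder operations collection name:

.. code-block:: python

    # Dict requests are expanded into operations_pb2.ListOperationsRequest.
    ops = client.list_operations(
        request={"name": "projects/my-project/regions/us-central1/operations"}
    )
    for op in ops.operations:
        print(op.name, op.done)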
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. 
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._transport.set_iam_policy,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._transport.get_iam_policy,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a function.
-
-        If the function does not exist, this will return an empty set
-        of permissions, not a NOT_FOUND error.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
-                The request object. Request message for
-                `TestIamPermissions` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.iam_policy_pb2.TestIamPermissionsResponse:
-                Response message for ``TestIamPermissions`` method.
-        """
-        # Create or coerce a protobuf request object.
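The IAM mixins accept the same dict-coercion pattern; a sketch in which the resource name and permission string are illustrative placeholders:

.. code-block:: python

    # Dict requests are expanded into the iam_policy_pb2 request types.
    reply = client.test_iam_permissions(
        request={
            "resource": (
                "projects/my-project/regions/us-central1/"
                "autoscalingPolicies/my-policy"
            ),
            "permissions": ["dataproc.autoscalingPolicies.get"],
        }
    )
    print(list(reply.permissions))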
- - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AutoscalingPolicyServiceClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py deleted file mode 100644 index 91292bec..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataproc_v1.types import autoscaling_policies - - -class ListAutoscalingPoliciesPager: - """A pager for iterating through ``list_autoscaling_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``policies`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAutoscalingPolicies`` requests and continue to iterate - through the ``policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., autoscaling_policies.ListAutoscalingPoliciesResponse], - request: autoscaling_policies.ListAutoscalingPoliciesRequest, - response: autoscaling_policies.ListAutoscalingPoliciesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse): - The initial response object. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[autoscaling_policies.ListAutoscalingPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[autoscaling_policies.AutoscalingPolicy]: - for page in self.pages: - yield from page.policies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAutoscalingPoliciesAsyncPager: - """A pager for iterating through ``list_autoscaling_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``policies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAutoscalingPolicies`` requests and continue to iterate - through the ``policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse]], - request: autoscaling_policies.ListAutoscalingPoliciesRequest, - response: autoscaling_policies.ListAutoscalingPoliciesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListAutoscalingPoliciesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[autoscaling_policies.ListAutoscalingPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[autoscaling_policies.AutoscalingPolicy]: - async def async_generator(): - async for page in self.pages: - for response in page.policies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py deleted file mode 100644 index c3e10ce9..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AutoscalingPolicyServiceTransport -from .grpc import AutoscalingPolicyServiceGrpcTransport -from .grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport -from .rest import AutoscalingPolicyServiceRestTransport -from .rest import AutoscalingPolicyServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AutoscalingPolicyServiceTransport]] -_transport_registry['grpc'] = AutoscalingPolicyServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AutoscalingPolicyServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AutoscalingPolicyServiceRestTransport - -__all__ = ( - 'AutoscalingPolicyServiceTransport', - 'AutoscalingPolicyServiceGrpcTransport', - 'AutoscalingPolicyServiceGrpcAsyncIOTransport', - 'AutoscalingPolicyServiceRestTransport', - 'AutoscalingPolicyServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py deleted file mode 100644 index d9b64bb4..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py +++ /dev/null @@ -1,295 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
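The async pager above is consumed with ``async for``; a minimal sketch of the asynchronous counterpart, with a placeholder parent:

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def main():
        # Awaiting the call returns the async pager, which is then
        # consumed with `async for`, resolving pages transparently.
        client = dataproc_v1.AutoscalingPolicyServiceAsyncClient()
        pager = await client.list_autoscaling_policies(
            parent="projects/my-project/regions/us-central1"
        )
        async for policy in pager:
            print(policy.name)

    asyncio.run(main())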
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataproc_v1.types import autoscaling_policies -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AutoscalingPolicyServiceTransport(abc.ABC): - """Abstract transport class for AutoscalingPolicyService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataproc.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                credentials_file,
-                **scopes_kwargs,
-                quota_project_id=quota_project_id
-            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply audience if the credentials file passed from user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
-        self._wrapped_methods = {
-            self.create_autoscaling_policy: gapic_v1.method.wrap_method(
-                self.create_autoscaling_policy,
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.update_autoscaling_policy: gapic_v1.method.wrap_method(
-                self.update_autoscaling_policy,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.get_autoscaling_policy: gapic_v1.method.wrap_method(
-                self.get_autoscaling_policy,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.list_autoscaling_policies: gapic_v1.method.wrap_method(
-                self.list_autoscaling_policies,
-                default_retry=retries.Retry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=600.0,
-                ),
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-            self.delete_autoscaling_policy: gapic_v1.method.wrap_method(
-                self.delete_autoscaling_policy,
-                default_timeout=600.0,
-                client_info=client_info,
-            ),
-        }
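The ``retries.Retry`` policies attached above retry ``DeadlineExceeded`` and ``ServiceUnavailable`` with exponential backoff: the delay ceiling starts at 0.1 s, grows by a factor of 1.3 up to a 60 s cap, and the whole retried call is bounded by a 600 s deadline. An illustrative snippet (not from the generated file) printing the first few ceilings; the real policy sleeps a random amount up to each ceiling:

.. code-block:: python

    delay, bounds = 0.1, []
    for _ in range(8):  # ceilings for the first eight attempts
        bounds.append(round(delay, 3))
        delay = min(delay * 1.3, 60.0)
    print(bounds)  # [0.1, 0.13, 0.169, 0.22, 0.286, 0.371, 0.483, 0.627]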
- """ - raise NotImplementedError() - - @property - def create_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.CreateAutoscalingPolicyRequest], - Union[ - autoscaling_policies.AutoscalingPolicy, - Awaitable[autoscaling_policies.AutoscalingPolicy] - ]]: - raise NotImplementedError() - - @property - def update_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.UpdateAutoscalingPolicyRequest], - Union[ - autoscaling_policies.AutoscalingPolicy, - Awaitable[autoscaling_policies.AutoscalingPolicy] - ]]: - raise NotImplementedError() - - @property - def get_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.GetAutoscalingPolicyRequest], - Union[ - autoscaling_policies.AutoscalingPolicy, - Awaitable[autoscaling_policies.AutoscalingPolicy] - ]]: - raise NotImplementedError() - - @property - def list_autoscaling_policies(self) -> Callable[ - [autoscaling_policies.ListAutoscalingPoliciesRequest], - Union[ - autoscaling_policies.ListAutoscalingPoliciesResponse, - Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.DeleteAutoscalingPolicyRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AutoscalingPolicyServiceTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py deleted file mode 100644 index 4cb4f3bc..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py +++ /dev/null @@ -1,529 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-
-from google.cloud.dataproc_v1.types import autoscaling_policies
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2
-from google.protobuf import empty_pb2  # type: ignore
-from .base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO
-
-
-class AutoscalingPolicyServiceGrpcTransport(AutoscalingPolicyServiceTransport):
-    """gRPC backend transport for AutoscalingPolicyService.
-
-    The API interface for managing autoscaling policies in the
-    Dataproc API.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'dataproc.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataproc.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def create_autoscaling_policy(self) -> Callable[
-            [autoscaling_policies.CreateAutoscalingPolicyRequest],
-            autoscaling_policies.AutoscalingPolicy]:
-        r"""Return a callable for the create autoscaling policy method over gRPC.
-
-        Creates new autoscaling policy.
-
-        Returns:
-            Callable[[~.CreateAutoscalingPolicyRequest],
-                    ~.AutoscalingPolicy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_autoscaling_policy' not in self._stubs:
-            self._stubs['create_autoscaling_policy'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.AutoscalingPolicyService/CreateAutoscalingPolicy',
-                request_serializer=autoscaling_policies.CreateAutoscalingPolicyRequest.serialize,
-                response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize,
-            )
-        return self._stubs['create_autoscaling_policy']
-
-    @property
-    def update_autoscaling_policy(self) -> Callable[
-            [autoscaling_policies.UpdateAutoscalingPolicyRequest],
-            autoscaling_policies.AutoscalingPolicy]:
-        r"""Return a callable for the update autoscaling policy method over gRPC.
-
-        Updates (replaces) autoscaling policy.
-
-        Disabled check for update_mask, because all updates will be full
-        replacements.
-
-        Returns:
-            Callable[[~.UpdateAutoscalingPolicyRequest],
-                    ~.AutoscalingPolicy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_autoscaling_policy' not in self._stubs: - self._stubs['update_autoscaling_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/UpdateAutoscalingPolicy', - request_serializer=autoscaling_policies.UpdateAutoscalingPolicyRequest.serialize, - response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, - ) - return self._stubs['update_autoscaling_policy'] - - @property - def get_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.GetAutoscalingPolicyRequest], - autoscaling_policies.AutoscalingPolicy]: - r"""Return a callable for the get autoscaling policy method over gRPC. - - Retrieves autoscaling policy. - - Returns: - Callable[[~.GetAutoscalingPolicyRequest], - ~.AutoscalingPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_autoscaling_policy' not in self._stubs: - self._stubs['get_autoscaling_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/GetAutoscalingPolicy', - request_serializer=autoscaling_policies.GetAutoscalingPolicyRequest.serialize, - response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, - ) - return self._stubs['get_autoscaling_policy'] - - @property - def list_autoscaling_policies(self) -> Callable[ - [autoscaling_policies.ListAutoscalingPoliciesRequest], - autoscaling_policies.ListAutoscalingPoliciesResponse]: - r"""Return a callable for the list autoscaling policies method over gRPC. - - Lists autoscaling policies in the project. - - Returns: - Callable[[~.ListAutoscalingPoliciesRequest], - ~.ListAutoscalingPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_autoscaling_policies' not in self._stubs: - self._stubs['list_autoscaling_policies'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/ListAutoscalingPolicies', - request_serializer=autoscaling_policies.ListAutoscalingPoliciesRequest.serialize, - response_deserializer=autoscaling_policies.ListAutoscalingPoliciesResponse.deserialize, - ) - return self._stubs['list_autoscaling_policies'] - - @property - def delete_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.DeleteAutoscalingPolicyRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete autoscaling policy method over gRPC. - - Deletes an autoscaling policy. It is an error to - delete an autoscaling policy that is in use by one or - more clusters. - - Returns: - Callable[[~.DeleteAutoscalingPolicyRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_autoscaling_policy' not in self._stubs: - self._stubs['delete_autoscaling_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy', - request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_autoscaling_policy'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. 
Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AutoscalingPolicyServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py deleted file mode 100644 index 4ab2974f..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,528 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataproc_v1.types import autoscaling_policies -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from .base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AutoscalingPolicyServiceGrpcTransport - - -class AutoscalingPolicyServiceGrpcAsyncIOTransport(AutoscalingPolicyServiceTransport): - """gRPC AsyncIO backend transport for AutoscalingPolicyService. - - The API interface for managing autoscaling policies in the - Dataproc API. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'dataproc.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[aio.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        # Wrap messages. This must be done after self._grpc_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def grpc_channel(self) -> aio.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
-        """
-        # Return the channel from cache.
-        return self._grpc_channel
-
-    @property
-    def create_autoscaling_policy(self) -> Callable[
-            [autoscaling_policies.CreateAutoscalingPolicyRequest],
-            Awaitable[autoscaling_policies.AutoscalingPolicy]]:
-        r"""Return a callable for the create autoscaling policy method over gRPC.
-
-        Creates new autoscaling policy.
-
-        Returns:
-            Callable[[~.CreateAutoscalingPolicyRequest],
-                    Awaitable[~.AutoscalingPolicy]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_autoscaling_policy' not in self._stubs: - self._stubs['create_autoscaling_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/CreateAutoscalingPolicy', - request_serializer=autoscaling_policies.CreateAutoscalingPolicyRequest.serialize, - response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, - ) - return self._stubs['create_autoscaling_policy'] - - @property - def update_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.UpdateAutoscalingPolicyRequest], - Awaitable[autoscaling_policies.AutoscalingPolicy]]: - r"""Return a callable for the update autoscaling policy method over gRPC. - - Updates (replaces) autoscaling policy. - - Disabled check for update_mask, because all updates will be full - replacements. - - Returns: - Callable[[~.UpdateAutoscalingPolicyRequest], - Awaitable[~.AutoscalingPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_autoscaling_policy' not in self._stubs: - self._stubs['update_autoscaling_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/UpdateAutoscalingPolicy', - request_serializer=autoscaling_policies.UpdateAutoscalingPolicyRequest.serialize, - response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, - ) - return self._stubs['update_autoscaling_policy'] - - @property - def get_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.GetAutoscalingPolicyRequest], - Awaitable[autoscaling_policies.AutoscalingPolicy]]: - r"""Return a callable for the get autoscaling policy method over gRPC. - - Retrieves autoscaling policy. - - Returns: - Callable[[~.GetAutoscalingPolicyRequest], - Awaitable[~.AutoscalingPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_autoscaling_policy' not in self._stubs: - self._stubs['get_autoscaling_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/GetAutoscalingPolicy', - request_serializer=autoscaling_policies.GetAutoscalingPolicyRequest.serialize, - response_deserializer=autoscaling_policies.AutoscalingPolicy.deserialize, - ) - return self._stubs['get_autoscaling_policy'] - - @property - def list_autoscaling_policies(self) -> Callable[ - [autoscaling_policies.ListAutoscalingPoliciesRequest], - Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse]]: - r"""Return a callable for the list autoscaling policies method over gRPC. - - Lists autoscaling policies in the project. - - Returns: - Callable[[~.ListAutoscalingPoliciesRequest], - Awaitable[~.ListAutoscalingPoliciesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_autoscaling_policies' not in self._stubs: - self._stubs['list_autoscaling_policies'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/ListAutoscalingPolicies', - request_serializer=autoscaling_policies.ListAutoscalingPoliciesRequest.serialize, - response_deserializer=autoscaling_policies.ListAutoscalingPoliciesResponse.deserialize, - ) - return self._stubs['list_autoscaling_policies'] - - @property - def delete_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.DeleteAutoscalingPolicyRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete autoscaling policy method over gRPC. - - Deletes an autoscaling policy. It is an error to - delete an autoscaling policy that is in use by one or - more clusters. - - Returns: - Callable[[~.DeleteAutoscalingPolicyRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_autoscaling_policy' not in self._stubs: - self._stubs['delete_autoscaling_policy'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy', - request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_autoscaling_policy'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
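The operations stubs defined around here surface on both the sync and async clients as the standard long-running-operations mixin. A sketch of fetching one operation with the sync client; the resource name and OP_ID are placeholders, and application default credentials are assumed:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.longrunning import operations_pb2

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    op = client.get_operation(
        operations_pb2.GetOperationRequest(
            name="projects/my-project/regions/us-central1/operations/OP_ID"  # placeholder
        )
    )
    print(op.done)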
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. 
- Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'AutoscalingPolicyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py deleted file mode 100644 index 30b5402f..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py +++ /dev/null @@ -1,1463 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataproc_v1.types import autoscaling_policies -from google.protobuf import empty_pb2 # type: ignore - -from .base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class AutoscalingPolicyServiceRestInterceptor: - """Interceptor for AutoscalingPolicyService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
-    Example use cases include:
-    * Logging
-    * Verifying requests according to service or custom semantics
-    * Stripping extraneous information from responses
-
-    These use cases and more can be enabled by injecting an
-    instance of a custom subclass when constructing the AutoscalingPolicyServiceRestTransport.
-
-    .. code-block:: python
-
-        class MyCustomAutoscalingPolicyServiceInterceptor(AutoscalingPolicyServiceRestInterceptor):
-            def pre_create_autoscaling_policy(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_create_autoscaling_policy(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_delete_autoscaling_policy(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def pre_get_autoscaling_policy(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_autoscaling_policy(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_list_autoscaling_policies(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_autoscaling_policies(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_update_autoscaling_policy(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_update_autoscaling_policy(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-        transport = AutoscalingPolicyServiceRestTransport(interceptor=MyCustomAutoscalingPolicyServiceInterceptor())
-        client = AutoscalingPolicyServiceClient(transport=transport)
-
-
-    """
-    def pre_create_autoscaling_policy(self, request: autoscaling_policies.CreateAutoscalingPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[autoscaling_policies.CreateAutoscalingPolicyRequest, Sequence[Tuple[str, str]]]:
-        """Pre-rpc interceptor for create_autoscaling_policy
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the AutoscalingPolicyService server.
-        """
-        return request, metadata
-
-    def post_create_autoscaling_policy(self, response: autoscaling_policies.AutoscalingPolicy) -> autoscaling_policies.AutoscalingPolicy:
-        """Post-rpc interceptor for create_autoscaling_policy
-
-        Override in a subclass to manipulate the response
-        after it is returned by the AutoscalingPolicyService server but before
-        it is returned to user code.
-        """
-        return response
-    def pre_delete_autoscaling_policy(self, request: autoscaling_policies.DeleteAutoscalingPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[autoscaling_policies.DeleteAutoscalingPolicyRequest, Sequence[Tuple[str, str]]]:
-        """Pre-rpc interceptor for delete_autoscaling_policy
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the AutoscalingPolicyService server.
-        """
-        return request, metadata
-
-    def pre_get_autoscaling_policy(self, request: autoscaling_policies.GetAutoscalingPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[autoscaling_policies.GetAutoscalingPolicyRequest, Sequence[Tuple[str, str]]]:
-        """Pre-rpc interceptor for get_autoscaling_policy
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the AutoscalingPolicyService server.
- """ - return request, metadata - - def post_get_autoscaling_policy(self, response: autoscaling_policies.AutoscalingPolicy) -> autoscaling_policies.AutoscalingPolicy: - """Post-rpc interceptor for get_autoscaling_policy - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - def pre_list_autoscaling_policies(self, request: autoscaling_policies.ListAutoscalingPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[autoscaling_policies.ListAutoscalingPoliciesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_autoscaling_policies - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_list_autoscaling_policies(self, response: autoscaling_policies.ListAutoscalingPoliciesResponse) -> autoscaling_policies.ListAutoscalingPoliciesResponse: - """Post-rpc interceptor for list_autoscaling_policies - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - def pre_update_autoscaling_policy(self, request: autoscaling_policies.UpdateAutoscalingPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[autoscaling_policies.UpdateAutoscalingPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_autoscaling_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_update_autoscaling_policy(self, response: autoscaling_policies.AutoscalingPolicy) -> autoscaling_policies.AutoscalingPolicy: - """Post-rpc interceptor for update_autoscaling_policy - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. 
- """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoscalingPolicyService server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class AutoscalingPolicyServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AutoscalingPolicyServiceRestInterceptor - - -class AutoscalingPolicyServiceRestTransport(AutoscalingPolicyServiceTransport): - """REST backend transport for AutoscalingPolicyService. - - The API interface for managing autoscaling policies in the - Dataproc API. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AutoscalingPolicyServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or AutoscalingPolicyServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _CreateAutoscalingPolicy(AutoscalingPolicyServiceRestStub):
-        def __hash__(self):
-            return hash("CreateAutoscalingPolicy")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        def __call__(self,
-                request: autoscaling_policies.CreateAutoscalingPolicyRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> autoscaling_policies.AutoscalingPolicy:
-            r"""Call the create autoscaling policy method over HTTP.
-
-            Args:
-                request (~.autoscaling_policies.CreateAutoscalingPolicyRequest):
-                    The request object. A request to create an autoscaling
-                    policy.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.autoscaling_policies.AutoscalingPolicy:
-                    Describes an autoscaling policy for
-                Dataproc cluster autoscaler.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/autoscalingPolicies', - 'body': 'policy', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/regions/*}/autoscalingPolicies', - 'body': 'policy', - }, - ] - request, metadata = self._interceptor.pre_create_autoscaling_policy(request, metadata) - pb_request = autoscaling_policies.CreateAutoscalingPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = autoscaling_policies.AutoscalingPolicy() - pb_resp = autoscaling_policies.AutoscalingPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_autoscaling_policy(resp) - return resp - - class _DeleteAutoscalingPolicy(AutoscalingPolicyServiceRestStub): - def __hash__(self): - return hash("DeleteAutoscalingPolicy") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: autoscaling_policies.DeleteAutoscalingPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete autoscaling policy method over HTTP. - - Args: - request (~.autoscaling_policies.DeleteAutoscalingPolicyRequest): - The request object. A request to delete an autoscaling - policy. - Autoscaling policies in use by one or - more clusters will not be deleted. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/autoscalingPolicies/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/autoscalingPolicies/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_autoscaling_policy(request, metadata) - pb_request = autoscaling_policies.DeleteAutoscalingPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetAutoscalingPolicy(AutoscalingPolicyServiceRestStub): - def __hash__(self): - return hash("GetAutoscalingPolicy") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: autoscaling_policies.GetAutoscalingPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> autoscaling_policies.AutoscalingPolicy: - r"""Call the get autoscaling policy method over HTTP. - - Args: - request (~.autoscaling_policies.GetAutoscalingPolicyRequest): - The request object. A request to fetch an autoscaling - policy. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.autoscaling_policies.AutoscalingPolicy: - Describes an autoscaling policy for - Dataproc cluster autoscaler. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/autoscalingPolicies/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/autoscalingPolicies/*}', - }, - ] - request, metadata = self._interceptor.pre_get_autoscaling_policy(request, metadata) - pb_request = autoscaling_policies.GetAutoscalingPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = autoscaling_policies.AutoscalingPolicy() - pb_resp = autoscaling_policies.AutoscalingPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_autoscaling_policy(resp) - return resp - - class _ListAutoscalingPolicies(AutoscalingPolicyServiceRestStub): - def __hash__(self): - return hash("ListAutoscalingPolicies") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: autoscaling_policies.ListAutoscalingPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> autoscaling_policies.ListAutoscalingPoliciesResponse: - r"""Call the list autoscaling policies method over HTTP. - - Args: - request (~.autoscaling_policies.ListAutoscalingPoliciesRequest): - The request object. A request to list autoscaling - policies in a project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.autoscaling_policies.ListAutoscalingPoliciesResponse: - A response to a request to list - autoscaling policies in a project. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/autoscalingPolicies', - }, -{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/regions/*}/autoscalingPolicies', - }, - ] - request, metadata = self._interceptor.pre_list_autoscaling_policies(request, metadata) - pb_request = autoscaling_policies.ListAutoscalingPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = autoscaling_policies.ListAutoscalingPoliciesResponse() - pb_resp = autoscaling_policies.ListAutoscalingPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_autoscaling_policies(resp) - return resp - - class _UpdateAutoscalingPolicy(AutoscalingPolicyServiceRestStub): - def __hash__(self): - return hash("UpdateAutoscalingPolicy") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: autoscaling_policies.UpdateAutoscalingPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> autoscaling_policies.AutoscalingPolicy: - r"""Call the update autoscaling policy method over HTTP. - - Args: - request (~.autoscaling_policies.UpdateAutoscalingPolicyRequest): - The request object. A request to update an autoscaling - policy. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.autoscaling_policies.AutoscalingPolicy: - Describes an autoscaling policy for - Dataproc cluster autoscaler. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}', - 'body': 'policy', - }, -{ - 'method': 'put', - 'uri': '/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}', - 'body': 'policy', - }, - ] - request, metadata = self._interceptor.pre_update_autoscaling_policy(request, metadata) - pb_request = autoscaling_policies.UpdateAutoscalingPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = autoscaling_policies.AutoscalingPolicy() - pb_resp = autoscaling_policies.AutoscalingPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_autoscaling_policy(resp) - return resp - - @property - def create_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.CreateAutoscalingPolicyRequest], - autoscaling_policies.AutoscalingPolicy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.DeleteAutoscalingPolicyRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.GetAutoscalingPolicyRequest], - autoscaling_policies.AutoscalingPolicy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_autoscaling_policies(self) -> Callable[ - [autoscaling_policies.ListAutoscalingPoliciesRequest], - autoscaling_policies.ListAutoscalingPoliciesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListAutoscalingPolicies(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_autoscaling_policy(self) -> Callable[ - [autoscaling_policies.UpdateAutoscalingPolicyRequest], - autoscaling_policies.AutoscalingPolicy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateAutoscalingPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(AutoscalingPolicyServiceRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
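-            # from_http_response maps the status code to a concrete subclass
-            # (for example, 403 -> PermissionDenied, 404 -> NotFound), so user
-            # code can catch google.api_core exception types rather than raw
-            # HTTP errors.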
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(AutoscalingPolicyServiceRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(AutoscalingPolicyServiceRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(AutoscalingPolicyServiceRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(AutoscalingPolicyServiceRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(AutoscalingPolicyServiceRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(AutoscalingPolicyServiceRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AutoscalingPolicyServiceRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/__init__.py deleted file mode 100644 index a297cd10..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import BatchControllerClient -from .async_client import BatchControllerAsyncClient - -__all__ = ( - 'BatchControllerClient', - 'BatchControllerAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/async_client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/async_client.py deleted file mode 100644 index b684ae43..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/async_client.py +++ /dev/null @@ -1,1151 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.batch_controller import pagers -from google.cloud.dataproc_v1.types import batches -from google.cloud.dataproc_v1.types import operations -from google.cloud.dataproc_v1.types import shared -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BatchControllerGrpcAsyncIOTransport -from .client import BatchControllerClient - - -class BatchControllerAsyncClient: - """The BatchController provides methods to manage batch - workloads. 
- """ - - _client: BatchControllerClient - - DEFAULT_ENDPOINT = BatchControllerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = BatchControllerClient.DEFAULT_MTLS_ENDPOINT - - batch_path = staticmethod(BatchControllerClient.batch_path) - parse_batch_path = staticmethod(BatchControllerClient.parse_batch_path) - service_path = staticmethod(BatchControllerClient.service_path) - parse_service_path = staticmethod(BatchControllerClient.parse_service_path) - common_billing_account_path = staticmethod(BatchControllerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BatchControllerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(BatchControllerClient.common_folder_path) - parse_common_folder_path = staticmethod(BatchControllerClient.parse_common_folder_path) - common_organization_path = staticmethod(BatchControllerClient.common_organization_path) - parse_common_organization_path = staticmethod(BatchControllerClient.parse_common_organization_path) - common_project_path = staticmethod(BatchControllerClient.common_project_path) - parse_common_project_path = staticmethod(BatchControllerClient.parse_common_project_path) - common_location_path = staticmethod(BatchControllerClient.common_location_path) - parse_common_location_path = staticmethod(BatchControllerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BatchControllerAsyncClient: The constructed client. - """ - return BatchControllerClient.from_service_account_info.__func__(BatchControllerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BatchControllerAsyncClient: The constructed client. - """ - return BatchControllerClient.from_service_account_file.__func__(BatchControllerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. 
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return BatchControllerClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> BatchControllerTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            BatchControllerTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    get_transport_class = functools.partial(type(BatchControllerClient).get_transport_class, type(BatchControllerClient))
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Union[str, BatchControllerTransport] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the batch controller client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, ~.BatchControllerTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = BatchControllerClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def create_batch(self,
-            request: Optional[Union[batches.CreateBatchRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            batch: Optional[batches.Batch] = None,
-            batch_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates a batch workload that executes
-        asynchronously.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            async def sample_create_batch():
-                # Create a client
-                client = dataproc_v1.BatchControllerAsyncClient()
-
-                # Initialize request argument(s)
-                batch = dataproc_v1.Batch()
-                batch.pyspark_batch.main_python_file_uri = "main_python_file_uri_value"
-
-                request = dataproc_v1.CreateBatchRequest(
-                    parent="parent_value",
-                    batch=batch,
-                )
-
-                # Make the request
-                operation = client.create_batch(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataproc_v1.types.CreateBatchRequest, dict]]):
-                The request object. A request to create a batch workload.
-            parent (:class:`str`):
-                Required. The parent resource where
-                this batch will be created.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            batch (:class:`google.cloud.dataproc_v1.types.Batch`):
-                Required. The batch to create.
-                This corresponds to the ``batch`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            batch_id (:class:`str`):
-                Optional. The ID to use for the batch, which will become
-                the final component of the batch's resource name.
-
-                This value must be 4-63 characters. Valid characters are
-                ``/[a-z][0-9]-/``.
-
-                This corresponds to the ``batch_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.dataproc_v1.types.Batch` A
-                representation of a batch workload in the service.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, batch, batch_id])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = batches.CreateBatchRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if batch is not None:
-            request.batch = batch
-        if batch_id is not None:
-            request.batch_id = batch_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.create_batch,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
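-        # The ("parent", request.parent) pair below is serialized into the
-        # ``x-goog-request-params`` request header (percent-encoded), which the
-        # service uses to route the call to the correct regional backend.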
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - batches.Batch, - metadata_type=operations.BatchOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_batch(self, - request: Optional[Union[batches.GetBatchRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> batches.Batch: - r"""Gets the batch workload resource representation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_get_batch(): - # Create a client - client = dataproc_v1.BatchControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetBatchRequest( - name="name_value", - ) - - # Make the request - response = await client.get_batch(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.GetBatchRequest, dict]]): - The request object. A request to get the resource - representation for a batch workload. - name (:class:`str`): - Required. The fully qualified name of the batch to - retrieve in the format - "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Batch: - A representation of a batch workload - in the service. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = batches.GetBatchRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_batch, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
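-        # ``rpc`` is the coroutine wrapped above by ``wrap_method``, so this
-        # single await also applies the configured retry policy and timeout.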
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_batches(self, - request: Optional[Union[batches.ListBatchesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBatchesAsyncPager: - r"""Lists batch workloads. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_list_batches(): - # Create a client - client = dataproc_v1.BatchControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ListBatchesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_batches(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.ListBatchesRequest, dict]]): - The request object. A request to list batch workloads in - a project. - parent (:class:`str`): - Required. The parent, which owns this - collection of batches. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesAsyncPager: - A list of batch workloads. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = batches.ListBatchesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_batches, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
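The ``ListBatchesAsyncPager`` constructed just below also exposes a ``pages`` iterator when whole pages are more convenient than individual items. A sketch, assuming an async client created elsewhere and a placeholder parent:

.. code-block:: python

    async def count_batches(client):
        pager = await client.list_batches(
            parent="projects/my-project/locations/us-central1",
        )
        total = 0
        async for page in pager.pages:  # whole ListBatchesResponse pages
            total += len(page.batches)
        return total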
- response = pagers.ListBatchesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_batch(self, - request: Optional[Union[batches.DeleteBatchRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes the batch workload resource. If the batch is not in - terminal state, the delete fails and the response returns - ``FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_delete_batch(): - # Create a client - client = dataproc_v1.BatchControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteBatchRequest( - name="name_value", - ) - - # Make the request - await client.delete_batch(request=request) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.DeleteBatchRequest, dict]]): - The request object. A request to delete a batch workload. - name (:class:`str`): - Required. The fully qualified name of the batch to - retrieve in the format - "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = batches.DeleteBatchRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_batch, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
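Since a non-terminal batch cannot be deleted (the docstring above notes the ``FAILED_PRECONDITION`` response), callers may want to catch the mapped ``api_core`` exception. A sketch with a hypothetical helper name:

.. code-block:: python

    from google.api_core import exceptions

    async def delete_finished_batch(client, name):
        try:
            await client.delete_batch(name=name)
        except exceptions.FailedPrecondition:
            # The batch is still running; wait for a terminal state first.
            pass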
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
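Because these operation mixins take raw ``operations_pb2`` messages rather than proto-plus types, a plain dict is also accepted and expanded into the message, as the comments above describe. A sketch (the operation name format is a placeholder):

.. code-block:: python

    from google.longrunning import operations_pb2

    request = operations_pb2.CancelOperationRequest(
        name="projects/my-project/regions/us-central1/operations/op-123",
    )
    # Equivalent dict form, expanded via CancelOperationRequest(**request):
    # await client.cancel_operation({"name": request.name})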
-        await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-    async def set_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Sets the IAM access control policy on the specified resource.
-
-        Replaces any existing policy.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
-                The request object. Request message for `SetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._client._transport.set_iam_policy,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a resource.
-
-        Returns an empty policy if the resource exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._client._transport.get_iam_policy,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
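The JSON policy example above maps directly onto a request dict, since protobuf constructors accept nested dicts for message fields. A sketch with placeholder resource and member values and a hypothetical helper name:

.. code-block:: python

    async def grant_viewer(client, resource):
        return await client.set_iam_policy(request={
            "resource": resource,
            "policy": {
                "bindings": [
                    {"role": "roles/viewer", "members": ["user:eve@example.com"]},
                ],
            },
        })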
-        return response
-
-    async def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a resource.
-
-        If the resource does not exist, this will return an empty set
-        of permissions, not a NOT_FOUND error.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
-                The request object. Request message for
-                `TestIamPermissions` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.iam_policy_pb2.TestIamPermissionsResponse:
-                Response message for ``TestIamPermissions`` method.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._client._transport.test_iam_permissions,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def __aenter__(self) -> "BatchControllerAsyncClient":
-        return self
-
-    async def __aexit__(self, exc_type, exc, tb):
-        await self.transport.close()
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-__all__ = (
-    "BatchControllerAsyncClient",
-)
diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/client.py
deleted file mode 100644
index 404a726c..00000000
--- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/client.py
+++ /dev/null
@@ -1,1361 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
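The ``__aenter__``/``__aexit__`` pair above makes the async client an async context manager that closes its transport on exit. A usage sketch (the batch name is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def main():
        async with dataproc_v1.BatchControllerAsyncClient() as client:
            batch = await client.get_batch(
                name="projects/my-project/locations/us-central1/batches/my-batch-0001",
            )
            print(batch.state)

    asyncio.run(main())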
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.batch_controller import pagers -from google.cloud.dataproc_v1.types import batches -from google.cloud.dataproc_v1.types import operations -from google.cloud.dataproc_v1.types import shared -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import BatchControllerGrpcTransport -from .transports.grpc_asyncio import BatchControllerGrpcAsyncIOTransport -from .transports.rest import BatchControllerRestTransport - - -class BatchControllerClientMeta(type): - """Metaclass for the BatchController client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[BatchControllerTransport]] - _transport_registry["grpc"] = BatchControllerGrpcTransport - _transport_registry["grpc_asyncio"] = BatchControllerGrpcAsyncIOTransport - _transport_registry["rest"] = BatchControllerRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[BatchControllerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class BatchControllerClient(metaclass=BatchControllerClientMeta): - """The BatchController provides methods to manage batch - workloads. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
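The metaclass registry above maps string labels to transport classes, so a transport can be chosen by name at construction time; omitting it falls back to the first registered entry (gRPC). A sketch:

.. code-block:: python

    from google.cloud import dataproc_v1

    rest_client = dataproc_v1.BatchControllerClient(transport="rest")
    grpc_client = dataproc_v1.BatchControllerClient()  # first registry entry: gRPC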
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataproc.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BatchControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BatchControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> BatchControllerTransport: - """Returns the transport used by the client instance. - - Returns: - BatchControllerTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def batch_path(project: str,location: str,batch: str,) -> str: - """Returns a fully-qualified batch string.""" - return "projects/{project}/locations/{location}/batches/{batch}".format(project=project, location=location, batch=batch, ) - - @staticmethod - def parse_batch_path(path: str) -> Dict[str,str]: - """Parses a batch path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/batches/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_path(project: str,location: str,service: str,) -> str: - """Returns a fully-qualified service string.""" - return "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, ) - - @staticmethod - def parse_service_path(path: str) -> Dict[str,str]: - """Parses a service path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, BatchControllerTransport]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the batch controller client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, BatchControllerTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
-                client. It won't take effect if a ``transport`` instance is provided.
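One way to observe the resolution order implemented above, assuming no client certificate is configured in the environment:

.. code-block:: python

    import os

    from google.api_core import client_options
    from google.cloud import dataproc_v1

    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    endpoint, cert = dataproc_v1.BatchControllerClient.get_mtls_endpoint_and_cert_source(
        client_options.ClientOptions()
    )
    assert endpoint == dataproc_v1.BatchControllerClient.DEFAULT_MTLS_ENDPOINT
    assert cert is None  # GOOGLE_API_USE_CLIENT_CERTIFICATE defaults to "false"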
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, BatchControllerTransport): - # transport is a BatchControllerTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_batch(self, - request: Optional[Union[batches.CreateBatchRequest, dict]] = None, - *, - parent: Optional[str] = None, - batch: Optional[batches.Batch] = None, - batch_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a batch workload that executes - asynchronously. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_create_batch(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - batch = dataproc_v1.Batch() - batch.pyspark_batch.main_python_file_uri = "main_python_file_uri_value" - - request = dataproc_v1.CreateBatchRequest( - parent="parent_value", - batch=batch, - ) - - # Make the request - operation = client.create_batch(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.CreateBatchRequest, dict]): - The request object. A request to create a batch workload. - parent (str): - Required. The parent resource where - this batch will be created. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - batch (google.cloud.dataproc_v1.types.Batch): - Required. The batch to create. - This corresponds to the ``batch`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - batch_id (str): - Optional. The ID to use for the batch, which will become - the final component of the batch's resource name. - - This value must be 4-63 characters. Valid characters are - ``/[a-z][0-9]-/``. - - This corresponds to the ``batch_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataproc_v1.types.Batch` A - representation of a batch workload in the service. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, batch, batch_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a batches.CreateBatchRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, batches.CreateBatchRequest): - request = batches.CreateBatchRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if batch is not None: - request.batch = batch - if batch_id is not None: - request.batch_id = batch_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.create_batch] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - batches.Batch, - metadata_type=operations.BatchOperationMetadata, - ) - - # Done; return the response. - return response - - def get_batch(self, - request: Optional[Union[batches.GetBatchRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> batches.Batch: - r"""Gets the batch workload resource representation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_get_batch(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.GetBatchRequest( - name="name_value", - ) - - # Make the request - response = client.get_batch(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.GetBatchRequest, dict]): - The request object. A request to get the resource - representation for a batch workload. - name (str): - Required. The fully qualified name of the batch to - retrieve in the format - "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Batch: - A representation of a batch workload - in the service. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a batches.GetBatchRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, batches.GetBatchRequest): - request = batches.GetBatchRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
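The flattened ``parent``/``batch``/``batch_id`` arguments documented above are an alternative to building the request object yourself; mixing them with ``request`` raises ``ValueError``. A sketch with placeholder names:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    operation = client.create_batch(
        parent="projects/my-project/locations/us-central1",
        batch=dataproc_v1.Batch(
            pyspark_batch=dataproc_v1.PySparkBatch(
                main_python_file_uri="gs://my-bucket/job.py",
            ),
        ),
        batch_id="my-batch-0001",  # 4-63 chars, per the docstring above
    )
    batch = operation.result()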
- rpc = self._transport._wrapped_methods[self._transport.get_batch] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_batches(self, - request: Optional[Union[batches.ListBatchesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListBatchesPager: - r"""Lists batch workloads. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_list_batches(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.ListBatchesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_batches(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.ListBatchesRequest, dict]): - The request object. A request to list batch workloads in - a project. - parent (str): - Required. The parent, which owns this - collection of batches. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesPager: - A list of batch workloads. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a batches.ListBatchesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, batches.ListBatchesRequest): - request = batches.ListBatchesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_batches] - - # Certain fields should be provided within the metadata header; - # add these here. 
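For the pager above, the page size can be tuned on the full request while the pager continues to hide the ``page_token`` plumbing. A sketch with a placeholder parent:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    request = dataproc_v1.ListBatchesRequest(
        parent="projects/my-project/locations/us-central1",
        page_size=50,  # items per underlying ListBatches RPC
    )
    for batch in client.list_batches(request=request):
        print(batch.name, batch.state)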
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBatchesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_batch(self, - request: Optional[Union[batches.DeleteBatchRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes the batch workload resource. If the batch is not in - terminal state, the delete fails and the response returns - ``FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_delete_batch(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteBatchRequest( - name="name_value", - ) - - # Make the request - client.delete_batch(request=request) - - Args: - request (Union[google.cloud.dataproc_v1.types.DeleteBatchRequest, dict]): - The request object. A request to delete a batch workload. - name (str): - Required. The fully qualified name of the batch to - retrieve in the format - "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID" - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a batches.DeleteBatchRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, batches.DeleteBatchRequest): - request = batches.DeleteBatchRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_batch] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "BatchControllerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
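Given the shared-transport warning above, the context-manager form is safest when the client owns its transport. A sketch (the batch name is a placeholder):

.. code-block:: python

    from google.cloud import dataproc_v1

    with dataproc_v1.BatchControllerClient() as client:
        batch = client.get_batch(
            name="projects/my-project/locations/us-central1/batches/my-batch-0001",
        )
        print(batch.state)
    # The transport is closed here; don't reuse it from another client.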
- rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
-        rpc = gapic_v1.method.wrap_method(
-            self._transport.cancel_operation,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Send the request.
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-    def set_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Sets the IAM access control policy on the specified function.
-
-        Replaces any existing policy.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
-                The request object. Request message for `SetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._transport.set_iam_policy,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
- rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BatchControllerClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/pagers.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/pagers.py deleted file mode 100644 index 15118cac..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/pagers.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
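The pager classes defined below are what ``BatchControllerClient.list_batches``
returns. A minimal consumption sketch (illustrative only, not generated code;
the ``parent`` value is a placeholder):

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()

    # Iterating the pager transparently follows ``next_page_token`` and
    # issues further ListBatches requests as pages are exhausted.
    for batch in client.list_batches(
        parent="projects/my-project/locations/us-central1",
    ):
        print(batch.name)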
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataproc_v1.types import batches - - -class ListBatchesPager: - """A pager for iterating through ``list_batches`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``batches`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBatches`` requests and continue to iterate - through the ``batches`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., batches.ListBatchesResponse], - request: batches.ListBatchesRequest, - response: batches.ListBatchesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListBatchesRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListBatchesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = batches.ListBatchesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[batches.ListBatchesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[batches.Batch]: - for page in self.pages: - yield from page.batches - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBatchesAsyncPager: - """A pager for iterating through ``list_batches`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``batches`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBatches`` requests and continue to iterate - through the ``batches`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListBatchesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[batches.ListBatchesResponse]], - request: batches.ListBatchesRequest, - response: batches.ListBatchesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListBatchesRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListBatchesResponse): - The initial response object. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = batches.ListBatchesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[batches.ListBatchesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[batches.Batch]: - async def async_generator(): - async for page in self.pages: - for response in page.batches: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/__init__.py deleted file mode 100644 index d81b8d0f..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import BatchControllerTransport -from .grpc import BatchControllerGrpcTransport -from .grpc_asyncio import BatchControllerGrpcAsyncIOTransport -from .rest import BatchControllerRestTransport -from .rest import BatchControllerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[BatchControllerTransport]] -_transport_registry['grpc'] = BatchControllerGrpcTransport -_transport_registry['grpc_asyncio'] = BatchControllerGrpcAsyncIOTransport -_transport_registry['rest'] = BatchControllerRestTransport - -__all__ = ( - 'BatchControllerTransport', - 'BatchControllerGrpcTransport', - 'BatchControllerGrpcAsyncIOTransport', - 'BatchControllerRestTransport', - 'BatchControllerRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/base.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/base.py deleted file mode 100644 index a4cb35c9..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/base.py +++ /dev/null @@ -1,267 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataproc_v1.types import batches -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class BatchControllerTransport(abc.ABC): - """Abstract transport class for BatchController.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataproc.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
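        # (Editorial note, not generated code: the resolution order implemented
        # below is (1) an explicit ``credentials`` object, (2) ``credentials_file``
        # loaded via ``google.auth.load_credentials_from_file``, then (3)
        # ``google.auth.default()`` from the environment; supplying both (1) and
        # (2) raises ``DuplicateCredentialArgs``.)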
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                                credentials_file,
-                                **scopes_kwargs,
-                                quota_project_id=quota_project_id
-                            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the audience if a credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
-        self._wrapped_methods = {
-            self.create_batch: gapic_v1.method.wrap_method(
-                self.create_batch,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_batch: gapic_v1.method.wrap_method(
-                self.get_batch,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_batches: gapic_v1.method.wrap_method(
-                self.list_batches,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.delete_batch: gapic_v1.method.wrap_method(
-                self.delete_batch,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        .. warning::
-             Only call this method if the transport is NOT shared
-             with other clients - this may cause errors in other clients!
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_batch(self) -> Callable[ - [batches.CreateBatchRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_batch(self) -> Callable[ - [batches.GetBatchRequest], - Union[ - batches.Batch, - Awaitable[batches.Batch] - ]]: - raise NotImplementedError() - - @property - def list_batches(self) -> Callable[ - [batches.ListBatchesRequest], - Union[ - batches.ListBatchesResponse, - Awaitable[batches.ListBatchesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_batch(self) -> Callable[ - [batches.DeleteBatchRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'BatchControllerTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py deleted file mode 100644 index e27b8c04..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/grpc.py +++ /dev/null @@ -1,520 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-
-from google.cloud.dataproc_v1.types import batches
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import BatchControllerTransport, DEFAULT_CLIENT_INFO
-
-
-class BatchControllerGrpcTransport(BatchControllerTransport):
-    """gRPC backend transport for BatchController.
-
-    The BatchController provides methods to manage batch
-    workloads.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'dataproc.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. 
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def operations_client(self) -> operations_v1.OperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Quick check: Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsClient(
-                self.grpc_channel
-            )
-
-        # Return the client from cache.
-        return self._operations_client
-
-    @property
-    def create_batch(self) -> Callable[
-            [batches.CreateBatchRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the create batch method over gRPC.
-
-        Creates a batch workload that executes
-        asynchronously.
-
-        Returns:
-            Callable[[~.CreateBatchRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_batch' not in self._stubs:
-            self._stubs['create_batch'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.BatchController/CreateBatch',
-                request_serializer=batches.CreateBatchRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['create_batch']
-
-    @property
-    def get_batch(self) -> Callable[
-            [batches.GetBatchRequest],
-            batches.Batch]:
-        r"""Return a callable for the get batch method over gRPC.
-
-        Gets the batch workload resource representation.
-
-        Returns:
-            Callable[[~.GetBatchRequest],
-                    ~.Batch]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'get_batch' not in self._stubs: - self._stubs['get_batch'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.BatchController/GetBatch', - request_serializer=batches.GetBatchRequest.serialize, - response_deserializer=batches.Batch.deserialize, - ) - return self._stubs['get_batch'] - - @property - def list_batches(self) -> Callable[ - [batches.ListBatchesRequest], - batches.ListBatchesResponse]: - r"""Return a callable for the list batches method over gRPC. - - Lists batch workloads. - - Returns: - Callable[[~.ListBatchesRequest], - ~.ListBatchesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_batches' not in self._stubs: - self._stubs['list_batches'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.BatchController/ListBatches', - request_serializer=batches.ListBatchesRequest.serialize, - response_deserializer=batches.ListBatchesResponse.deserialize, - ) - return self._stubs['list_batches'] - - @property - def delete_batch(self) -> Callable[ - [batches.DeleteBatchRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete batch method over gRPC. - - Deletes the batch workload resource. If the batch is not in - terminal state, the delete fails and the response returns - ``FAILED_PRECONDITION``. - - Returns: - Callable[[~.DeleteBatchRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_batch' not in self._stubs: - self._stubs['delete_batch'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.BatchController/DeleteBatch', - request_serializer=batches.DeleteBatchRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_batch'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'BatchControllerGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py deleted file mode 100644 index e5bdb80e..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/grpc_asyncio.py +++ /dev/null @@ -1,519 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.dataproc_v1.types import batches
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import BatchControllerTransport, DEFAULT_CLIENT_INFO
-from .grpc import BatchControllerGrpcTransport
-
-
-class BatchControllerGrpcAsyncIOTransport(BatchControllerTransport):
-    """gRPC AsyncIO backend transport for BatchController.
-
-    The BatchController provides methods to manage batch
-    workloads.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataproc.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_batch(self) -> Callable[ - [batches.CreateBatchRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create batch method over gRPC. - - Creates a batch workload that executes - asynchronously. - - Returns: - Callable[[~.CreateBatchRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_batch' not in self._stubs: - self._stubs['create_batch'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.BatchController/CreateBatch', - request_serializer=batches.CreateBatchRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_batch'] - - @property - def get_batch(self) -> Callable[ - [batches.GetBatchRequest], - Awaitable[batches.Batch]]: - r"""Return a callable for the get batch method over gRPC. - - Gets the batch workload resource representation. - - Returns: - Callable[[~.GetBatchRequest], - Awaitable[~.Batch]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_batch' not in self._stubs: - self._stubs['get_batch'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.BatchController/GetBatch', - request_serializer=batches.GetBatchRequest.serialize, - response_deserializer=batches.Batch.deserialize, - ) - return self._stubs['get_batch'] - - @property - def list_batches(self) -> Callable[ - [batches.ListBatchesRequest], - Awaitable[batches.ListBatchesResponse]]: - r"""Return a callable for the list batches method over gRPC. - - Lists batch workloads. - - Returns: - Callable[[~.ListBatchesRequest], - Awaitable[~.ListBatchesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_batches' not in self._stubs: - self._stubs['list_batches'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.BatchController/ListBatches', - request_serializer=batches.ListBatchesRequest.serialize, - response_deserializer=batches.ListBatchesResponse.deserialize, - ) - return self._stubs['list_batches'] - - @property - def delete_batch(self) -> Callable[ - [batches.DeleteBatchRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete batch method over gRPC. - - Deletes the batch workload resource. If the batch is not in - terminal state, the delete fails and the response returns - ``FAILED_PRECONDITION``. - - Returns: - Callable[[~.DeleteBatchRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_batch' not in self._stubs: - self._stubs['delete_batch'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.BatchController/DeleteBatch', - request_serializer=batches.DeleteBatchRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_batch'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
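Each RPC property above, and the operations mix-ins that follow, repeats one lazy stub-caching pattern: build the ``unary_unary`` callable once, key it by method name, and return the cached callable on every later access. A minimal sketch of that pattern (the class and method names here are illustrative, not part of the generated file):

.. code-block:: python

    from typing import Callable, Dict

    class StubCache:
        # Minimal sketch of the lazy per-RPC stub cache used by the
        # generated transport above.
        def __init__(self, channel):
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        def get(self, name: str, path: str, serializer, deserializer) -> Callable:
            # Build the gRPC callable on first use; reuse it afterwards.
            if name not in self._stubs:
                self._stubs[name] = self._channel.unary_unary(
                    path,
                    request_serializer=serializer,
                    response_deserializer=deserializer,
                )
            return self._stubs[name]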
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "delete_operation" not in self._stubs:
- self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
- "/google.longrunning.Operations/DeleteOperation",
- request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
- response_deserializer=None,
- )
- return self._stubs["delete_operation"]
-
- @property
- def cancel_operation(
- self,
- ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
- r"""Return a callable for the cancel_operation method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
- "/google.longrunning.Operations/CancelOperation",
- request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
- response_deserializer=None,
- )
- return self._stubs["cancel_operation"]
-
- @property
- def get_operation(
- self,
- ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
- r"""Return a callable for the get_operation method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self.grpc_channel.unary_unary(
- "/google.longrunning.Operations/GetOperation",
- request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs["get_operation"]
-
- @property
- def list_operations(
- self,
- ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
- r"""Return a callable for the list_operations method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self.grpc_channel.unary_unary(
- "/google.longrunning.Operations/ListOperations",
- request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
- response_deserializer=operations_pb2.ListOperationsResponse.FromString,
- )
- return self._stubs["list_operations"]
-
- @property
- def set_iam_policy(
- self,
- ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
- r"""Return a callable for the set iam policy method over gRPC.
- Sets the IAM access control policy on the specified
- resource. Replaces any existing policy.
- Returns:
- Callable[[~.SetIamPolicyRequest],
- ~.Policy]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'BatchControllerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py deleted file mode 100644 index 2cee3293..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py +++ /dev/null @@ -1,1380 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-
-from google.auth.transport.requests import AuthorizedSession # type: ignore
-import json # type: ignore
-import grpc # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import path_template
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from google.api_core import operations_v1
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2
-from requests import __version__ as requests_version
-import dataclasses
-import re
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
-except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object] # type: ignore
-
-
-from google.cloud.dataproc_v1.types import batches
-from google.protobuf import empty_pb2 # type: ignore
-
-from .base import BatchControllerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
-
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
- gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
- grpc_version=None,
- rest_version=requests_version,
-)
-
-
-class BatchControllerRestInterceptor:
- """Interceptor for BatchController.
-
- Interceptors are used to manipulate requests, request metadata, and responses
- in arbitrary ways.
- Example use cases include:
- * Logging
- * Verifying requests according to service or custom semantics
- * Stripping extraneous information from responses
-
- These use cases and more can be enabled by injecting an
- instance of a custom subclass when constructing the BatchControllerRestTransport.
-
- .. code-block:: python
- import logging
-
- class MyCustomBatchControllerInterceptor(BatchControllerRestInterceptor):
- def pre_create_batch(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_create_batch(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_delete_batch(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def pre_get_batch(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_batch(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_batches(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_batches(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- transport = BatchControllerRestTransport(interceptor=MyCustomBatchControllerInterceptor())
- client = BatchControllerClient(transport=transport)
-
-
- """
- def pre_create_batch(self, request: batches.CreateBatchRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[batches.CreateBatchRequest, Sequence[Tuple[str, str]]]:
- """Pre-rpc interceptor for create_batch
-
- Override in a subclass to manipulate the request or metadata
- before they are sent to the BatchController server.
- """ - return request, metadata - - def post_create_batch(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_batch - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - def pre_delete_batch(self, request: batches.DeleteBatchRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[batches.DeleteBatchRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_batch - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def pre_get_batch(self, request: batches.GetBatchRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[batches.GetBatchRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_batch - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_get_batch(self, response: batches.Batch) -> batches.Batch: - """Post-rpc interceptor for get_batch - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - def pre_list_batches(self, request: batches.ListBatchesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[batches.ListBatchesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_batches - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_list_batches(self, response: batches.ListBatchesResponse) -> batches.ListBatchesResponse: - """Post-rpc interceptor for list_batches - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. 
- """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchController server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the BatchController server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class BatchControllerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BatchControllerRestInterceptor - - -class BatchControllerRestTransport(BatchControllerTransport): - """REST backend transport for BatchController. - - The BatchController provides methods to manage batch - workloads. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[BatchControllerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
- # credentials object
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience
- )
- self._session = AuthorizedSession(
- self._credentials, default_host=self.DEFAULT_HOST)
- self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
- if client_cert_source_for_mtls:
- self._session.configure_mtls_channel(client_cert_source_for_mtls)
- self._interceptor = interceptor or BatchControllerRestInterceptor()
- self._prep_wrapped_messages(client_info)
-
- @property
- def operations_client(self) -> operations_v1.AbstractOperationsClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Only create a new client if we do not already have one.
- if self._operations_client is None:
- http_options: Dict[str, List[Dict[str, str]]] = {
- 'google.longrunning.Operations.CancelOperation': [
- {
- 'method': 'post',
- 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel',
- },
- {
- 'method': 'post',
- 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
- },
- ],
- 'google.longrunning.Operations.DeleteOperation': [
- {
- 'method': 'delete',
- 'uri': '/v1/{name=projects/*/regions/*/operations/*}',
- },
- {
- 'method': 'delete',
- 'uri': '/v1/{name=projects/*/locations/*/operations/*}',
- },
- ],
- 'google.longrunning.Operations.GetOperation': [
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/regions/*/operations/*}',
- },
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/locations/*/operations/*}',
- },
- ],
- 'google.longrunning.Operations.ListOperations': [
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/regions/*/operations}',
- },
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/locations/*/operations}',
- },
- ],
- }
-
- rest_transport = operations_v1.OperationsRestTransport(
- host=self._host,
- # use the credentials which are saved
- credentials=self._credentials,
- scopes=self._scopes,
- http_options=http_options,
- path_prefix="v1")
-
- self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
-
- # Return the client from cache.
- return self._operations_client
-
- class _CreateBatch(BatchControllerRestStub):
- def __hash__(self):
- return hash("CreateBatch")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- }
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
- def __call__(self,
- request: batches.CreateBatchRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, str]]=(),
- ) -> operations_pb2.Operation:
- r"""Call the create batch method over HTTP.
-
- Args:
- request (~.batches.CreateBatchRequest):
- The request object. A request to create a batch workload.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/batches', - 'body': 'batch', - }, - ] - request, metadata = self._interceptor.pre_create_batch(request, metadata) - pb_request = batches.CreateBatchRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_batch(resp) - return resp - - class _DeleteBatch(BatchControllerRestStub): - def __hash__(self): - return hash("DeleteBatch") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: batches.DeleteBatchRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete batch method over HTTP. - - Args: - request (~.batches.DeleteBatchRequest): - The request object. A request to delete a batch workload. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/batches/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_batch(request, metadata) - pb_request = batches.DeleteBatchRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetBatch(BatchControllerRestStub): - def __hash__(self): - return hash("GetBatch") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: batches.GetBatchRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> batches.Batch: - r"""Call the get batch method over HTTP. - - Args: - request (~.batches.GetBatchRequest): - The request object. A request to get the resource - representation for a batch workload. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.batches.Batch: - A representation of a batch workload - in the service. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/batches/*}', - }, - ] - request, metadata = self._interceptor.pre_get_batch(request, metadata) - pb_request = batches.GetBatchRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = batches.Batch() - pb_resp = batches.Batch.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_batch(resp) - return resp - - class _ListBatches(BatchControllerRestStub): - def __hash__(self): - return hash("ListBatches") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: batches.ListBatchesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> batches.ListBatchesResponse: - r"""Call the list batches method over HTTP. - - Args: - request (~.batches.ListBatchesRequest): - The request object. A request to list batch workloads in - a project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.batches.ListBatchesResponse: - A list of batch workloads. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/batches', - }, - ] - request, metadata = self._interceptor.pre_list_batches(request, metadata) - pb_request = batches.ListBatchesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = batches.ListBatchesResponse() - pb_resp = batches.ListBatchesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_batches(resp) - return resp - - @property - def create_batch(self) -> Callable[ - [batches.CreateBatchRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBatch(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_batch(self) -> Callable[ - [batches.DeleteBatchRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
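Underlying all of these ``__call__`` implementations, ``path_template.transcode`` matches the request against the ``http_options`` patterns and yields the concrete method, URI, and leftover query parameters. A hedged sketch of what it produces (the resource values are assumptions for illustration):

.. code-block:: python

    from google.api_core import path_template

    http_options = [{
        'method': 'get',
        'uri': '/v1/{name=projects/*/locations/*/batches/*}',
    }]
    transcoded = path_template.transcode(
        http_options, name='projects/p/locations/us-central1/batches/b1')
    # transcoded['method'] == 'get'
    # transcoded['uri'] == '/v1/projects/p/locations/us-central1/batches/b1'
    # any remaining request fields would land in transcoded['query_params']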
- # In C++ this would require a dynamic_cast - return self._DeleteBatch(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_batch(self) -> Callable[ - [batches.GetBatchRequest], - batches.Batch]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBatch(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_batches(self) -> Callable[ - [batches.ListBatchesRequest], - batches.ListBatchesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBatches(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(BatchControllerRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(BatchControllerRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(BatchControllerRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(BatchControllerRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(BatchControllerRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(BatchControllerRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(BatchControllerRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'BatchControllerRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/__init__.py deleted file mode 100644 index 5c7af565..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
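The REST transport whose deletion ends above is normally selected through the synchronous client rather than instantiated directly. A hedged usage sketch (the batch name is an assumption; credentials are taken from the environment):

.. code-block:: python

    from google.cloud import dataproc_v1

    # transport="rest" routes calls through BatchControllerRestTransport.
    client = dataproc_v1.BatchControllerClient(transport="rest")
    batch = client.get_batch(
        name="projects/my-project/locations/us-central1/batches/my-batch")
    print(batch.state)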
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import ClusterControllerClient -from .async_client import ClusterControllerAsyncClient - -__all__ = ( - 'ClusterControllerClient', - 'ClusterControllerAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/async_client.py deleted file mode 100644 index 8201a89e..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ /dev/null @@ -1,1890 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.cluster_controller import pagers -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import operations -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import ClusterControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport -from .client import ClusterControllerClient - - -class ClusterControllerAsyncClient: - """The ClusterControllerService provides methods to manage - clusters of Compute Engine instances. 
- """ - - _client: ClusterControllerClient - - DEFAULT_ENDPOINT = ClusterControllerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ClusterControllerClient.DEFAULT_MTLS_ENDPOINT - - cluster_path = staticmethod(ClusterControllerClient.cluster_path) - parse_cluster_path = staticmethod(ClusterControllerClient.parse_cluster_path) - node_group_path = staticmethod(ClusterControllerClient.node_group_path) - parse_node_group_path = staticmethod(ClusterControllerClient.parse_node_group_path) - service_path = staticmethod(ClusterControllerClient.service_path) - parse_service_path = staticmethod(ClusterControllerClient.parse_service_path) - common_billing_account_path = staticmethod(ClusterControllerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ClusterControllerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(ClusterControllerClient.common_folder_path) - parse_common_folder_path = staticmethod(ClusterControllerClient.parse_common_folder_path) - common_organization_path = staticmethod(ClusterControllerClient.common_organization_path) - parse_common_organization_path = staticmethod(ClusterControllerClient.parse_common_organization_path) - common_project_path = staticmethod(ClusterControllerClient.common_project_path) - parse_common_project_path = staticmethod(ClusterControllerClient.parse_common_project_path) - common_location_path = staticmethod(ClusterControllerClient.common_location_path) - parse_common_location_path = staticmethod(ClusterControllerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClusterControllerAsyncClient: The constructed client. - """ - return ClusterControllerClient.from_service_account_info.__func__(ClusterControllerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClusterControllerAsyncClient: The constructed client. - """ - return ClusterControllerClient.from_service_account_file.__func__(ClusterControllerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return ClusterControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> ClusterControllerTransport: - """Returns the transport used by the client instance. - - Returns: - ClusterControllerTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(ClusterControllerClient).get_transport_class, type(ClusterControllerClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ClusterControllerTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cluster controller client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.ClusterControllerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
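            For illustration only, a minimal sketch of wiring these options together;
            the regional endpoint value is an assumption, not something this diff
            prescribes:

            .. code-block:: python

                from google.api_core.client_options import ClientOptions
                from google.cloud import dataproc_v1

                # Illustrative regional endpoint; an explicit override here takes
                # precedence over the GOOGLE_API_USE_MTLS_ENDPOINT environment variable.
                options = ClientOptions(api_endpoint="us-central1-dataproc.googleapis.com:443")
                client = dataproc_v1.ClusterControllerAsyncClient(client_options=options)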
- """ - self._client = ClusterControllerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_cluster(self, - request: Optional[Union[clusters.CreateClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster: Optional[clusters.Cluster] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_create_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - cluster = dataproc_v1.Cluster() - cluster.project_id = "project_id_value" - cluster.cluster_name = "cluster_name_value" - - request = dataproc_v1.CreateClusterRequest( - project_id="project_id_value", - region="region_value", - cluster=cluster, - ) - - # Make the request - operation = client.create_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.CreateClusterRequest, dict]]): - The request object. A request to create a cluster. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster (:class:`google.cloud.dataproc_v1.types.Cluster`): - Required. The cluster to create. - This corresponds to the ``cluster`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, region, cluster]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clusters.CreateClusterRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster is not None: - request.cluster = cluster - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - async def update_cluster(self, - request: Optional[Union[clusters.UpdateClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - cluster: Optional[clusters.Cluster] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - The cluster must be in a - [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] - state or an error is returned. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_update_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - cluster = dataproc_v1.Cluster() - cluster.project_id = "project_id_value" - cluster.cluster_name = "cluster_name_value" - - request = dataproc_v1.UpdateClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - cluster=cluster, - ) - - # Make the request - operation = client.update_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.UpdateClusterRequest, dict]]): - The request object. A request to update a cluster. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project the cluster belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (:class:`str`): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster (:class:`google.cloud.dataproc_v1.types.Cluster`): - Required. The changes to the cluster. - This corresponds to the ``cluster`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Specifies the path, relative to ``Cluster``, - of the field to update. For example, to change the - number of workers in a cluster to 5, the ``update_mask`` - parameter would be specified as - ``config.worker_config.num_instances``, and the - ``PATCH`` request body would specify the new value, as - follows: - - :: - - { - "config":{ - "workerConfig":{ - "numInstances":"5" - } - } - } - - Similarly, to change the number of preemptible workers - in a cluster to 5, the ``update_mask`` parameter would - be ``config.secondary_worker_config.num_instances``, and - the ``PATCH`` request body would be set as follows: - - :: - - { - "config":{ - "secondaryWorkerConfig":{ - "numInstances":"5" - } - } - } - - Note: Currently, only the following fields can be - updated: - - .. raw:: html - - - - - - - - - - - - - - - - - - - - - - - -
-                      =============================================  ===============================================
-                      Mask                                           Purpose
-                      =============================================  ===============================================
-                      labels                                         Update labels
-                      config.worker_config.num_instances             Resize primary worker group
-                      config.secondary_worker_config.num_instances   Resize secondary worker group
-                      config.autoscaling_config.policy_uri           Use, stop using, or change autoscaling policies
-                      =============================================  ===============================================
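                For illustration, a sketch of expressing the first resize mask above
                with the protobuf ``FieldMask`` type; project, region, and cluster
                names are placeholders:

                .. code-block:: python

                    from google.cloud import dataproc_v1
                    from google.protobuf import field_mask_pb2

                    async def resize_primary_workers():
                        client = dataproc_v1.ClusterControllerAsyncClient()

                        # Only the masked path is applied; the rest of `cluster`
                        # is ignored, per the PATCH semantics described above.
                        cluster = dataproc_v1.Cluster()
                        cluster.config.worker_config.num_instances = 5

                        operation = await client.update_cluster(
                            project_id="my-project",    # placeholder
                            region="us-central1",       # placeholder
                            cluster_name="my-cluster",  # placeholder
                            cluster=cluster,
                            update_mask=field_mask_pb2.FieldMask(
                                paths=["config.worker_config.num_instances"]
                            ),
                        )
                        print(await operation.result())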
- - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, cluster_name, cluster, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clusters.UpdateClusterRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - if cluster is not None: - request.cluster = cluster - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - async def stop_cluster(self, - request: Optional[Union[clusters.StopClusterRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Stops a cluster in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_stop_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.StopClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.stop_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.StopClusterRequest, dict]]): - The request object. A request to stop a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - request = clusters.StopClusterRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.stop_cluster, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - async def start_cluster(self, - request: Optional[Union[clusters.StartClusterRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts a cluster in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_start_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.StartClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.start_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.StartClusterRequest, dict]]): - The request object. A request to start a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - request = clusters.StartClusterRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.start_cluster, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_cluster(self, - request: Optional[Union[clusters.DeleteClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_delete_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.delete_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.DeleteClusterRequest, dict]]): - The request object. A request to delete a cluster. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (:class:`str`): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, cluster_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clusters.DeleteClusterRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_cluster(self, - request: Optional[Union[clusters.GetClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clusters.Cluster: - r"""Gets the resource representation for a cluster in a - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_get_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - response = await client.get_cluster(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.GetClusterRequest, dict]]): - The request object. Request to get the resource - representation for a cluster in a - project. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (:class:`str`): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Cluster: - Describes the identifying - information, config, and status of a - Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, region, cluster_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clusters.GetClusterRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_clusters(self, - request: Optional[Union[clusters.ListClustersRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListClustersAsyncPager: - r"""Lists all regions/{region}/clusters in a project - alphabetically. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_list_clusters(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ListClustersRequest( - project_id="project_id_value", - region="region_value", - ) - - # Make the request - page_result = client.list_clusters(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.ListClustersRequest, dict]]): - The request object. A request to list the clusters in a - project. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - Optional. 
A filter constraining the clusters to list. - Filters are case-sensitive and have the following - syntax: - - field = value [AND [field = value]] ... - - where **field** is one of ``status.state``, - ``clusterName``, or ``labels.[KEY]``, and ``[KEY]`` is a - label key. **value** can be ``*`` to match all values. - ``status.state`` can be one of the following: - ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the - cluster provided at creation time. Only the logical - ``AND`` operator is supported; space-separated items are - treated as having an implicit ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND clusterName = mycluster AND - labels.env = staging AND labels.starred = \* - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersAsyncPager: - The list of all clusters in a - project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, filter]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clusters.ListClustersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_clusters, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListClustersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
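        As a sketch of the filter grammar documented above, passed through the
        flattened ``filter`` argument (project and region are placeholders):

        .. code-block:: python

            from google.cloud import dataproc_v1

            async def list_staging_clusters():
                client = dataproc_v1.ClusterControllerAsyncClient()

                pager = await client.list_clusters(
                    project_id="my-project",  # placeholder
                    region="us-central1",     # placeholder
                    filter="status.state = ACTIVE AND labels.env = staging",
                )
                # The pager resolves additional pages transparently.
                async for cluster in pager:
                    print(cluster.cluster_name)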
- return response - - async def diagnose_cluster(self, - request: Optional[Union[clusters.DiagnoseClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Gets cluster diagnostic information. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - After the operation completes, - [Operation.response][google.longrunning.Operation.response] - contains - `DiagnoseClusterResults `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_diagnose_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DiagnoseClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.diagnose_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.DiagnoseClusterRequest, dict]]): - The request object. A request to collect cluster - diagnostic information. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (:class:`str`): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataproc_v1.types.DiagnoseClusterResults` - The location of diagnostic output. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, region, cluster_name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = clusters.DiagnoseClusterRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.diagnose_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clusters.DiagnoseClusterResults, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
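        Because this mixin accepts either a ``ListOperationsRequest`` or a dict
        coerced into one, a minimal call might look like the following sketch
        (the operations parent name is a placeholder):

        .. code-block:: python

            from google.cloud import dataproc_v1

            async def show_operations():
                client = dataproc_v1.ClusterControllerAsyncClient()

                response = await client.list_operations(
                    {"name": "projects/my-project/regions/us-central1/operations"}
                )
                for op in response.operations:
                    print(op.name, op.done)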
- return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
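        A sketch of the same dict-coercion pattern for this method; the
        operation name is a placeholder:

        .. code-block:: python

            from google.cloud import dataproc_v1

            async def drop_operation(name: str):
                client = dataproc_v1.ClusterControllerAsyncClient()

                # Discards the server-side record of the operation; it does
                # not cancel any work still in flight.
                await client.delete_operation({"name": name})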
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
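        In Python terms, the binding structure shown in the JSON and YAML
        examples above might be built and applied like this sketch (role and
        member values are placeholders):

        .. code-block:: python

            from google.cloud import dataproc_v1
            from google.iam.v1 import iam_policy_pb2, policy_pb2

            async def grant_viewer(resource: str):
                client = dataproc_v1.ClusterControllerAsyncClient()

                policy = policy_pb2.Policy(
                    bindings=[
                        policy_pb2.Binding(
                            role="roles/viewer",               # placeholder
                            members=["user:eve@example.com"],  # placeholder
                        )
                    ]
                )
                # set_iam_policy replaces any existing policy on the resource.
                await client.set_iam_policy(
                    iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
                )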
- rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "ClusterControllerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ClusterControllerAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/client.py deleted file mode 100644 index b0aa61f3..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ /dev/null @@ -1,2071 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
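The async client deleted above doubles as an async context manager: ``__aenter__`` returns the client and ``__aexit__`` closes the transport. A minimal usage sketch, assuming default credentials are available in the environment; the project, region, and cluster names are hypothetical placeholders:

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1


    async def main():
        # Exiting the block closes the underlying gRPC transport.
        async with dataproc_v1.ClusterControllerAsyncClient() as client:
            request = dataproc_v1.GetClusterRequest(
                project_id="my-project",
                region="us-central1",
                cluster_name="my-cluster",
            )
            cluster = await client.get_cluster(request=request)
            print(cluster.cluster_name, cluster.status.state)


    asyncio.run(main())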
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.cluster_controller import pagers -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import operations -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import ClusterControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import ClusterControllerGrpcTransport -from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport -from .transports.rest import ClusterControllerRestTransport - - -class ClusterControllerClientMeta(type): - """Metaclass for the ClusterController client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[ClusterControllerTransport]] - _transport_registry["grpc"] = ClusterControllerGrpcTransport - _transport_registry["grpc_asyncio"] = ClusterControllerGrpcAsyncIOTransport - _transport_registry["rest"] = ClusterControllerRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ClusterControllerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ClusterControllerClient(metaclass=ClusterControllerClientMeta): - """The ClusterControllerService provides methods to manage - clusters of Compute Engine instances. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataproc.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClusterControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClusterControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ClusterControllerTransport: - """Returns the transport used by the client instance. - - Returns: - ClusterControllerTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def cluster_path(project: str,location: str,cluster: str,) -> str: - """Returns a fully-qualified cluster string.""" - return "projects/{project}/locations/{location}/clusters/{cluster}".format(project=project, location=location, cluster=cluster, ) - - @staticmethod - def parse_cluster_path(path: str) -> Dict[str,str]: - """Parses a cluster path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def node_group_path(project: str,region: str,cluster: str,node_group: str,) -> str: - """Returns a fully-qualified node_group string.""" - return "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(project=project, region=region, cluster=cluster, node_group=node_group, ) - - @staticmethod - def parse_node_group_path(path: str) -> Dict[str,str]: - """Parses a node_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/clusters/(?P.+?)/nodeGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_path(project: str,location: str,service: str,) -> str: - """Returns a fully-qualified service string.""" - return "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, ) - - @staticmethod - def parse_service_path(path: str) -> Dict[str,str]: - """Parses a service path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/services/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, 
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, ClusterControllerTransport]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the cluster controller client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests.
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ClusterControllerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, ClusterControllerTransport): - # transport is a ClusterControllerTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_cluster(self, - request: Optional[Union[clusters.CreateClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster: Optional[clusters.Cluster] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_create_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - cluster = dataproc_v1.Cluster() - cluster.project_id = "project_id_value" - cluster.cluster_name = "cluster_name_value" - - request = dataproc_v1.CreateClusterRequest( - project_id="project_id_value", - region="region_value", - cluster=cluster, - ) - - # Make the request - operation = client.create_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.CreateClusterRequest, dict]): - The request object. A request to create a cluster. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster (google.cloud.dataproc_v1.types.Cluster): - Required. The cluster to create. - This corresponds to the ``cluster`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, cluster]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clusters.CreateClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.CreateClusterRequest): - request = clusters.CreateClusterRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster is not None: - request.cluster = cluster - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_cluster] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - def update_cluster(self, - request: Optional[Union[clusters.UpdateClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - cluster: Optional[clusters.Cluster] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Updates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - The cluster must be in a - [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] - state or an error is returned. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_update_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - cluster = dataproc_v1.Cluster() - cluster.project_id = "project_id_value" - cluster.cluster_name = "cluster_name_value" - - request = dataproc_v1.UpdateClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - cluster=cluster, - ) - - # Make the request - operation = client.update_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.UpdateClusterRequest, dict]): - The request object. A request to update a cluster. - project_id (str): - Required. The ID of the Google Cloud - Platform project the cluster belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (str): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster (google.cloud.dataproc_v1.types.Cluster): - Required. The changes to the cluster. - This corresponds to the ``cluster`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Specifies the path, relative to ``Cluster``, - of the field to update. For example, to change the - number of workers in a cluster to 5, the ``update_mask`` - parameter would be specified as - ``config.worker_config.num_instances``, and the - ``PATCH`` request body would specify the new value, as - follows: - - :: - - { - "config":{ - "workerConfig":{ - "numInstances":"5" - } - } - } - - Similarly, to change the number of preemptible workers - in a cluster to 5, the ``update_mask`` parameter would - be ``config.secondary_worker_config.num_instances``, and - the ``PATCH`` request body would be set as follows: - - :: - - { - "config":{ - "secondaryWorkerConfig":{ - "numInstances":"5" - } - } - } - - Note: Currently, only the following fields can be - updated: - - .. raw:: html - - - - - - - - - - - - - - - - - - - - - - - -
-                      <table>
-                      <tbody>
-                      <tr>
-                      <td><strong>Mask</strong></td>
-                      <td><strong>Purpose</strong></td>
-                      </tr>
-                      <tr>
-                      <td><em>labels</em></td>
-                      <td>Update labels</td>
-                      </tr>
-                      <tr>
-                      <td><em>config.worker_config.num_instances</em></td>
-                      <td>Resize primary worker group</td>
-                      </tr>
-                      <tr>
-                      <td><em>config.secondary_worker_config.num_instances</em></td>
-                      <td>Resize secondary worker group</td>
-                      </tr>
-                      <tr>
-                      <td><em>config.autoscaling_config.policy_uri</em></td>
-                      <td>Use, stop using, or change autoscaling policies</td>
-                      </tr>
-                      </tbody>
-                      </table>
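Reading the table together with the surrounding docstring: ``update_cluster`` reads only the masked paths from the ``cluster`` argument. A minimal resize sketch using the flattened parameters, with hypothetical project, region, and cluster names:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.protobuf import field_mask_pb2

    client = dataproc_v1.ClusterControllerClient()

    # Only config.worker_config.num_instances is read from `cluster`.
    cluster = dataproc_v1.Cluster(
        config=dataproc_v1.ClusterConfig(
            worker_config=dataproc_v1.InstanceGroupConfig(num_instances=5),
        ),
    )
    operation = client.update_cluster(
        project_id="my-project",
        region="us-central1",
        cluster_name="my-cluster",
        cluster=cluster,
        update_mask=field_mask_pb2.FieldMask(
            paths=["config.worker_config.num_instances"]
        ),
    )
    cluster = operation.result()  # blocks until the long-running operation completes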
- - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, cluster_name, cluster, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clusters.UpdateClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.UpdateClusterRequest): - request = clusters.UpdateClusterRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - if cluster is not None: - request.cluster = cluster - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_cluster] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - def stop_cluster(self, - request: Optional[Union[clusters.StopClusterRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Stops a cluster in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_stop_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.StopClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.stop_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.StopClusterRequest, dict]): - The request object. A request to stop a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clusters.StopClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.StopClusterRequest): - request = clusters.StopClusterRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.stop_cluster] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - def start_cluster(self, - request: Optional[Union[clusters.StartClusterRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Starts a cluster in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_start_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.StartClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.start_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.StartClusterRequest, dict]): - The request object. A request to start a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of - a Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a clusters.StartClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.StartClusterRequest): - request = clusters.StartClusterRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.start_cluster] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clusters.Cluster, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - def delete_cluster(self, - request: Optional[Union[clusters.DeleteClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Deletes a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_delete_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.delete_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.DeleteClusterRequest, dict]): - The request object. A request to delete a cluster. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (str): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, cluster_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clusters.DeleteClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.DeleteClusterRequest): - request = clusters.DeleteClusterRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_cluster] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - def get_cluster(self, - request: Optional[Union[clusters.GetClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clusters.Cluster: - r"""Gets the resource representation for a cluster in a - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_get_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.GetClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - response = client.get_cluster(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.GetClusterRequest, dict]): - The request object. Request to get the resource - representation for a cluster in a - project. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (str): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Cluster: - Describes the identifying - information, config, and status of a - Dataproc cluster - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_id, region, cluster_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clusters.GetClusterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.GetClusterRequest): - request = clusters.GetClusterRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_cluster] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_clusters(self, - request: Optional[Union[clusters.ListClustersRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListClustersPager: - r"""Lists all regions/{region}/clusters in a project - alphabetically. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_list_clusters(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.ListClustersRequest( - project_id="project_id_value", - region="region_value", - ) - - # Make the request - page_result = client.list_clusters(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.ListClustersRequest, dict]): - The request object. A request to list the clusters in a - project. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - Optional. A filter constraining the clusters to list. 
- Filters are case-sensitive and have the following - syntax: - - field = value [AND [field = value]] ... - - where **field** is one of ``status.state``, - ``clusterName``, or ``labels.[KEY]``, and ``[KEY]`` is a - label key. **value** can be ``*`` to match all values. - ``status.state`` can be one of the following: - ``ACTIVE``, ``INACTIVE``, ``CREATING``, ``RUNNING``, - ``ERROR``, ``DELETING``, or ``UPDATING``. ``ACTIVE`` - contains the ``CREATING``, ``UPDATING``, and ``RUNNING`` - states. ``INACTIVE`` contains the ``DELETING`` and - ``ERROR`` states. ``clusterName`` is the name of the - cluster provided at creation time. Only the logical - ``AND`` operator is supported; space-separated items are - treated as having an implicit ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND clusterName = mycluster AND - labels.env = staging AND labels.starred = \* - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersPager: - The list of all clusters in a - project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, filter]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clusters.ListClustersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.ListClustersRequest): - request = clusters.ListClustersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_clusters] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListClustersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def diagnose_cluster(self, - request: Optional[Union[clusters.DiagnoseClusterRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - cluster_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Gets cluster diagnostic information. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - After the operation completes, - [Operation.response][google.longrunning.Operation.response] - contains - `DiagnoseClusterResults `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_diagnose_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.DiagnoseClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.diagnose_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.DiagnoseClusterRequest, dict]): - The request object. A request to collect cluster - diagnostic information. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the cluster - belongs to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - cluster_name (str): - Required. The cluster name. - This corresponds to the ``cluster_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataproc_v1.types.DiagnoseClusterResults` - The location of diagnostic output. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, cluster_name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a clusters.DiagnoseClusterRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, clusters.DiagnoseClusterRequest): - request = clusters.DiagnoseClusterRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if cluster_name is not None: - request.cluster_name = cluster_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.diagnose_cluster] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("cluster_name", request.cluster_name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clusters.DiagnoseClusterResults, - metadata_type=operations.ClusterOperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ClusterControllerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
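Cancellation follows the same shape; as the docstring above notes, it is best effort, so a sketch like this may still observe the operation running to completion (the name is again a placeholder)::

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()

    # Ask the server to cancel; poll afterwards to see whether it took.
    client.cancel_operation(
        {"name": "projects/my-project/regions/us-central1/operations/op-123"}
    )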
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
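A hedged sketch of calling ``set_iam_policy`` as defined above, building the request from the raw ``google.iam.v1`` protos; the resource path and member are placeholders::

    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    client = dataproc_v1.ClusterControllerClient()

    # One binding granting a role to a single user, mirroring the JSON
    # example above.
    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/viewer",
                members=["user:eve@example.com"],
            )
        ]
    )

    client.set_iam_policy(
        iam_policy_pb2.SetIamPolicyRequest(
            resource="projects/my-project/regions/us-central1/clusters/my-cluster",
            policy=policy,
        )
    )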
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
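Since ``set_iam_policy`` replaces the whole policy, the usual pattern pairs it with ``get_iam_policy`` in a read-modify-write cycle; the returned policy carries an ``etag`` that helps guard against concurrent edits. A sketch with placeholder names::

    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2

    client = dataproc_v1.ClusterControllerClient()
    resource = "projects/my-project/regions/us-central1/clusters/my-cluster"

    # Read the current policy (including its etag), append a binding,
    # and write the result back.
    policy = client.get_iam_policy(
        iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )
    policy.bindings.add(role="roles/viewer", members=["user:eve@example.com"])
    client.set_iam_policy(
        iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )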
- rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ClusterControllerClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/pagers.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/pagers.py deleted file mode 100644 index 794b894e..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/pagers.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataproc_v1.types import clusters - - -class ListClustersPager: - """A pager for iterating through ``list_clusters`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListClustersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``clusters`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListClusters`` requests and continue to iterate - through the ``clusters`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListClustersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., clusters.ListClustersResponse], - request: clusters.ListClustersRequest, - response: clusters.ListClustersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListClustersRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListClustersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
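In practice the pager is rarely constructed by hand; ``list_clusters`` returns one, and plain iteration walks every cluster while follow-up pages are fetched lazily (project and region are placeholders)::

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()

    # Iterating the pager yields Cluster messages across all pages.
    for cluster in client.list_clusters(
        project_id="my-project", region="us-central1"
    ):
        print(cluster.cluster_name, cluster.status.state)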
- """ - self._method = method - self._request = clusters.ListClustersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[clusters.ListClustersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[clusters.Cluster]: - for page in self.pages: - yield from page.clusters - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListClustersAsyncPager: - """A pager for iterating through ``list_clusters`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListClustersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``clusters`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListClusters`` requests and continue to iterate - through the ``clusters`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListClustersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[clusters.ListClustersResponse]], - request: clusters.ListClustersRequest, - response: clusters.ListClustersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListClustersRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListClustersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = clusters.ListClustersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[clusters.ListClustersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[clusters.Cluster]: - async def async_generator(): - async for page in self.pages: - for response in page.clusters: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py deleted file mode 100644 index 9e9cf17d..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ClusterControllerTransport -from .grpc import ClusterControllerGrpcTransport -from .grpc_asyncio import ClusterControllerGrpcAsyncIOTransport -from .rest import ClusterControllerRestTransport -from .rest import ClusterControllerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ClusterControllerTransport]] -_transport_registry['grpc'] = ClusterControllerGrpcTransport -_transport_registry['grpc_asyncio'] = ClusterControllerGrpcAsyncIOTransport -_transport_registry['rest'] = ClusterControllerRestTransport - -__all__ = ( - 'ClusterControllerTransport', - 'ClusterControllerGrpcTransport', - 'ClusterControllerGrpcAsyncIOTransport', - 'ClusterControllerRestTransport', - 'ClusterControllerRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py deleted file mode 100644 index 5d998219..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py +++ /dev/null @@ -1,362 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
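The registry above is what lets callers select a transport by name; the same keys are accepted by the client constructor, e.g.::

    from google.cloud import dataproc_v1

    # Pick the wire protocol explicitly; omitting transport defaults
    # to gRPC.
    grpc_client = dataproc_v1.ClusterControllerClient(transport="grpc")
    rest_client = dataproc_v1.ClusterControllerClient(transport="rest")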
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class ClusterControllerTransport(abc.ABC): - """Abstract transport class for ClusterController.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataproc.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
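A sketch of the credential paths handled above: explicit credentials, a credentials file, or environment defaults; passing both explicit credentials and ``credentials_file`` raises ``DuplicateCredentialArgs``. The key path below is a placeholder::

    from google.cloud import dataproc_v1
    from google.oauth2 import service_account

    # Explicit service-account credentials with the cloud-platform scope
    # the transport declares in AUTH_SCOPES.
    creds = service_account.Credentials.from_service_account_file(
        "/path/to/key.json",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = dataproc_v1.ClusterControllerClient(credentials=creds)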
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_cluster: gapic_v1.method.wrap_method( - self.create_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.update_cluster: gapic_v1.method.wrap_method( - self.update_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.stop_cluster: gapic_v1.method.wrap_method( - self.stop_cluster, - default_timeout=None, - client_info=client_info, - ), - self.start_cluster: gapic_v1.method.wrap_method( - self.start_cluster, - default_timeout=None, - client_info=client_info, - ), - self.delete_cluster: gapic_v1.method.wrap_method( - self.delete_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_cluster: gapic_v1.method.wrap_method( - self.get_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_clusters: gapic_v1.method.wrap_method( - self.list_clusters, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.diagnose_cluster: gapic_v1.method.wrap_method( - self.diagnose_cluster, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
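The defaults wired in above can be overridden per call; a sketch using the same exponential backoff shape but a tighter deadline (project, region, and cluster names are placeholders)::

    from google.api_core import exceptions
    from google.api_core import retry as retries
    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()

    # Retry only on ServiceUnavailable, backing off from 0.1s toward
    # 60s with a 1.3 multiplier, giving up after 120s overall.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
        deadline=120.0,
    )

    cluster = client.get_cluster(
        project_id="my-project",
        region="us-central1",
        cluster_name="my-cluster",
        retry=custom_retry,
        timeout=120.0,
    )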
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_cluster(self) -> Callable[ - [clusters.CreateClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_cluster(self) -> Callable[ - [clusters.UpdateClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def stop_cluster(self) -> Callable[ - [clusters.StopClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def start_cluster(self) -> Callable[ - [clusters.StartClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_cluster(self) -> Callable[ - [clusters.DeleteClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_cluster(self) -> Callable[ - [clusters.GetClusterRequest], - Union[ - clusters.Cluster, - Awaitable[clusters.Cluster] - ]]: - raise NotImplementedError() - - @property - def list_clusters(self) -> Callable[ - [clusters.ListClustersRequest], - Union[ - clusters.ListClustersResponse, - Awaitable[clusters.ListClustersResponse] - ]]: - raise NotImplementedError() - - @property - def diagnose_cluster(self) -> Callable[ - [clusters.DiagnoseClusterRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'ClusterControllerTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py deleted file mode 100644 index 97d8ced0..00000000 --- 
a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py +++ /dev/null @@ -1,641 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import ClusterControllerTransport, DEFAULT_CLIENT_INFO - - -class ClusterControllerGrpcTransport(ClusterControllerTransport): - """gRPC backend transport for ClusterController. - - The ClusterControllerService provides methods to manage - clusters of Compute Engine instances. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. 
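``create_channel`` above can also be driven directly when a pre-built channel is wanted; per the constructor docs, an explicit channel takes precedence and any credentials arguments are then ignored. A sketch using default credentials::

    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.services.cluster_controller.transports import (
        ClusterControllerGrpcTransport,
    )

    # Build a channel with the helper above, then hand it to the
    # transport and the client.
    channel = ClusterControllerGrpcTransport.create_channel(
        "dataproc.googleapis.com",
    )
    transport = ClusterControllerGrpcTransport(channel=channel)
    client = dataproc_v1.ClusterControllerClient(transport=transport)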
- if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_cluster(self) -> Callable[ - [clusters.CreateClusterRequest], - operations_pb2.Operation]: - r"""Return a callable for the create cluster method over gRPC. - - Creates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - - Returns: - Callable[[~.CreateClusterRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_cluster' not in self._stubs: - self._stubs['create_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/CreateCluster', - request_serializer=clusters.CreateClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_cluster'] - - @property - def update_cluster(self) -> Callable[ - [clusters.UpdateClusterRequest], - operations_pb2.Operation]: - r"""Return a callable for the update cluster method over gRPC. - - Updates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - The cluster must be in a - [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] - state or an error is returned. - - Returns: - Callable[[~.UpdateClusterRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_cluster' not in self._stubs: - self._stubs['update_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/UpdateCluster', - request_serializer=clusters.UpdateClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_cluster'] - - @property - def stop_cluster(self) -> Callable[ - [clusters.StopClusterRequest], - operations_pb2.Operation]: - r"""Return a callable for the stop cluster method over gRPC. - - Stops a cluster in a project. - - Returns: - Callable[[~.StopClusterRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'stop_cluster' not in self._stubs: - self._stubs['stop_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/StopCluster', - request_serializer=clusters.StopClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['stop_cluster'] - - @property - def start_cluster(self) -> Callable[ - [clusters.StartClusterRequest], - operations_pb2.Operation]: - r"""Return a callable for the start cluster method over gRPC. - - Starts a cluster in a project. 
- - Returns: - Callable[[~.StartClusterRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_cluster' not in self._stubs: - self._stubs['start_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/StartCluster', - request_serializer=clusters.StartClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['start_cluster'] - - @property - def delete_cluster(self) -> Callable[ - [clusters.DeleteClusterRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete cluster method over gRPC. - - Deletes a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - - Returns: - Callable[[~.DeleteClusterRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_cluster' not in self._stubs: - self._stubs['delete_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/DeleteCluster', - request_serializer=clusters.DeleteClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_cluster'] - - @property - def get_cluster(self) -> Callable[ - [clusters.GetClusterRequest], - clusters.Cluster]: - r"""Return a callable for the get cluster method over gRPC. - - Gets the resource representation for a cluster in a - project. - - Returns: - Callable[[~.GetClusterRequest], - ~.Cluster]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_cluster' not in self._stubs: - self._stubs['get_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/GetCluster', - request_serializer=clusters.GetClusterRequest.serialize, - response_deserializer=clusters.Cluster.deserialize, - ) - return self._stubs['get_cluster'] - - @property - def list_clusters(self) -> Callable[ - [clusters.ListClustersRequest], - clusters.ListClustersResponse]: - r"""Return a callable for the list clusters method over gRPC. - - Lists all regions/{region}/clusters in a project - alphabetically. - - Returns: - Callable[[~.ListClustersRequest], - ~.ListClustersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
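The stop/start/delete stubs above all resolve to long-running operations; at the client level a plain dict is coerced into the proto-plus request type, so a lifecycle sketch looks like this (names are placeholders)::

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()
    args = {
        "project_id": "my-project",
        "region": "us-central1",
        "cluster_name": "my-cluster",
    }

    # Each call returns an operation future; result() waits for the
    # cluster to reach the target state.
    client.stop_cluster(request=args).result()
    client.start_cluster(request=args).result()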
- if 'list_clusters' not in self._stubs: - self._stubs['list_clusters'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/ListClusters', - request_serializer=clusters.ListClustersRequest.serialize, - response_deserializer=clusters.ListClustersResponse.deserialize, - ) - return self._stubs['list_clusters'] - - @property - def diagnose_cluster(self) -> Callable[ - [clusters.DiagnoseClusterRequest], - operations_pb2.Operation]: - r"""Return a callable for the diagnose cluster method over gRPC. - - Gets cluster diagnostic information. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata `__. - After the operation completes, - [Operation.response][google.longrunning.Operation.response] - contains - `DiagnoseClusterResults `__. - - Returns: - Callable[[~.DiagnoseClusterRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'diagnose_cluster' not in self._stubs: - self._stubs['diagnose_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster', - request_serializer=clusters.DiagnoseClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['diagnose_cluster'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. 
- Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'ClusterControllerGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py deleted file mode 100644 index ffbdf5a2..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py +++ /dev/null @@ -1,640 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import ClusterControllerTransport, DEFAULT_CLIENT_INFO -from .grpc import ClusterControllerGrpcTransport - - -class ClusterControllerGrpcAsyncIOTransport(ClusterControllerTransport): - """gRPC AsyncIO backend transport for ClusterController. - - The ClusterControllerService provides methods to manage - clusters of Compute Engine instances. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
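A sketch of ``test_iam_permissions`` from the caller's side; it returns the subset of the supplied permissions the caller actually holds, never a ``NOT_FOUND`` error. The resource path and permission names are illustrative::

    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2

    client = dataproc_v1.ClusterControllerClient()

    resp = client.test_iam_permissions(
        iam_policy_pb2.TestIamPermissionsRequest(
            resource="projects/my-project/regions/us-central1/clusters/my-cluster",
            permissions=[
                "dataproc.clusters.get",
                "dataproc.clusters.update",
            ],
        )
    )
    print(list(resp.permissions))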
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_cluster(self) -> Callable[ - [clusters.CreateClusterRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create cluster method over gRPC. - - Creates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__. - - Returns: - Callable[[~.CreateClusterRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_cluster' not in self._stubs: - self._stubs['create_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/CreateCluster', - request_serializer=clusters.CreateClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_cluster'] - - @property - def update_cluster(self) -> Callable[ - [clusters.UpdateClusterRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update cluster method over gRPC. - - Updates a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__. - The cluster must be in a - [``RUNNING``][google.cloud.dataproc.v1.ClusterStatus.State] - state or an error is returned. - - Returns: - Callable[[~.UpdateClusterRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_cluster' not in self._stubs: - self._stubs['update_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/UpdateCluster', - request_serializer=clusters.UpdateClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_cluster'] - - @property - def stop_cluster(self) -> Callable[ - [clusters.StopClusterRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the stop cluster method over gRPC. - - Stops a cluster in a project. - - Returns: - Callable[[~.StopClusterRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request.
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'stop_cluster' not in self._stubs: - self._stubs['stop_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/StopCluster', - request_serializer=clusters.StopClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['stop_cluster'] - - @property - def start_cluster(self) -> Callable[ - [clusters.StartClusterRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the start cluster method over gRPC. - - Starts a cluster in a project. - - Returns: - Callable[[~.StartClusterRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_cluster' not in self._stubs: - self._stubs['start_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/StartCluster', - request_serializer=clusters.StartClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['start_cluster'] - - @property - def delete_cluster(self) -> Callable[ - [clusters.DeleteClusterRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete cluster method over gRPC. - - Deletes a cluster in a project. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__. - - Returns: - Callable[[~.DeleteClusterRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_cluster' not in self._stubs: - self._stubs['delete_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/DeleteCluster', - request_serializer=clusters.DeleteClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_cluster'] - - @property - def get_cluster(self) -> Callable[ - [clusters.GetClusterRequest], - Awaitable[clusters.Cluster]]: - r"""Return a callable for the get cluster method over gRPC. - - Gets the resource representation for a cluster in a - project. - - Returns: - Callable[[~.GetClusterRequest], - Awaitable[~.Cluster]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_cluster' not in self._stubs: - self._stubs['get_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/GetCluster', - request_serializer=clusters.GetClusterRequest.serialize, - response_deserializer=clusters.Cluster.deserialize, - ) - return self._stubs['get_cluster'] - - @property - def list_clusters(self) -> Callable[ - [clusters.ListClustersRequest], - Awaitable[clusters.ListClustersResponse]]: - r"""Return a callable for the list clusters method over gRPC. - - Lists all regions/{region}/clusters in a project - alphabetically.
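In practice these awaitable callables are reached through ``ClusterControllerAsyncClient`` rather than invoked directly. A hedged sketch, with made-up project and region values:

.. code-block:: python

    # Illustrative only: listing clusters through the async client routes to
    # the list_clusters stub defined on this transport.
    import asyncio

    from google.cloud import dataproc_v1

    async def main() -> None:
        client = dataproc_v1.ClusterControllerAsyncClient()
        pager = await client.list_clusters(project_id="my-project", region="us-central1")
        async for cluster in pager:
            print(cluster.cluster_name)

    asyncio.run(main())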
- - Returns: - Callable[[~.ListClustersRequest], - Awaitable[~.ListClustersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_clusters' not in self._stubs: - self._stubs['list_clusters'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/ListClusters', - request_serializer=clusters.ListClustersRequest.serialize, - response_deserializer=clusters.ListClustersResponse.deserialize, - ) - return self._stubs['list_clusters'] - - @property - def diagnose_cluster(self) -> Callable[ - [clusters.DiagnoseClusterRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the diagnose cluster method over gRPC. - - Gets cluster diagnostic information. The returned - [Operation.metadata][google.longrunning.Operation.metadata] will - be - `ClusterOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata>`__. - After the operation completes, - [Operation.response][google.longrunning.Operation.response] - contains - `DiagnoseClusterResults <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults>`__. - - Returns: - Callable[[~.DiagnoseClusterRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'diagnose_cluster' not in self._stubs: - self._stubs['diagnose_cluster'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster', - request_serializer=clusters.DiagnoseClusterRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['diagnose_cluster'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. 
If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'ClusterControllerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py deleted file mode 100644 index 61711971..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py +++ /dev/null @@ -1,1889 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataproc_v1.types import clusters -from google.longrunning import operations_pb2 # type: ignore - -from .base import ClusterControllerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class ClusterControllerRestInterceptor: - """Interceptor for ClusterController. 
- - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ClusterControllerRestTransport. - - .. code-block:: python - class MyCustomClusterControllerInterceptor(ClusterControllerRestInterceptor): - def pre_create_cluster(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_cluster(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_cluster(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_cluster(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_diagnose_cluster(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_diagnose_cluster(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_cluster(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_cluster(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_clusters(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_clusters(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_start_cluster(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_start_cluster(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_stop_cluster(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_stop_cluster(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_cluster(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_cluster(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ClusterControllerRestTransport(interceptor=MyCustomClusterControllerInterceptor()) - client = ClusterControllerClient(transport=transport) - - - """ - def pre_create_cluster(self, request: clusters.CreateClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.CreateClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_cluster - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_create_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_cluster - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. 
- """ - return response - def pre_delete_cluster(self, request: clusters.DeleteClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.DeleteClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_cluster - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_delete_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_cluster - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_diagnose_cluster(self, request: clusters.DiagnoseClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.DiagnoseClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for diagnose_cluster - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_diagnose_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for diagnose_cluster - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_get_cluster(self, request: clusters.GetClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.GetClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_cluster - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_get_cluster(self, response: clusters.Cluster) -> clusters.Cluster: - """Post-rpc interceptor for get_cluster - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_list_clusters(self, request: clusters.ListClustersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.ListClustersRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_clusters - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_list_clusters(self, response: clusters.ListClustersResponse) -> clusters.ListClustersResponse: - """Post-rpc interceptor for list_clusters - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_start_cluster(self, request: clusters.StartClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.StartClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for start_cluster - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_start_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for start_cluster - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. 
- """ - return response - def pre_stop_cluster(self, request: clusters.StopClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.StopClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for stop_cluster - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_stop_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for stop_cluster - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_update_cluster(self, request: clusters.UpdateClusterRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[clusters.UpdateClusterRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_cluster - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_update_cluster(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_cluster - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. 
- """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClusterController server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the ClusterController server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ClusterControllerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ClusterControllerRestInterceptor - - -class ClusterControllerRestTransport(ClusterControllerTransport): - """REST backend transport for ClusterController. 
- - The ClusterControllerService provides methods to manage - clusters of Compute Engine instances. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[ClusterControllerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
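A hedged sketch of what the ``url_scheme`` argument documented above enables; the emulator address and ``AnonymousCredentials`` are illustrative assumptions, not part of this module:

.. code-block:: python

    # Hypothetical local-testing setup: plain-HTTP endpoint via url_scheme.
    from google.auth.credentials import AnonymousCredentials

    from google.cloud.dataproc_v1 import ClusterControllerClient
    from google.cloud.dataproc_v1.services.cluster_controller.transports.rest import (
        ClusterControllerRestTransport,
    )

    transport = ClusterControllerRestTransport(
        host="localhost:8080",   # assumed emulator address
        url_scheme="http",       # overrides the default "https"
        credentials=AnonymousCredentials(),
    )
    client = ClusterControllerClient(transport=transport)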
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ClusterControllerRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateCluster(ClusterControllerRestStub): - def __hash__(self): - return hash("CreateCluster") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.CreateClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create cluster method over HTTP. - - Args: - request (~.clusters.CreateClusterRequest): - The request object. A request to create a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters', - 'body': 'cluster', - }, - ] - request, metadata = self._interceptor.pre_create_cluster(request, metadata) - pb_request = clusters.CreateClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_cluster(resp) - return resp - - class _DeleteCluster(ClusterControllerRestStub): - def __hash__(self): - return hash("DeleteCluster") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.DeleteClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete cluster method over HTTP. - - Args: - request (~.clusters.DeleteClusterRequest): - The request object. A request to delete a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
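To make the request-building steps above concrete, a small sketch of how ``path_template.transcode`` resolves the CreateCluster HTTP rule; the field values are invented for illustration:

.. code-block:: python

    # Sketch: transcode the CreateCluster mapping shown above. URI variables
    # are filled from the request, and the `cluster` field becomes the body.
    from google.api_core import path_template

    http_options = [{
        "method": "post",
        "uri": "/v1/projects/{project_id}/regions/{region}/clusters",
        "body": "cluster",
    }]
    request = {"project_id": "my-project", "region": "us-central1", "cluster": {}}

    transcoded = path_template.transcode(http_options, **request)
    print(transcoded["method"])  # post
    print(transcoded["uri"])     # /v1/projects/my-project/regions/us-central1/clusters
    print(transcoded["body"])    # {}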
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}', - }, - ] - request, metadata = self._interceptor.pre_delete_cluster(request, metadata) - pb_request = clusters.DeleteClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_cluster(resp) - return resp - - class _DiagnoseCluster(ClusterControllerRestStub): - def __hash__(self): - return hash("DiagnoseCluster") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.DiagnoseClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the diagnose cluster method over HTTP. - - Args: - request (~.clusters.DiagnoseClusterRequest): - The request object. A request to collect cluster - diagnostic information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_diagnose_cluster(request, metadata) - pb_request = clusters.DiagnoseClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_diagnose_cluster(resp) - return resp - - class _GetCluster(ClusterControllerRestStub): - def __hash__(self): - return hash("GetCluster") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.GetClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> clusters.Cluster: - r"""Call the get cluster method over HTTP. - - Args: - request (~.clusters.GetClusterRequest): - The request object. Request to get the resource - representation for a cluster in a - project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - ~.clusters.Cluster: - Describes the identifying - information, config, and status of a - Dataproc cluster - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}', - }, - ] - request, metadata = self._interceptor.pre_get_cluster(request, metadata) - pb_request = clusters.GetClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = clusters.Cluster() - pb_resp = clusters.Cluster.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_cluster(resp) - return resp - - class _ListClusters(ClusterControllerRestStub): - def __hash__(self): - return hash("ListClusters") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.ListClustersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> clusters.ListClustersResponse: - r"""Call the list clusters method over HTTP. - - Args: - request (~.clusters.ListClustersRequest): - The request object. A request to list the clusters in a - project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.clusters.ListClustersResponse: - The list of all clusters in a - project. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters', - }, - ] - request, metadata = self._interceptor.pre_list_clusters(request, metadata) - pb_request = clusters.ListClustersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = clusters.ListClustersResponse() - pb_resp = clusters.ListClustersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_clusters(resp) - return resp - - class _StartCluster(ClusterControllerRestStub): - def __hash__(self): - return hash("StartCluster") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.StartClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the start cluster method over HTTP. - - Args: - request (~.clusters.StartClusterRequest): - The request object. A request to start a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:start', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_start_cluster(request, metadata) - pb_request = clusters.StartClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_start_cluster(resp) - return resp - - class _StopCluster(ClusterControllerRestStub): - def __hash__(self): - return hash("StopCluster") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.StopClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the stop cluster method over HTTP. - - Args: - request (~.clusters.StopClusterRequest): - The request object. A request to stop a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:stop', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_stop_cluster(request, metadata) - pb_request = clusters.StopClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_stop_cluster(resp) - return resp - - class _UpdateCluster(ClusterControllerRestStub): - def __hash__(self): - return hash("UpdateCluster") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: clusters.UpdateClusterRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the update cluster method over HTTP. - - Args: - request (~.clusters.UpdateClusterRequest): - The request object. A request to update a cluster. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}', - 'body': 'cluster', - }, - ] - request, metadata = self._interceptor.pre_update_cluster(request, metadata) - pb_request = clusters.UpdateClusterRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_cluster(resp) - return resp - - @property - def create_cluster(self) -> Callable[ - [clusters.CreateClusterRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_cluster(self) -> Callable[ - [clusters.DeleteClusterRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteCluster(self._session, self._host, self._interceptor) # type: ignore - - @property - def diagnose_cluster(self) -> Callable[ - [clusters.DiagnoseClusterRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DiagnoseCluster(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_cluster(self) -> Callable[ - [clusters.GetClusterRequest], - clusters.Cluster]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetCluster(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_clusters(self) -> Callable[ - [clusters.ListClustersRequest], - clusters.ListClustersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListClusters(self._session, self._host, self._interceptor) # type: ignore - - @property - def start_cluster(self) -> Callable[ - [clusters.StartClusterRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._StartCluster(self._session, self._host, self._interceptor) # type: ignore - - @property - def stop_cluster(self) -> Callable[ - [clusters.StopClusterRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._StopCluster(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_cluster(self) -> Callable[ - [clusters.UpdateClusterRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateCluster(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(ClusterControllerRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(ClusterControllerRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(ClusterControllerRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(ClusterControllerRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(ClusterControllerRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(ClusterControllerRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(ClusterControllerRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ClusterControllerRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/__init__.py deleted file mode 100644 index 89dd1ffb..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import JobControllerClient -from .async_client import JobControllerAsyncClient - -__all__ = ( - 'JobControllerClient', - 'JobControllerAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/async_client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/async_client.py deleted file mode 100644 index 0b09ab81..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/async_client.py +++ /dev/null @@ -1,1605 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.job_controller import pagers -from google.cloud.dataproc_v1.types import jobs -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import JobControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport -from .client import JobControllerClient - - -class JobControllerAsyncClient: - """The JobController provides methods to manage jobs.""" - - _client: JobControllerClient - - DEFAULT_ENDPOINT = JobControllerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = JobControllerClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(JobControllerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(JobControllerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(JobControllerClient.common_folder_path) - parse_common_folder_path = staticmethod(JobControllerClient.parse_common_folder_path) - common_organization_path = staticmethod(JobControllerClient.common_organization_path) - parse_common_organization_path = staticmethod(JobControllerClient.parse_common_organization_path) - common_project_path = staticmethod(JobControllerClient.common_project_path) - parse_common_project_path = staticmethod(JobControllerClient.parse_common_project_path) - common_location_path = staticmethod(JobControllerClient.common_location_path) - parse_common_location_path = staticmethod(JobControllerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobControllerAsyncClient: The constructed client. - """ - return JobControllerClient.from_service_account_info.__func__(JobControllerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobControllerAsyncClient: The constructed client. 
- """ - return JobControllerClient.from_service_account_file.__func__(JobControllerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return JobControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> JobControllerTransport: - """Returns the transport used by the client instance. - - Returns: - JobControllerTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(JobControllerClient).get_transport_class, type(JobControllerClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, JobControllerTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the job controller client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.JobControllerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = JobControllerClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def submit_job(self,
-            request: Optional[Union[jobs.SubmitJobRequest, dict]] = None,
-            *,
-            project_id: Optional[str] = None,
-            region: Optional[str] = None,
-            job: Optional[jobs.Job] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> jobs.Job:
-        r"""Submits a job to a cluster.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            async def sample_submit_job():
-                # Create a client
-                client = dataproc_v1.JobControllerAsyncClient()
-
-                # Initialize request argument(s)
-                job = dataproc_v1.Job()
-                job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-                job.placement.cluster_name = "cluster_name_value"
-
-                request = dataproc_v1.SubmitJobRequest(
-                    project_id="project_id_value",
-                    region="region_value",
-                    job=job,
-                )
-
-                # Make the request
-                response = await client.submit_job(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]]):
-                The request object. A request to submit a job.
-            project_id (:class:`str`):
-                Required. The ID of the Google Cloud
-                Platform project that the job belongs
-                to.
-
-                This corresponds to the ``project_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            region (:class:`str`):
-                Required. The Dataproc region in
-                which to handle the request.
-
-                This corresponds to the ``region`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            job (:class:`google.cloud.dataproc_v1.types.Job`):
-                Required. The job resource.
-                This corresponds to the ``job`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataproc_v1.types.Job:
-                A Dataproc job resource.
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([project_id, region, job])
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        request = jobs.SubmitJobRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if project_id is not None:
-            request.project_id = project_id
-        if region is not None:
-            request.region = region
-        if job is not None:
-            request.job = job
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.submit_job,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=900.0,
-            ),
-            default_timeout=900.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-                ("region", request.region),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def submit_job_as_operation(self,
-            request: Optional[Union[jobs.SubmitJobRequest, dict]] = None,
-            *,
-            project_id: Optional[str] = None,
-            region: Optional[str] = None,
-            job: Optional[jobs.Job] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Submits a job to a cluster.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            async def sample_submit_job_as_operation():
-                # Create a client
-                client = dataproc_v1.JobControllerAsyncClient()
-
-                # Initialize request argument(s)
-                job = dataproc_v1.Job()
-                job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-                job.placement.cluster_name = "cluster_name_value"
-
-                request = dataproc_v1.SubmitJobRequest(
-                    project_id="project_id_value",
-                    region="region_value",
-                    job=job,
-                )
-
-                # Make the request
-                operation = await client.submit_job_as_operation(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = await operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]]):
-                The request object. A request to submit a job.
-            project_id (:class:`str`):
-                Required. The ID of the Google Cloud
-                Platform project that the job belongs
-                to.
-
-                This corresponds to the ``project_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            region (:class:`str`):
-                Required. The Dataproc region in
-                which to handle the request.
-
-                This corresponds to the ``region`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
- job (:class:`google.cloud.dataproc_v1.types.Job`): - Required. The job resource. - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataproc_v1.types.Job` A Dataproc - job resource. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = jobs.SubmitJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.submit_job_as_operation, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - jobs.Job, - metadata_type=jobs.JobMetadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[jobs.GetJobRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Gets the resource representation for a job in a - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_get_job(): - # Create a client - client = dataproc_v1.JobControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetJobRequest( - project_id="project_id_value", - region="region_value", - job_id="job_id_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.GetJobRequest, dict]]): - The request object. A request to get the resource - representation for a job in a project. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (:class:`str`): - Required. The job ID. - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Job: - A Dataproc job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = jobs.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
-        return response
-
-    async def list_jobs(self,
-            request: Optional[Union[jobs.ListJobsRequest, dict]] = None,
-            *,
-            project_id: Optional[str] = None,
-            region: Optional[str] = None,
-            filter: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListJobsAsyncPager:
-        r"""Lists regions/{region}/jobs in a project.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            async def sample_list_jobs():
-                # Create a client
-                client = dataproc_v1.JobControllerAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataproc_v1.ListJobsRequest(
-                    project_id="project_id_value",
-                    region="region_value",
-                )
-
-                # Make the request
-                page_result = await client.list_jobs(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataproc_v1.types.ListJobsRequest, dict]]):
-                The request object. A request to list jobs in a project.
-            project_id (:class:`str`):
-                Required. The ID of the Google Cloud
-                Platform project that the job belongs
-                to.
-
-                This corresponds to the ``project_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            region (:class:`str`):
-                Required. The Dataproc region in
-                which to handle the request.
-
-                This corresponds to the ``region`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            filter (:class:`str`):
-                Optional. A filter constraining the jobs to list.
-                Filters are case-sensitive and have the following
-                syntax:
-
-                [field = value] AND [field [= value]] ...
-
-                where **field** is ``status.state`` or ``labels.[KEY]``,
-                and ``[KEY]`` is a label key. **value** can be ``*`` to
-                match all values. ``status.state`` can be either
-                ``ACTIVE`` or ``NON_ACTIVE``. Only the logical ``AND``
-                operator is supported; space-separated items are treated
-                as having an implicit ``AND`` operator.
-
-                Example filter:
-
-                status.state = ACTIVE AND labels.env = staging AND
-                labels.starred = \*
-
-                This corresponds to the ``filter`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsAsyncPager:
-                A list of jobs in a project.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
- has_flattened_params = any([project_id, region, filter]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = jobs.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_job(self, - request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Updates a job in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_update_job(): - # Create a client - client = dataproc_v1.JobControllerAsyncClient() - - # Initialize request argument(s) - job = dataproc_v1.Job() - job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - job.placement.cluster_name = "cluster_name_value" - - request = dataproc_v1.UpdateJobRequest( - project_id="project_id_value", - region="region_value", - job_id="job_id_value", - job=job, - ) - - # Make the request - response = await client.update_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.UpdateJobRequest, dict]]): - The request object. A request to update a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Job: - A Dataproc job resource. - """ - # Create or coerce a protobuf request object. 
-        request = jobs.UpdateJobRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.update_job,
-            default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
-                core_exceptions.ServiceUnavailable,
-            ),
-            deadline=900.0,
-            ),
-            default_timeout=900.0,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-                ("region", request.region),
-                ("job_id", request.job_id),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def cancel_job(self,
-            request: Optional[Union[jobs.CancelJobRequest, dict]] = None,
-            *,
-            project_id: Optional[str] = None,
-            region: Optional[str] = None,
-            job_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> jobs.Job:
-        r"""Starts a job cancellation request. To access the job resource
-        after cancellation, call
-        `regions/{region}/jobs.list <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list>`__
-        or
-        `regions/{region}/jobs.get <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get>`__.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            async def sample_cancel_job():
-                # Create a client
-                client = dataproc_v1.JobControllerAsyncClient()
-
-                # Initialize request argument(s)
-                request = dataproc_v1.CancelJobRequest(
-                    project_id="project_id_value",
-                    region="region_value",
-                    job_id="job_id_value",
-                )
-
-                # Make the request
-                response = await client.cancel_job(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataproc_v1.types.CancelJobRequest, dict]]):
-                The request object. A request to cancel a job.
-            project_id (:class:`str`):
-                Required. The ID of the Google Cloud
-                Platform project that the job belongs
-                to.
-
-                This corresponds to the ``project_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            region (:class:`str`):
-                Required. The Dataproc region in
-                which to handle the request.
-
-                This corresponds to the ``region`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            job_id (:class:`str`):
-                Required. The job ID.
-                This corresponds to the ``job_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataproc_v1.types.Job:
-                A Dataproc job resource.
-        """
-        # Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = jobs.CancelJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job(self, - request: Optional[Union[jobs.DeleteJobRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes the job from the project. If the job is active, the - delete fails, and the response returns ``FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_delete_job(): - # Create a client - client = dataproc_v1.JobControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteJobRequest( - project_id="project_id_value", - region="region_value", - job_id="job_id_value", - ) - - # Make the request - await client.delete_job(request=request) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.DeleteJobRequest, dict]]): - The request object. A request to delete a job. - project_id (:class:`str`): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (:class:`str`): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (:class:`str`): - Required. 
The job ID. - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = jobs.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("job_id", request.job_id), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
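Because ``ListOperationsRequest`` is a raw protobuf rather than a proto-plus type, a plain dict is also accepted and expanded via keyword arguments, exactly as the coercion above shows. A small sketch; the operations collection name is illustrative:

.. code-block:: python

    import asyncio
    from google.cloud import dataproc_v1

    async def show_operations():
        client = dataproc_v1.JobControllerAsyncClient()
        # The dict is coerced with operations_pb2.ListOperationsRequest(**request).
        response = await client.list_operations(
            request={"name": "projects/my-project/regions/us-central1/operations"})
        for op in response.operations:
            print(op.name, op.done)

    asyncio.run(show_operations())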
- response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
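``get_operation`` returns only the latest snapshot, so waiting for completion means polling it. A hedged sketch; the operation name and polling interval are placeholders:

.. code-block:: python

    import asyncio
    from google.cloud import dataproc_v1

    async def wait_until_done(name: str, interval: float = 5.0):
        client = dataproc_v1.JobControllerAsyncClient()
        while True:
            # GetOperationRequest is a raw protobuf; a dict is expanded into it.
            op = await client.get_operation(request={"name": name})
            if op.done:
                return op
            await asyncio.sleep(interval)

    asyncio.run(wait_until_done(
        "projects/my-project/regions/us-central1/operations/op-123"))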
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
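The ``Policy`` and ``Binding`` messages from ``google.iam.v1`` map directly onto the JSON shown above. A minimal sketch that replaces the policy on an illustrative job resource (the resource path, role, and member are placeholders, not a documented Dataproc recipe):

.. code-block:: python

    import asyncio
    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    async def grant_viewer():
        client = dataproc_v1.JobControllerAsyncClient()
        policy = policy_pb2.Policy(bindings=[
            policy_pb2.Binding(role="roles/viewer",
                               members=["user:eve@example.com"]),
        ])
        # set_iam_policy replaces any policy already attached to the resource.
        return await client.set_iam_policy(
            request=iam_policy_pb2.SetIamPolicyRequest(
                resource="projects/my-project/regions/us-central1/jobs/my-job",
                policy=policy))

    asyncio.run(grant_viewer())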
- rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "JobControllerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "JobControllerAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/client.py deleted file mode 100644 index e957d134..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/client.py +++ /dev/null @@ -1,1750 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.job_controller import pagers -from google.cloud.dataproc_v1.types import jobs -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import JobControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import JobControllerGrpcTransport -from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport -from .transports.rest import JobControllerRestTransport - - -class JobControllerClientMeta(type): - """Metaclass for the JobController client. 
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[JobControllerTransport]]
-    _transport_registry["grpc"] = JobControllerGrpcTransport
-    _transport_registry["grpc_asyncio"] = JobControllerGrpcAsyncIOTransport
-    _transport_registry["rest"] = JobControllerRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[JobControllerTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class JobControllerClient(metaclass=JobControllerClientMeta):
-    """The JobController provides methods to manage jobs."""
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "dataproc.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            JobControllerClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            JobControllerClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> JobControllerTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            JobControllerTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, JobControllerTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the job controller client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, JobControllerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, JobControllerTransport): - # transport is a JobControllerTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def submit_job(self, - request: Optional[Union[jobs.SubmitJobRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - job: Optional[jobs.Job] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Submits a job to a cluster. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_submit_job(): - # Create a client - client = dataproc_v1.JobControllerClient() - - # Initialize request argument(s) - job = dataproc_v1.Job() - job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - job.placement.cluster_name = "cluster_name_value" - - request = dataproc_v1.SubmitJobRequest( - project_id="project_id_value", - region="region_value", - job=job, - ) - - # Make the request - response = client.submit_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]): - The request object. A request to submit a job. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. 
- - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (google.cloud.dataproc_v1.types.Job): - Required. The job resource. - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Job: - A Dataproc job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a jobs.SubmitJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.SubmitJobRequest): - request = jobs.SubmitJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.submit_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def submit_job_as_operation(self, - request: Optional[Union[jobs.SubmitJobRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - job: Optional[jobs.Job] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Submits job to a cluster. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_submit_job_as_operation(): - # Create a client - client = dataproc_v1.JobControllerClient() - - # Initialize request argument(s) - job = dataproc_v1.Job() - job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - job.placement.cluster_name = "cluster_name_value" - - request = dataproc_v1.SubmitJobRequest( - project_id="project_id_value", - region="region_value", - job=job, - ) - - # Make the request - operation = client.submit_job_as_operation(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.SubmitJobRequest, dict]): - The request object. A request to submit a job. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (google.cloud.dataproc_v1.types.Job): - Required. The job resource. - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.dataproc_v1.types.Job` A Dataproc - job resource. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a jobs.SubmitJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.SubmitJobRequest): - request = jobs.SubmitJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.submit_job_as_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. 
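The raw long-running-operation response is wrapped in an ``operation.Operation`` future (see the ``operation.from_gapic`` call just below), so callers can block on ``result()``. A sketch with placeholder names; the PySpark job shape is illustrative:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    job = dataproc_v1.Job()
    job.placement.cluster_name = "my-cluster"
    job.pyspark_job.main_python_file_uri = "gs://my-bucket/word_count.py"

    op = client.submit_job_as_operation(
        project_id="my-project", region="us-central1", job=job)
    # result() resolves to a jobs.Job once the operation completes;
    # jobs.JobMetadata is available on op.metadata while it runs.
    finished = op.result(timeout=600)
    print(finished.status.state)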
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - jobs.Job, - metadata_type=jobs.JobMetadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: Optional[Union[jobs.GetJobRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Gets the resource representation for a job in a - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_get_job(): - # Create a client - client = dataproc_v1.JobControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.GetJobRequest( - project_id="project_id_value", - region="region_value", - job_id="job_id_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.GetJobRequest, dict]): - The request object. A request to get the resource - representation for a job in a project. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (str): - Required. The job ID. - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Job: - A Dataproc job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a jobs.GetJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.GetJobRequest): - request = jobs.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
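As the check above enforces, a ``request`` object and flattened field arguments are mutually exclusive. Both calling styles side by side, with placeholder IDs:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # Style 1: flattened keyword arguments.
    job = client.get_job(
        project_id="my-project", region="us-central1", job_id="my-job")

    # Style 2: an explicit request object (an equivalent dict also works).
    job = client.get_job(request=dataproc_v1.GetJobRequest(
        project_id="my-project", region="us-central1", job_id="my-job"))

    # Passing both at once raises ValueError.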
- if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_jobs(self, - request: Optional[Union[jobs.ListJobsRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsPager: - r"""Lists regions/{region}/jobs in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_list_jobs(): - # Create a client - client = dataproc_v1.JobControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.ListJobsRequest( - project_id="project_id_value", - region="region_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.ListJobsRequest, dict]): - The request object. A request to list jobs in a project. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - Optional. A filter constraining the jobs to list. - Filters are case-sensitive and have the following - syntax: - - [field = value] AND [field [= value]] ... - - where **field** is ``status.state`` or ``labels.[KEY]``, - and ``[KEY]`` is a label key. **value** can be ``*`` to - match all values. ``status.state`` can be either - ``ACTIVE`` or ``NON_ACTIVE``. Only the logical ``AND`` - operator is supported; space-separated items are treated - as having an implicit ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND labels.env = staging AND - labels.starred = \* - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsPager: - A list of jobs in a project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, filter]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a jobs.ListJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.ListJobsRequest): - request = jobs.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_job(self, - request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Updates a job in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_update_job(): - # Create a client - client = dataproc_v1.JobControllerClient() - - # Initialize request argument(s) - job = dataproc_v1.Job() - job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - job.placement.cluster_name = "cluster_name_value" - - request = dataproc_v1.UpdateJobRequest( - project_id="project_id_value", - region="region_value", - job_id="job_id_value", - job=job, - ) - - # Make the request - response = client.update_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.UpdateJobRequest, dict]): - The request object. A request to update a job. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Job: - A Dataproc job resource. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a jobs.UpdateJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.UpdateJobRequest): - request = jobs.UpdateJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def cancel_job(self, - request: Optional[Union[jobs.CancelJobRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Starts a job cancellation request. To access the job resource - after cancellation, call - `regions/{region}/jobs.list `__ - or - `regions/{region}/jobs.get `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_cancel_job(): - # Create a client - client = dataproc_v1.JobControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.CancelJobRequest( - project_id="project_id_value", - region="region_value", - job_id="job_id_value", - ) - - # Make the request - response = client.cancel_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.CancelJobRequest, dict]): - The request object. A request to cancel a job. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. - - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (str): - Required. The job ID. - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.Job: - A Dataproc job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a jobs.CancelJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.CancelJobRequest): - request = jobs.CancelJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job(self, - request: Optional[Union[jobs.DeleteJobRequest, dict]] = None, - *, - project_id: Optional[str] = None, - region: Optional[str] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes the job from the project. If the job is active, the - delete fails, and the response returns ``FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_delete_job(): - # Create a client - client = dataproc_v1.JobControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteJobRequest( - project_id="project_id_value", - region="region_value", - job_id="job_id_value", - ) - - # Make the request - client.delete_job(request=request) - - Args: - request (Union[google.cloud.dataproc_v1.types.DeleteJobRequest, dict]): - The request object. A request to delete a job. - project_id (str): - Required. The ID of the Google Cloud - Platform project that the job belongs - to. - - This corresponds to the ``project_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - region (str): - Required. The Dataproc region in - which to handle the request. 
- - This corresponds to the ``region`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (str): - Required. The job ID. - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_id, region, job_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a jobs.DeleteJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.DeleteJobRequest): - request = jobs.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project_id is not None: - request.project_id = project_id - if region is not None: - request.region = region - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("region", request.region), - ("job_id", request.job_id), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "JobControllerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
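
Because these ``operations_pb2`` requests are not proto-plus wrapped, a ``dict`` argument is expanded straight into the protobuf constructor. A small sketch of the equivalence; the operation path is made up:

.. code-block:: python

    from google.longrunning import operations_pb2

    # A dict is expanded via **kwargs into the protobuf constructor,
    # so these two requests compare equal; the name is made up.
    name = "projects/my-project/regions/us-central1/operations"
    request = operations_pb2.ListOperationsRequest(name=name, page_size=10)
    assert request == operations_pb2.ListOperationsRequest(
        **{"name": name, "page_size": 10}
    )
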
- if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
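
``get_operation`` above is the low-level way to poll a long-running operation by name. A rough sketch assuming an existing client; the operation path is a placeholder:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.longrunning import operations_pb2

    client = dataproc_v1.JobControllerClient()

    # Poll a single operation by its fully qualified (placeholder) name.
    op = client.get_operation(
        operations_pb2.GetOperationRequest(
            name="projects/my-project/regions/us-central1/operations/op-123"
        )
    )
    if op.done and op.HasField("error"):
        print("operation failed:", op.error.message)
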
- if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). 
- A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
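
On the client surface, ``test_iam_permissions`` accepts either a ``dict`` or the protobuf request. A sketch with an illustrative job resource path and permission names:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2

    client = dataproc_v1.JobControllerClient()

    # Resource path and permission names are illustrative only.
    response = client.test_iam_permissions(
        iam_policy_pb2.TestIamPermissionsRequest(
            resource="projects/my-project/regions/us-central1/jobs/my-job",
            permissions=["dataproc.jobs.get", "dataproc.jobs.cancel"],
        )
    )
    print(list(response.permissions))
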
- if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "JobControllerClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/pagers.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/pagers.py deleted file mode 100644 index c556f583..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/pagers.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataproc_v1.types import jobs - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., jobs.ListJobsResponse], - request: jobs.ListJobsRequest, - response: jobs.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
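
In practice this pager is obtained from ``list_jobs`` rather than constructed directly; iterating it fetches further pages on demand. A minimal sketch with placeholder project and region:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # The pager is iterable; additional ListJobs pages are requested
    # transparently as the loop advances.
    for job in client.list_jobs(project_id="my-project", region="us-central1"):
        print(job.reference.job_id, job.status.state)
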
- """ - self._method = method - self._request = jobs.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[jobs.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[jobs.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[jobs.ListJobsResponse]], - request: jobs.ListJobsRequest, - response: jobs.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = jobs.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[jobs.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[jobs.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py deleted file mode 100644 index f423e24f..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import JobControllerTransport -from .grpc import JobControllerGrpcTransport -from .grpc_asyncio import JobControllerGrpcAsyncIOTransport -from .rest import JobControllerRestTransport -from .rest import JobControllerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[JobControllerTransport]] -_transport_registry['grpc'] = JobControllerGrpcTransport -_transport_registry['grpc_asyncio'] = JobControllerGrpcAsyncIOTransport -_transport_registry['rest'] = JobControllerRestTransport - -__all__ = ( - 'JobControllerTransport', - 'JobControllerGrpcTransport', - 'JobControllerGrpcAsyncIOTransport', - 'JobControllerRestTransport', - 'JobControllerRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/base.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/base.py deleted file mode 100644 index acb4ff9c..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/base.py +++ /dev/null @@ -1,357 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
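
The ``_transport_registry`` above maps transport names to their implementations, and a client can be pinned to one by name (gRPC is the default). A brief sketch:

.. code-block:: python

    from google.cloud import dataproc_v1

    # gRPC is the default transport; REST can be selected by name.
    grpc_client = dataproc_v1.JobControllerClient(transport="grpc")
    rest_client = dataproc_v1.JobControllerClient(transport="rest")
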
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataproc_v1.types import jobs -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class JobControllerTransport(abc.ABC): - """Abstract transport class for JobController.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataproc.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
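
As the checks above enforce, ``credentials`` and ``credentials_file`` are mutually exclusive (passing both raises ``DuplicateCredentialArgs``), with Application Default Credentials as the fallback. A sketch of both supply paths; the key-file path is a placeholder:

.. code-block:: python

    import google.auth
    from google.cloud import dataproc_v1

    # Explicit credentials loaded from a (placeholder) key file...
    creds, _ = google.auth.load_credentials_from_file(
        "service-account.json",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    explicit_client = dataproc_v1.JobControllerClient(credentials=creds)

    # ...or Application Default Credentials, resolved from the environment.
    adc_client = dataproc_v1.JobControllerClient()
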
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.submit_job: gapic_v1.method.wrap_method( - self.submit_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - self.submit_job_as_operation: gapic_v1.method.wrap_method( - self.submit_job_as_operation, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - self.update_job: gapic_v1.method.wrap_method( - self.update_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - self.cancel_job: gapic_v1.method.wrap_method( - self.cancel_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
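
The retry and timeout defaults precomputed in ``_prep_wrapped_methods`` above can be overridden on any individual call. A sketch that mirrors the generated numbers; the IDs are placeholders:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # Per-call override of the wrapped defaults; the values mirror the
    # generated configuration above, and the IDs are placeholders.
    custom_retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=900.0,
    )
    job = client.get_job(
        project_id="my-project",
        region="us-central1",
        job_id="my-job-id",
        retry=custom_retry,
        timeout=900.0,
    )
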
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def submit_job(self) -> Callable[ - [jobs.SubmitJobRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def submit_job_as_operation(self) -> Callable[ - [jobs.SubmitJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [jobs.ListJobsRequest], - Union[ - jobs.ListJobsResponse, - Awaitable[jobs.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def update_job(self) -> Callable[ - [jobs.UpdateJobRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def cancel_job(self) -> Callable[ - [jobs.CancelJobRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def delete_job(self) -> Callable[ - [jobs.DeleteJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'JobControllerTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py deleted file mode 100644 index 2bd2c184..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py +++ /dev/null @@ -1,600 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataproc_v1.types import jobs -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import JobControllerTransport, DEFAULT_CLIENT_INFO - - -class JobControllerGrpcTransport(JobControllerTransport): - """gRPC backend transport for JobController. - - The JobController provides methods to manage jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def submit_job(self) -> Callable[ - [jobs.SubmitJobRequest], - jobs.Job]: - r"""Return a callable for the submit job method over gRPC. - - Submits a job to a cluster. - - Returns: - Callable[[~.SubmitJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'submit_job' not in self._stubs: - self._stubs['submit_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/SubmitJob', - request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['submit_job'] - - @property - def submit_job_as_operation(self) -> Callable[ - [jobs.SubmitJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the submit job as operation method over gRPC. - - Submits job to a cluster. 
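
On the client surface this stub backs ``submit_job_as_operation``, which wraps the response in a ``google.api_core.operation.Operation`` so callers can block on completion. A rough sketch with a placeholder Hadoop job:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # A placeholder Hadoop job targeting an existing cluster.
    job = dataproc_v1.Job(
        placement=dataproc_v1.JobPlacement(cluster_name="my-cluster"),
        hadoop_job=dataproc_v1.HadoopJob(
            main_jar_file_uri="file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
            args=["wordcount", "gs://my-bucket/input", "gs://my-bucket/output"],
        ),
    )
    operation = client.submit_job_as_operation(
        project_id="my-project", region="us-central1", job=job
    )
    finished = operation.result()  # blocks until the job completes
    print(finished.status.state)
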
- - Returns: - Callable[[~.SubmitJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'submit_job_as_operation' not in self._stubs: - self._stubs['submit_job_as_operation'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/SubmitJobAsOperation', - request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['submit_job_as_operation'] - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - jobs.Job]: - r"""Return a callable for the get job method over gRPC. - - Gets the resource representation for a job in a - project. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/GetJob', - request_serializer=jobs.GetJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def list_jobs(self) -> Callable[ - [jobs.ListJobsRequest], - jobs.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists regions/{region}/jobs in a project. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/ListJobs', - request_serializer=jobs.ListJobsRequest.serialize, - response_deserializer=jobs.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def update_job(self) -> Callable[ - [jobs.UpdateJobRequest], - jobs.Job]: - r"""Return a callable for the update job method over gRPC. - - Updates a job in a project. - - Returns: - Callable[[~.UpdateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job' not in self._stubs: - self._stubs['update_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/UpdateJob', - request_serializer=jobs.UpdateJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['update_job'] - - @property - def cancel_job(self) -> Callable[ - [jobs.CancelJobRequest], - jobs.Job]: - r"""Return a callable for the cancel job method over gRPC. - - Starts a job cancellation request. To access the job resource - after cancellation, call - `regions/{region}/jobs.list `__ - or - `regions/{region}/jobs.get `__. 
- - Returns: - Callable[[~.CancelJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/CancelJob', - request_serializer=jobs.CancelJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['cancel_job'] - - @property - def delete_job(self) -> Callable[ - [jobs.DeleteJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job method over gRPC. - - Deletes the job from the project. If the job is active, the - delete fails, and the response returns ``FAILED_PRECONDITION``. - - Returns: - Callable[[~.DeleteJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/DeleteJob', - request_serializer=jobs.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. 
- Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'JobControllerGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py deleted file mode 100644 index 0fdbb489..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py +++ /dev/null @@ -1,599 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataproc_v1.types import jobs -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import JobControllerTransport, DEFAULT_CLIENT_INFO -from .grpc import JobControllerGrpcTransport - - -class JobControllerGrpcAsyncIOTransport(JobControllerTransport): - """gRPC AsyncIO backend transport for JobController. - - The JobController provides methods to manage jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
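# JobControllerAsyncClient selects this gRPC AsyncIO transport by default.
# A minimal sketch, assuming default credentials and placeholder IDs:
import asyncio
from google.cloud import dataproc_v1

async def main() -> None:
    client = dataproc_v1.JobControllerAsyncClient()
    job = await client.get_job(
        project_id="my-project", region="us-central1", job_id="my-job-id"
    )
    print(job.status.state)

asyncio.run(main())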
- """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
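# client_cert_source_for_mtls expects a callable returning (certificate
# bytes, private key bytes) in PEM format. A sketch with a hypothetical
# local certificate pair and the conventional mTLS endpoint (assumed here):
from google.cloud.dataproc_v1.services.job_controller.transports import (
    JobControllerGrpcAsyncIOTransport,
)

def client_cert_source():  # hypothetical loader
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()

transport = JobControllerGrpcAsyncIOTransport(
    host="dataproc.mtls.googleapis.com",
    client_cert_source_for_mtls=client_cert_source,
)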
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def submit_job(self) -> Callable[ - [jobs.SubmitJobRequest], - Awaitable[jobs.Job]]: - r"""Return a callable for the submit job method over gRPC. - - Submits a job to a cluster. - - Returns: - Callable[[~.SubmitJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'submit_job' not in self._stubs: - self._stubs['submit_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/SubmitJob', - request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['submit_job'] - - @property - def submit_job_as_operation(self) -> Callable[ - [jobs.SubmitJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the submit job as operation method over gRPC. - - Submits job to a cluster. - - Returns: - Callable[[~.SubmitJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'submit_job_as_operation' not in self._stubs: - self._stubs['submit_job_as_operation'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/SubmitJobAsOperation', - request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['submit_job_as_operation'] - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - Awaitable[jobs.Job]]: - r"""Return a callable for the get job method over gRPC. - - Gets the resource representation for a job in a - project. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
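# submit_job_as_operation wraps the submitted job in a long-running
# Operation so the caller can await completion instead of polling GetJob.
# A sketch with a placeholder Spark job on a hypothetical cluster:
import asyncio
from google.cloud import dataproc_v1

async def main() -> None:
    client = dataproc_v1.JobControllerAsyncClient()
    job = dataproc_v1.Job(
        placement=dataproc_v1.JobPlacement(cluster_name="my-cluster"),
        spark_job=dataproc_v1.SparkJob(
            main_class="org.apache.spark.examples.SparkPi",
            jar_file_uris=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        ),
    )
    operation = await client.submit_job_as_operation(
        project_id="my-project", region="us-central1", job=job
    )
    finished = await operation.result()  # resolves when the job completes
    print(finished.status.state)

asyncio.run(main())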
- if 'get_job' not in self._stubs:
- self._stubs['get_job'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataproc.v1.JobController/GetJob',
- request_serializer=jobs.GetJobRequest.serialize,
- response_deserializer=jobs.Job.deserialize,
- )
- return self._stubs['get_job']
-
- @property
- def list_jobs(self) -> Callable[
- [jobs.ListJobsRequest],
- Awaitable[jobs.ListJobsResponse]]:
- r"""Return a callable for the list jobs method over gRPC.
-
- Lists regions/{region}/jobs in a project.
-
- Returns:
- Callable[[~.ListJobsRequest],
- Awaitable[~.ListJobsResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_jobs' not in self._stubs:
- self._stubs['list_jobs'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataproc.v1.JobController/ListJobs',
- request_serializer=jobs.ListJobsRequest.serialize,
- response_deserializer=jobs.ListJobsResponse.deserialize,
- )
- return self._stubs['list_jobs']
-
- @property
- def update_job(self) -> Callable[
- [jobs.UpdateJobRequest],
- Awaitable[jobs.Job]]:
- r"""Return a callable for the update job method over gRPC.
-
- Updates a job in a project.
-
- Returns:
- Callable[[~.UpdateJobRequest],
- Awaitable[~.Job]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'update_job' not in self._stubs:
- self._stubs['update_job'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataproc.v1.JobController/UpdateJob',
- request_serializer=jobs.UpdateJobRequest.serialize,
- response_deserializer=jobs.Job.deserialize,
- )
- return self._stubs['update_job']
-
- @property
- def cancel_job(self) -> Callable[
- [jobs.CancelJobRequest],
- Awaitable[jobs.Job]]:
- r"""Return a callable for the cancel job method over gRPC.
-
- Starts a job cancellation request. To access the job resource
- after cancellation, call
- `regions/{region}/jobs.list <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list>`__
- or
- `regions/{region}/jobs.get <https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get>`__.
-
- Returns:
- Callable[[~.CancelJobRequest],
- Awaitable[~.Job]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'cancel_job' not in self._stubs:
- self._stubs['cancel_job'] = self.grpc_channel.unary_unary(
- '/google.cloud.dataproc.v1.JobController/CancelJob',
- request_serializer=jobs.CancelJobRequest.serialize,
- response_deserializer=jobs.Job.deserialize,
- )
- return self._stubs['cancel_job']
-
- @property
- def delete_job(self) -> Callable[
- [jobs.DeleteJobRequest],
- Awaitable[empty_pb2.Empty]]:
- r"""Return a callable for the delete job method over gRPC.
-
- Deletes the job from the project. If the job is active, the
- delete fails, and the response returns ``FAILED_PRECONDITION``.
-
- Returns:
- Callable[[~.DeleteJobRequest],
- Awaitable[~.Empty]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
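# As the cancel_job docstring notes, cancellation does not remove the job;
# its state remains observable through GetJob/ListJobs. A sketch that
# cancels and then polls until a terminal state (placeholder IDs):
import asyncio
from google.cloud import dataproc_v1

async def cancel_and_wait(project_id: str, region: str, job_id: str) -> None:
    client = dataproc_v1.JobControllerAsyncClient()
    await client.cancel_job(project_id=project_id, region=region, job_id=job_id)
    terminal = (
        dataproc_v1.JobStatus.State.CANCELLED,
        dataproc_v1.JobStatus.State.DONE,
        dataproc_v1.JobStatus.State.ERROR,
    )
    while True:
        job = await client.get_job(project_id=project_id, region=region, job_id=job_id)
        if job.status.state in terminal:
            break
        await asyncio.sleep(5)

asyncio.run(cancel_and_wait("my-project", "us-central1", "my-job-id"))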
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.JobController/DeleteJob', - request_serializer=jobs.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. 
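# The Operations stubs above are exposed as client mixins that accept raw
# operations_pb2 messages. A sketch listing operations in a region (the
# resource name is a placeholder):
from google.cloud import dataproc_v1
from google.longrunning import operations_pb2

client = dataproc_v1.JobControllerClient()
response = client.list_operations(
    request=operations_pb2.ListOperationsRequest(
        name="projects/my-project/regions/us-central1/operations"
    )
)
for operation in response.operations:
    print(operation.name, operation.done)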
- Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'JobControllerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/rest.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/rest.py deleted file mode 100644 index 7a851ab1..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/job_controller/transports/rest.py +++ /dev/null @@ -1,1733 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataproc_v1.types import jobs -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -from .base import JobControllerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class JobControllerRestInterceptor: - """Interceptor for JobController. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the JobControllerRestTransport. - - .. 
code-block:: python - class MyCustomJobControllerInterceptor(JobControllerRestInterceptor): - def pre_cancel_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_cancel_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_submit_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_submit_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_submit_job_as_operation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_submit_job_as_operation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_job(self, response): - logging.log(f"Received response: {response}") - return response - - transport = JobControllerRestTransport(interceptor=MyCustomJobControllerInterceptor()) - client = JobControllerClient(transport=transport) - - - """ - def pre_cancel_job(self, request: jobs.CancelJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.CancelJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_cancel_job(self, response: jobs.Job) -> jobs.Job: - """Post-rpc interceptor for cancel_job - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_delete_job(self, request: jobs.DeleteJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.DeleteJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def pre_get_job(self, request: jobs.GetJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.GetJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_get_job(self, response: jobs.Job) -> jobs.Job: - """Post-rpc interceptor for get_job - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. 
- """ - return response - def pre_list_jobs(self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: - """Post-rpc interceptor for list_jobs - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_submit_job(self, request: jobs.SubmitJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.SubmitJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for submit_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_submit_job(self, response: jobs.Job) -> jobs.Job: - """Post-rpc interceptor for submit_job - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_submit_job_as_operation(self, request: jobs.SubmitJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.SubmitJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for submit_job_as_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_submit_job_as_operation(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for submit_job_as_operation - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_update_job(self, request: jobs.UpdateJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.UpdateJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_update_job(self, response: jobs.Job) -> jobs.Job: - """Post-rpc interceptor for update_job - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. 
- """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. 
- """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobController server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the JobController server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class JobControllerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: JobControllerRestInterceptor - - -class JobControllerRestTransport(JobControllerTransport): - """REST backend transport for JobController. - - The JobController provides methods to manage jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[JobControllerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
- # credentials object
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience
- )
- self._session = AuthorizedSession(
- self._credentials, default_host=self.DEFAULT_HOST)
- self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
- if client_cert_source_for_mtls:
- self._session.configure_mtls_channel(client_cert_source_for_mtls)
- self._interceptor = interceptor or JobControllerRestInterceptor()
- self._prep_wrapped_messages(client_info)
-
- @property
- def operations_client(self) -> operations_v1.AbstractOperationsClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Only create a new client if we do not already have one.
- if self._operations_client is None:
- http_options: Dict[str, List[Dict[str, str]]] = {
- 'google.longrunning.Operations.CancelOperation': [
- {
- 'method': 'post',
- 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel',
- },
- {
- 'method': 'post',
- 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
- },
- ],
- 'google.longrunning.Operations.DeleteOperation': [
- {
- 'method': 'delete',
- 'uri': '/v1/{name=projects/*/regions/*/operations/*}',
- },
- {
- 'method': 'delete',
- 'uri': '/v1/{name=projects/*/locations/*/operations/*}',
- },
- ],
- 'google.longrunning.Operations.GetOperation': [
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/regions/*/operations/*}',
- },
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/locations/*/operations/*}',
- },
- ],
- 'google.longrunning.Operations.ListOperations': [
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/regions/*/operations}',
- },
- {
- 'method': 'get',
- 'uri': '/v1/{name=projects/*/locations/*/operations}',
- },
- ],
- }
-
- rest_transport = operations_v1.OperationsRestTransport(
- host=self._host,
- # use the credentials which are saved
- credentials=self._credentials,
- scopes=self._scopes,
- http_options=http_options,
- path_prefix="v1")
-
- self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
-
- # Return the client from cache.
- return self._operations_client
-
- class _CancelJob(JobControllerRestStub):
- def __hash__(self):
- return hash("CancelJob")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- }
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
- def __call__(self,
- request: jobs.CancelJobRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, str]]=(),
- ) -> jobs.Job:
- r"""Call the cancel job method over HTTP.
-
- Args:
- request (~.jobs.CancelJobRequest):
- The request object. A request to cancel a job.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.Job: - A Dataproc job resource. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_cancel_job(request, metadata) - pb_request = jobs.CancelJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.Job() - pb_resp = jobs.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_cancel_job(resp) - return resp - - class _DeleteJob(JobControllerRestStub): - def __hash__(self): - return hash("DeleteJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: jobs.DeleteJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete job method over HTTP. - - Args: - request (~.jobs.DeleteJobRequest): - The request object. A request to delete a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
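# The transcoding above reduces CancelJob to a plain HTTPS POST. A rough
# hand-rolled equivalent for illustration only (the generated stub also
# handles retries, required-field defaults and enum encoding); IDs are
# placeholders:
import google.auth
from google.auth.transport.requests import AuthorizedSession

credentials, _ = google.auth.default()
session = AuthorizedSession(credentials)
url = (
    "https://dataproc.googleapis.com/v1/projects/my-project"
    "/regions/us-central1/jobs/my-job-id:cancel"
)
response = session.post(url, json={})
response.raise_for_status()
print(response.json()["status"]["state"])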
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/projects/{project_id}/regions/{region}/jobs/{job_id}', - }, - ] - request, metadata = self._interceptor.pre_delete_job(request, metadata) - pb_request = jobs.DeleteJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetJob(JobControllerRestStub): - def __hash__(self): - return hash("GetJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: jobs.GetJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.Job: - r"""Call the get job method over HTTP. - - Args: - request (~.jobs.GetJobRequest): - The request object. A request to get the resource - representation for a job in a project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.Job: - A Dataproc job resource. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/regions/{region}/jobs/{job_id}', - }, - ] - request, metadata = self._interceptor.pre_get_job(request, metadata) - pb_request = jobs.GetJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.Job() - pb_resp = jobs.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_job(resp) - return resp - - class _ListJobs(JobControllerRestStub): - def __hash__(self): - return hash("ListJobs") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: jobs.ListJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.ListJobsResponse: - r"""Call the list jobs method over HTTP. - - Args: - request (~.jobs.ListJobsRequest): - The request object. A request to list jobs in a project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.ListJobsResponse: - A list of jobs in a project. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/projects/{project_id}/regions/{region}/jobs', - }, - ] - request, metadata = self._interceptor.pre_list_jobs(request, metadata) - pb_request = jobs.ListJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.ListJobsResponse() - pb_resp = jobs.ListJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_jobs(resp) - return resp - - class _SubmitJob(JobControllerRestStub): - def __hash__(self): - return hash("SubmitJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: jobs.SubmitJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.Job: - r"""Call the submit job method over HTTP. - - Args: - request (~.jobs.SubmitJobRequest): - The request object. A request to submit a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
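# At the client level, ListJobs responses are wrapped in an auto-paginating
# iterator. A sketch using the documented state/label filter syntax
# (placeholder project and label values):
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient(transport="rest")
pager = client.list_jobs(
    project_id="my-project",
    region="us-central1",
    filter="status.state = ACTIVE AND labels.env = staging",
)
for job in pager:  # further pages are fetched on demand
    print(job.reference.job_id, job.status.state)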
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.Job: - A Dataproc job resource. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/regions/{region}/jobs:submit', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_submit_job(request, metadata) - pb_request = jobs.SubmitJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.Job() - pb_resp = jobs.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_submit_job(resp) - return resp - - class _SubmitJobAsOperation(JobControllerRestStub): - def __hash__(self): - return hash("SubmitJobAsOperation") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: jobs.SubmitJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the submit job as operation method over HTTP. - - Args: - request (~.jobs.SubmitJobRequest): - The request object. A request to submit a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
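# SubmitJob sends the Job message as the JSON request body, as transcoded
# above. A sketch submitting a PySpark job synchronously (the bucket and
# cluster names are placeholders):
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient(transport="rest")
job = dataproc_v1.Job(
    placement=dataproc_v1.JobPlacement(cluster_name="my-cluster"),
    pyspark_job=dataproc_v1.PySparkJob(
        main_python_file_uri="gs://my-bucket/scripts/word_count.py"
    ),
)
submitted = client.submit_job(project_id="my-project", region="us-central1", job=job)
print(submitted.reference.job_id)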
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/projects/{project_id}/regions/{region}/jobs:submitAsOperation', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_submit_job_as_operation(request, metadata) - pb_request = jobs.SubmitJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_submit_job_as_operation(resp) - return resp - - class _UpdateJob(JobControllerRestStub): - def __hash__(self): - return hash("UpdateJob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: jobs.UpdateJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.Job: - r"""Call the update job method over HTTP. - - Args: - request (~.jobs.UpdateJobRequest): - The request object. A request to update a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.Job: - A Dataproc job resource. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/projects/{project_id}/regions/{region}/jobs/{job_id}', - 'body': 'job', - }, - ] - request, metadata = self._interceptor.pre_update_job(request, metadata) - pb_request = jobs.UpdateJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.Job() - pb_resp = jobs.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_job(resp) - return resp - - @property - def cancel_job(self) -> Callable[ - [jobs.CancelJobRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_job(self) -> Callable[ - [jobs.DeleteJobRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_jobs(self) -> Callable[ - [jobs.ListJobsRequest], - jobs.ListJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def submit_job(self) -> Callable[ - [jobs.SubmitJobRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SubmitJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def submit_job_as_operation(self) -> Callable[ - [jobs.SubmitJobRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._SubmitJobAsOperation(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_job(self) -> Callable[ - [jobs.UpdateJobRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(JobControllerRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
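``get_iam_policy`` carries several candidate http rules, one per resource type; the transcoder uses the first template that matches ``request.resource``. A simplified sketch of that matching (the real logic lives in ``path_template.transcode``; the template subset is illustrative):

.. code-block:: python

    import re

    TEMPLATES = [
        "projects/*/regions/*/clusters/*",
        "projects/*/regions/*/jobs/*",
    ]

    def template_to_regex(template: str):
        # In these templates '*' matches exactly one path segment.
        return re.compile("^" + re.escape(template).replace(r"\*", "[^/]+") + "$")

    def pick_uri(resource: str) -> str:
        for template in TEMPLATES:
            if template_to_regex(template).match(resource):
                return f"/v1/{resource}:getIamPolicy"
        raise ValueError(f"no http rule matches {resource!r}")

    assert pick_uri("projects/p/regions/us-central1/jobs/j1").endswith(":getIamPolicy")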
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(JobControllerRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
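Every stub in this transport ends with the same error path: a status of 400 or above is converted into a typed exception by ``core_exceptions.from_http_response``, a real helper in ``google.api_core.exceptions``. An illustrative, much-simplified stand-in for what that mapping does:

.. code-block:: python

    class GoogleAPICallError(Exception):
        pass

    class PermissionDenied(GoogleAPICallError):
        pass

    class NotFound(GoogleAPICallError):
        pass

    _BY_STATUS = {403: PermissionDenied, 404: NotFound}

    def from_http_status(status: int, message: str) -> GoogleAPICallError:
        # Unknown 4xx/5xx codes fall back to the generic base class.
        return _BY_STATUS.get(status, GoogleAPICallError)(f"{status}: {message}")

    assert type(from_http_status(404, "job not found")) is NotFound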
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(JobControllerRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
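The ``pre_*``/``post_*`` calls sprinkled through these stubs come from a user-suppliable interceptor object. A hedged sketch of a logging hook, assuming the ``JobControllerRestInterceptor`` base class defined earlier in this module (the import path mirrors the file being deleted and is illustrative):

.. code-block:: python

    from google.cloud.dataproc_v1.services.job_controller.transports.rest import (
        JobControllerRestInterceptor,  # assumed: the interceptor base in this module
    )

    class LoggingInterceptor(JobControllerRestInterceptor):
        def pre_test_iam_permissions(self, request, metadata):
            # Runs before the HTTP call; may rewrite the request or metadata.
            print("checking permissions on", request.resource)
            return request, metadata

        def post_test_iam_permissions(self, response):
            # Runs after a successful HTTP call; may rewrite the response.
            print("granted:", list(response.permissions))
            return response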
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(JobControllerRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(JobControllerRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(JobControllerRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(JobControllerRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'JobControllerRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/__init__.py deleted file mode 100644 index 3352c712..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import NodeGroupControllerClient -from .async_client import NodeGroupControllerAsyncClient - -__all__ = ( - 'NodeGroupControllerClient', - 'NodeGroupControllerAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/async_client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/async_client.py deleted file mode 100644 index 28ce51ed..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/async_client.py +++ /dev/null @@ -1,1085 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import node_groups -from google.cloud.dataproc_v1.types import operations -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import NodeGroupControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import NodeGroupControllerGrpcAsyncIOTransport -from .client import NodeGroupControllerClient - - -class NodeGroupControllerAsyncClient: - """The ``NodeGroupControllerService`` provides methods to manage node - groups of Compute Engine managed instances. 
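A hedged usage sketch for the async client described above; the project, region, cluster, and node group names are placeholders, and credentials are assumed to come from the environment:

.. code-block:: python

    import asyncio
    from google.cloud import dataproc_v1

    async def main():
        client = dataproc_v1.NodeGroupControllerAsyncClient()
        node_group = await client.get_node_group(
            name="projects/my-project/regions/us-central1/clusters/my-cluster/nodeGroups/my-node-group",
        )
        print(node_group.name)

    asyncio.run(main())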
- """ - - _client: NodeGroupControllerClient - - DEFAULT_ENDPOINT = NodeGroupControllerClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = NodeGroupControllerClient.DEFAULT_MTLS_ENDPOINT - - node_group_path = staticmethod(NodeGroupControllerClient.node_group_path) - parse_node_group_path = staticmethod(NodeGroupControllerClient.parse_node_group_path) - common_billing_account_path = staticmethod(NodeGroupControllerClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(NodeGroupControllerClient.parse_common_billing_account_path) - common_folder_path = staticmethod(NodeGroupControllerClient.common_folder_path) - parse_common_folder_path = staticmethod(NodeGroupControllerClient.parse_common_folder_path) - common_organization_path = staticmethod(NodeGroupControllerClient.common_organization_path) - parse_common_organization_path = staticmethod(NodeGroupControllerClient.parse_common_organization_path) - common_project_path = staticmethod(NodeGroupControllerClient.common_project_path) - parse_common_project_path = staticmethod(NodeGroupControllerClient.parse_common_project_path) - common_location_path = staticmethod(NodeGroupControllerClient.common_location_path) - parse_common_location_path = staticmethod(NodeGroupControllerClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NodeGroupControllerAsyncClient: The constructed client. - """ - return NodeGroupControllerClient.from_service_account_info.__func__(NodeGroupControllerAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NodeGroupControllerAsyncClient: The constructed client. - """ - return NodeGroupControllerClient.from_service_account_file.__func__(NodeGroupControllerAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. 
- - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return NodeGroupControllerClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> NodeGroupControllerTransport: - """Returns the transport used by the client instance. - - Returns: - NodeGroupControllerTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(NodeGroupControllerClient).get_transport_class, type(NodeGroupControllerClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, NodeGroupControllerTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the node group controller client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.NodeGroupControllerTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = NodeGroupControllerClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_node_group(self, - request: Optional[Union[node_groups.CreateNodeGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - node_group: Optional[clusters.NodeGroup] = None, - node_group_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a node group in a cluster. The returned - [Operation.metadata][google.longrunning.Operation.metadata] is - `NodeGroupOperationMetadata `__. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_create_node_group(): - # Create a client - client = dataproc_v1.NodeGroupControllerAsyncClient() - - # Initialize request argument(s) - node_group = dataproc_v1.NodeGroup() - node_group.roles = ['DRIVER'] - - request = dataproc_v1.CreateNodeGroupRequest( - parent="parent_value", - node_group=node_group, - ) - - # Make the request - operation = client.create_node_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.CreateNodeGroupRequest, dict]]): - The request object. A request to create a node group. - parent (:class:`str`): - Required. The parent resource where this node group will - be created. Format: - ``projects/{project}/regions/{region}/clusters/{cluster}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - node_group (:class:`google.cloud.dataproc_v1.types.NodeGroup`): - Required. The node group to create. - This corresponds to the ``node_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - node_group_id (:class:`str`): - Optional. An optional node group ID. Generated if not - specified. - - The ID must contain only letters (a-z, A-Z), numbers - (0-9), underscores (_), and hyphens (-). Cannot begin or - end with underscore or hyphen. Must consist of from 3 to - 33 characters. - - This corresponds to the ``node_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group. - **The Dataproc NodeGroup resource is not related to - the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, node_group, node_group_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = node_groups.CreateNodeGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if node_group is not None: - request.node_group = node_group - if node_group_id is not None: - request.node_group_id = node_group_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_node_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clusters.NodeGroup, - metadata_type=operations.NodeGroupOperationMetadata, - ) - - # Done; return the response. - return response - - async def resize_node_group(self, - request: Optional[Union[node_groups.ResizeNodeGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - size: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Resizes a node group in a cluster. The returned - [Operation.metadata][google.longrunning.Operation.metadata] is - `NodeGroupOperationMetadata `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_resize_node_group(): - # Create a client - client = dataproc_v1.NodeGroupControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ResizeNodeGroupRequest( - name="name_value", - size=443, - ) - - # Make the request - operation = client.resize_node_group(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.ResizeNodeGroupRequest, dict]]): - The request object. A request to resize a node group. - name (:class:`str`): - Required. The name of the node group to resize. Format: - ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - size (:class:`int`): - Required. The number of running - instances for the node group to - maintain. The group adds or removes - instances to maintain the number of - instances specified by this parameter. - - This corresponds to the ``size`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group. - **The Dataproc NodeGroup resource is not related to - the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, size]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = node_groups.ResizeNodeGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if size is not None: - request.size = size - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.resize_node_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - clusters.NodeGroup, - metadata_type=operations.NodeGroupOperationMetadata, - ) - - # Done; return the response. - return response - - async def get_node_group(self, - request: Optional[Union[node_groups.GetNodeGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clusters.NodeGroup: - r"""Gets the resource representation for a node group in - a cluster. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_get_node_group(): - # Create a client - client = dataproc_v1.NodeGroupControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetNodeGroupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_node_group(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.GetNodeGroupRequest, dict]]): - The request object. A request to get a node group . - name (:class:`str`): - Required. The name of the node group to retrieve. - Format: - ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.NodeGroup: - Dataproc Node Group. - **The Dataproc NodeGroup resource is not related to - the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = node_groups.GetNodeGroupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_node_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
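The ``routing_header.to_grpc_metadata`` calls in these mixins fold the resource name into the single ``x-goog-request-params`` header so the backend can route the call. A hedged sketch of roughly what that amounts to (simplified; the real helper is ``gapic_v1.routing_header.to_grpc_metadata``):

.. code-block:: python

    from urllib.parse import quote

    def routing_metadata(**params):
        # URL-encode each routing param into the header value.
        value = "&".join(f"{k}={quote(str(v), safe='')}" for k, v in params.items())
        return ("x-goog-request-params", value)

    print(routing_metadata(name="projects/p/regions/r/operations/op-1"))
    # ('x-goog-request-params', 'name=projects%2Fp%2Fregions%2Fr%2Foperations%2Fop-1')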
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
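A hedged sketch of building the kind of policy shown in the JSON/YAML examples above, using the ``google.iam.v1`` protobuf types this module already imports; the resource name and role values are placeholders:

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2, policy_pb2

    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/resourcemanager.organizationViewer",
                members=["user:eve@example.com"],
            )
        ]
    )
    request = iam_policy_pb2.SetIamPolicyRequest(
        resource="projects/my-project/regions/us-central1/clusters/my-cluster",
        policy=policy,
    )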
- rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "NodeGroupControllerAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "NodeGroupControllerAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/client.py deleted file mode 100644 index 40aad596..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/client.py +++ /dev/null @@ -1,1286 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
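As a hedged illustration of the async surface deleted above (this sketch is not part of the diff; the project, region, and cluster names are placeholders, and real credentials plus an existing cluster are required), the client is designed to be used as an async context manager so the transport is closed on exit::

    import asyncio

    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2


    async def show_policy():
        # Exiting the block calls __aexit__, which closes the transport.
        async with dataproc_v1.NodeGroupControllerAsyncClient() as client:
            policy = await client.get_iam_policy(
                iam_policy_pb2.GetIamPolicyRequest(
                    resource="projects/my-project/regions/us-central1/clusters/my-cluster",
                )
            )
            for binding in policy.bindings:
                print(binding.role, list(binding.members))


    asyncio.run(show_policy())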
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import node_groups -from google.cloud.dataproc_v1.types import operations -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import NodeGroupControllerTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import NodeGroupControllerGrpcTransport -from .transports.grpc_asyncio import NodeGroupControllerGrpcAsyncIOTransport -from .transports.rest import NodeGroupControllerRestTransport - - -class NodeGroupControllerClientMeta(type): - """Metaclass for the NodeGroupController client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[NodeGroupControllerTransport]] - _transport_registry["grpc"] = NodeGroupControllerGrpcTransport - _transport_registry["grpc_asyncio"] = NodeGroupControllerGrpcAsyncIOTransport - _transport_registry["rest"] = NodeGroupControllerRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[NodeGroupControllerTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class NodeGroupControllerClient(metaclass=NodeGroupControllerClientMeta): - """The ``NodeGroupControllerService`` provides methods to manage node - groups of Compute Engine managed instances. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataproc.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NodeGroupControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - NodeGroupControllerClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> NodeGroupControllerTransport: - """Returns the transport used by the client instance. - - Returns: - NodeGroupControllerTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def node_group_path(project: str,region: str,cluster: str,node_group: str,) -> str: - """Returns a fully-qualified node_group string.""" - return "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(project=project, region=region, cluster=cluster, node_group=node_group, ) - - @staticmethod - def parse_node_group_path(path: str) -> Dict[str,str]: - """Parses a node_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/regions/(?P.+?)/clusters/(?P.+?)/nodeGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, NodeGroupControllerTransport]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the node group controller client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, NodeGroupControllerTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
-                client. It won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
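A small sketch (not from the diff; all identifiers are placeholders, and the environment variables are assumed to be set as shown) of how the path helpers and the endpoint resolution above behave::

    import os

    from google.api_core import client_options as client_options_lib
    from google.cloud import dataproc_v1

    C = dataproc_v1.NodeGroupControllerClient

    # Path helpers round-trip: build a resource name, then parse it back.
    path = C.node_group_path("my-project", "us-central1", "my-cluster", "my-ng")
    assert C.parse_node_group_path(path) == {
        "project": "my-project",
        "region": "us-central1",
        "cluster": "my-cluster",
        "node_group": "my-ng",
    }

    # With client certificates disabled and the mTLS endpoint forced off,
    # the default regular endpoint is returned and no cert source is used.
    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    endpoint, cert_source = C.get_mtls_endpoint_and_cert_source(
        client_options_lib.ClientOptions()
    )
    assert endpoint == "dataproc.googleapis.com" and cert_source is None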
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, NodeGroupControllerTransport): - # transport is a NodeGroupControllerTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_node_group(self, - request: Optional[Union[node_groups.CreateNodeGroupRequest, dict]] = None, - *, - parent: Optional[str] = None, - node_group: Optional[clusters.NodeGroup] = None, - node_group_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Creates a node group in a cluster. The returned - [Operation.metadata][google.longrunning.Operation.metadata] is - `NodeGroupOperationMetadata `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_create_node_group(): - # Create a client - client = dataproc_v1.NodeGroupControllerClient() - - # Initialize request argument(s) - node_group = dataproc_v1.NodeGroup() - node_group.roles = ['DRIVER'] - - request = dataproc_v1.CreateNodeGroupRequest( - parent="parent_value", - node_group=node_group, - ) - - # Make the request - operation = client.create_node_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.CreateNodeGroupRequest, dict]): - The request object. A request to create a node group. - parent (str): - Required. The parent resource where this node group will - be created. Format: - ``projects/{project}/regions/{region}/clusters/{cluster}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - node_group (google.cloud.dataproc_v1.types.NodeGroup): - Required. The node group to create. - This corresponds to the ``node_group`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - node_group_id (str): - Optional. An optional node group ID. Generated if not - specified. - - The ID must contain only letters (a-z, A-Z), numbers - (0-9), underscores (_), and hyphens (-). Cannot begin or - end with underscore or hyphen. Must consist of from 3 to - 33 characters. - - This corresponds to the ``node_group_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group. - **The Dataproc NodeGroup resource is not related to - the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, node_group, node_group_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a node_groups.CreateNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, node_groups.CreateNodeGroupRequest): - request = node_groups.CreateNodeGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if node_group is not None: - request.node_group = node_group - if node_group_id is not None: - request.node_group_id = node_group_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_node_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clusters.NodeGroup, - metadata_type=operations.NodeGroupOperationMetadata, - ) - - # Done; return the response. - return response - - def resize_node_group(self, - request: Optional[Union[node_groups.ResizeNodeGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - size: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation.Operation: - r"""Resizes a node group in a cluster. The returned - [Operation.metadata][google.longrunning.Operation.metadata] is - `NodeGroupOperationMetadata `__. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_resize_node_group(): - # Create a client - client = dataproc_v1.NodeGroupControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.ResizeNodeGroupRequest( - name="name_value", - size=443, - ) - - # Make the request - operation = client.resize_node_group(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.ResizeNodeGroupRequest, dict]): - The request object. A request to resize a node group. - name (str): - Required. The name of the node group to resize. Format: - ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - size (int): - Required. The number of running - instances for the node group to - maintain. The group adds or removes - instances to maintain the number of - instances specified by this parameter. - - This corresponds to the ``size`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
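For context, a minimal sketch of driving the long-running operation that ``create_node_group`` returns (the parent is a placeholder and the 300-second budget is an assumption, not a documented default)::

    from google.cloud import dataproc_v1


    def create_and_wait(parent: str) -> dataproc_v1.NodeGroup:
        client = dataproc_v1.NodeGroupControllerClient()
        node_group = dataproc_v1.NodeGroup(roles=["DRIVER"])
        operation = client.create_node_group(parent=parent, node_group=node_group)
        # The operation metadata is surfaced as NodeGroupOperationMetadata.
        print("operation type:", operation.metadata.operation_type)
        # Block until the NodeGroup result is available (or the budget expires).
        return operation.result(timeout=300)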
- - The result type for the operation will be :class:`google.cloud.dataproc_v1.types.NodeGroup` Dataproc Node Group. - **The Dataproc NodeGroup resource is not related to - the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, size]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a node_groups.ResizeNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, node_groups.ResizeNodeGroupRequest): - request = node_groups.ResizeNodeGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if size is not None: - request.size = size - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.resize_node_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - clusters.NodeGroup, - metadata_type=operations.NodeGroupOperationMetadata, - ) - - # Done; return the response. - return response - - def get_node_group(self, - request: Optional[Union[node_groups.GetNodeGroupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> clusters.NodeGroup: - r"""Gets the resource representation for a node group in - a cluster. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_get_node_group(): - # Create a client - client = dataproc_v1.NodeGroupControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.GetNodeGroupRequest( - name="name_value", - ) - - # Make the request - response = client.get_node_group(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.GetNodeGroupRequest, dict]): - The request object. A request to get a node group . - name (str): - Required. The name of the node group to retrieve. - Format: - ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.NodeGroup: - Dataproc Node Group. - **The Dataproc NodeGroup resource is not related to - the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a node_groups.GetNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, node_groups.GetNodeGroupRequest): - request = node_groups.GetNodeGroupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_node_group] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "NodeGroupControllerClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
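The operations mixins above are thin wrappers over ``google.longrunning``; a hedged usage sketch follows (the operation-collection name is a placeholder)::

    from google.cloud import dataproc_v1
    from google.longrunning import operations_pb2

    client = dataproc_v1.NodeGroupControllerClient()
    response = client.list_operations(
        operations_pb2.ListOperationsRequest(
            name="projects/my-project/regions/us-central1/operations",
        )
    )
    for op in response.operations:
        print(op.name, "done:", op.done)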
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "NodeGroupControllerClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/__init__.py deleted file mode 100644 index 469b5e77..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import NodeGroupControllerTransport -from .grpc import NodeGroupControllerGrpcTransport -from .grpc_asyncio import NodeGroupControllerGrpcAsyncIOTransport -from .rest import NodeGroupControllerRestTransport -from .rest import NodeGroupControllerRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[NodeGroupControllerTransport]] -_transport_registry['grpc'] = NodeGroupControllerGrpcTransport -_transport_registry['grpc_asyncio'] = NodeGroupControllerGrpcAsyncIOTransport -_transport_registry['rest'] = NodeGroupControllerRestTransport - -__all__ = ( - 'NodeGroupControllerTransport', - 'NodeGroupControllerGrpcTransport', - 'NodeGroupControllerGrpcAsyncIOTransport', - 'NodeGroupControllerRestTransport', - 'NodeGroupControllerRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/base.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/base.py deleted file mode 100644 index ffc1544c..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/base.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
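For reference, a sketch of how the transport registry above is consumed: the label passed to the client selects an entry, and ``get_transport_class`` exposes the same lookup (constructing a client still requires resolvable credentials)::

    from google.cloud import dataproc_v1

    transport_cls = dataproc_v1.NodeGroupControllerClient.get_transport_class("rest")
    print(transport_cls.__name__)  # NodeGroupControllerRestTransport

    # Passing the label (rather than an instance) lets the client build the
    # transport with its own resolved credentials and endpoint.
    client = dataproc_v1.NodeGroupControllerClient(transport="rest")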
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import node_groups -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class NodeGroupControllerTransport(abc.ABC): - """Abstract transport class for NodeGroupController.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataproc.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_node_group: gapic_v1.method.wrap_method( - self.create_node_group, - default_timeout=None, - client_info=client_info, - ), - self.resize_node_group: gapic_v1.method.wrap_method( - self.resize_node_group, - default_timeout=None, - client_info=client_info, - ), - self.get_node_group: gapic_v1.method.wrap_method( - self.get_node_group, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_node_group(self) -> Callable[ - [node_groups.CreateNodeGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def resize_node_group(self) -> Callable[ - [node_groups.ResizeNodeGroupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_node_group(self) -> Callable[ - [node_groups.GetNodeGroupRequest], - Union[ - clusters.NodeGroup, - Awaitable[clusters.NodeGroup] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'NodeGroupControllerTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc.py deleted file mode 100644 index 24c23a20..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc.py +++ /dev/null @@ -1,496 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import node_groups -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import NodeGroupControllerTransport, DEFAULT_CLIENT_INFO - - -class NodeGroupControllerGrpcTransport(NodeGroupControllerTransport): - """gRPC backend transport for NodeGroupController. - - The ``NodeGroupControllerService`` provides methods to manage node - groups of Compute Engine managed instances. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. 
It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataproc.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def operations_client(self) -> operations_v1.OperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Quick check: Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsClient(
-                self.grpc_channel
-            )
-
-        # Return the client from cache.
-        return self._operations_client
-
-    @property
-    def create_node_group(self) -> Callable[
-            [node_groups.CreateNodeGroupRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the create node group method over gRPC.
-
-        Creates a node group in a cluster. The returned
-        [Operation.metadata][google.longrunning.Operation.metadata] is
-        `NodeGroupOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.NodeGroupOperationMetadata>`__.
-
-        Returns:
-            Callable[[~.CreateNodeGroupRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_node_group' not in self._stubs:
-            self._stubs['create_node_group'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.NodeGroupController/CreateNodeGroup',
-                request_serializer=node_groups.CreateNodeGroupRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['create_node_group']
-
-    @property
-    def resize_node_group(self) -> Callable[
-            [node_groups.ResizeNodeGroupRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the resize node group method over gRPC.
-
-        Resizes a node group in a cluster. The returned
-        [Operation.metadata][google.longrunning.Operation.metadata] is
-        `NodeGroupOperationMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.NodeGroupOperationMetadata>`__.
-
-        Returns:
-            Callable[[~.ResizeNodeGroupRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'resize_node_group' not in self._stubs:
-            self._stubs['resize_node_group'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.NodeGroupController/ResizeNodeGroup',
-                request_serializer=node_groups.ResizeNodeGroupRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['resize_node_group']
-
-    @property
-    def get_node_group(self) -> Callable[
-            [node_groups.GetNodeGroupRequest],
-            clusters.NodeGroup]:
-        r"""Return a callable for the get node group method over gRPC.
-
-        Gets the resource representation for a node group in
-        a cluster.
-
-        Returns:
-            Callable[[~.GetNodeGroupRequest],
-                    ~.NodeGroup]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_node_group' not in self._stubs:
-            self._stubs['get_node_group'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.NodeGroupController/GetNodeGroup',
-                request_serializer=node_groups.GetNodeGroupRequest.serialize,
-                response_deserializer=clusters.NodeGroup.deserialize,
-            )
-        return self._stubs['get_node_group']
-
-    def close(self):
-        self.grpc_channel.close()
-
-    @property
-    def delete_operation(
-        self,
-    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
-        r"""Return a callable for the delete_operation method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "delete_operation" not in self._stubs:
-            self._stubs["delete_operation"] = self.grpc_channel.unary_unary(
-                "/google.longrunning.Operations/DeleteOperation",
-                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
-                response_deserializer=None,
-            )
-        return self._stubs["delete_operation"]
-
-    @property
-    def cancel_operation(
-        self,
-    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
-        r"""Return a callable for the cancel_operation method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'NodeGroupControllerGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc_asyncio.py deleted file mode 100644 index de025f4e..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/grpc_asyncio.py +++ /dev/null @@ -1,495 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import node_groups -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from .base import NodeGroupControllerTransport, DEFAULT_CLIENT_INFO -from .grpc import NodeGroupControllerGrpcTransport - - -class NodeGroupControllerGrpcAsyncIOTransport(NodeGroupControllerTransport): - """gRPC AsyncIO backend transport for NodeGroupController. - - The ``NodeGroupControllerService`` provides methods to manage node - groups of Compute Engine managed instances. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_node_group(self) -> Callable[ - [node_groups.CreateNodeGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create node group method over gRPC. - - Creates a node group in a cluster. The returned - [Operation.metadata][google.longrunning.Operation.metadata] is - `NodeGroupOperationMetadata `__. - - Returns: - Callable[[~.CreateNodeGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_node_group' not in self._stubs: - self._stubs['create_node_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.NodeGroupController/CreateNodeGroup', - request_serializer=node_groups.CreateNodeGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_node_group'] - - @property - def resize_node_group(self) -> Callable[ - [node_groups.ResizeNodeGroupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the resize node group method over gRPC. - - Resizes a node group in a cluster. The returned - [Operation.metadata][google.longrunning.Operation.metadata] is - `NodeGroupOperationMetadata `__. - - Returns: - Callable[[~.ResizeNodeGroupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resize_node_group' not in self._stubs: - self._stubs['resize_node_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.NodeGroupController/ResizeNodeGroup', - request_serializer=node_groups.ResizeNodeGroupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['resize_node_group'] - - @property - def get_node_group(self) -> Callable[ - [node_groups.GetNodeGroupRequest], - Awaitable[clusters.NodeGroup]]: - r"""Return a callable for the get node group method over gRPC. - - Gets the resource representation for a node group in - a cluster. - - Returns: - Callable[[~.GetNodeGroupRequest], - Awaitable[~.NodeGroup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_node_group' not in self._stubs: - self._stubs['get_node_group'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.NodeGroupController/GetNodeGroup', - request_serializer=node_groups.GetNodeGroupRequest.serialize, - response_deserializer=clusters.NodeGroup.deserialize, - ) - return self._stubs['get_node_group'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'NodeGroupControllerGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py deleted file mode 100644 index 8607311e..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py +++ /dev/null @@ -1,1304 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
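The file removed below is the REST transport, which sends JSON-encoded protos over HTTP/1.1 and supports the interceptor hooks documented in its class docstring. A minimal sketch, assuming the released package, of opting into it by name:

.. code-block:: python

    from google.cloud import dataproc_v1

    # Select the transport by its registered name; "grpc" (the default)
    # is selected the same way on the synchronous client.
    client = dataproc_v1.NodeGroupControllerClient(transport="rest")
    node_group = client.get_node_group(
        name="projects/my-project/regions/us-central1"
             "/clusters/my-cluster/nodeGroups/my-node-group"  # placeholder
    )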
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import node_groups -from google.longrunning import operations_pb2 # type: ignore - -from .base import NodeGroupControllerTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class NodeGroupControllerRestInterceptor: - """Interceptor for NodeGroupController. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the NodeGroupControllerRestTransport. - - .. code-block:: python - class MyCustomNodeGroupControllerInterceptor(NodeGroupControllerRestInterceptor): - def pre_create_node_group(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_node_group(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_node_group(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_node_group(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_resize_node_group(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_resize_node_group(self, response): - logging.log(f"Received response: {response}") - return response - - transport = NodeGroupControllerRestTransport(interceptor=MyCustomNodeGroupControllerInterceptor()) - client = NodeGroupControllerClient(transport=transport) - - - """ - def pre_create_node_group(self, request: node_groups.CreateNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[node_groups.CreateNodeGroupRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_node_group - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. 
- """ - return request, metadata - - def post_create_node_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_node_group - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - def pre_get_node_group(self, request: node_groups.GetNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[node_groups.GetNodeGroupRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_node_group - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_get_node_group(self, response: clusters.NodeGroup) -> clusters.NodeGroup: - """Post-rpc interceptor for get_node_group - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - def pre_resize_node_group(self, request: node_groups.ResizeNodeGroupRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[node_groups.ResizeNodeGroupRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for resize_node_group - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_resize_node_group(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for resize_node_group - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. 
- """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the NodeGroupController server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the NodeGroupController server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class NodeGroupControllerRestStub: - _session: AuthorizedSession - _host: str - _interceptor: NodeGroupControllerRestInterceptor - - -class NodeGroupControllerRestTransport(NodeGroupControllerTransport): - """REST backend transport for NodeGroupController. - - The ``NodeGroupControllerService`` provides methods to manage node - groups of Compute Engine managed instances. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[NodeGroupControllerRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or NodeGroupControllerRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            http_options: Dict[str, List[Dict[str, str]]] = {
-                'google.longrunning.Operations.CancelOperation': [
-                    {
-                        'method': 'post',
-                        'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel',
-                    },
-                    {
-                        'method': 'post',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
-                    },
-                ],
-                'google.longrunning.Operations.DeleteOperation': [
-                    {
-                        'method': 'delete',
-                        'uri': '/v1/{name=projects/*/regions/*/operations/*}',
-                    },
-                    {
-                        'method': 'delete',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-                    },
-                ],
-                'google.longrunning.Operations.GetOperation': [
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/regions/*/operations/*}',
-                    },
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-                    },
-                ],
-                'google.longrunning.Operations.ListOperations': [
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/regions/*/operations}',
-                    },
-                    {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*/operations}',
-                    },
-                ],
-            }
-
-            rest_transport = operations_v1.OperationsRestTransport(
-                host=self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                scopes=self._scopes,
-                http_options=http_options,
-                path_prefix="v1")
-
-            self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
-
-        # Return the client from cache.
-        return self._operations_client
-
-    class _CreateNodeGroup(NodeGroupControllerRestStub):
-        def __hash__(self):
-            return hash("CreateNodeGroup")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        def __call__(self,
-                request: node_groups.CreateNodeGroupRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the create node group method over HTTP.
-
-            Args:
-                request (~.node_groups.CreateNodeGroupRequest):
-                    The request object. A request to create a node group.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/regions/*/clusters/*}/nodeGroups', - 'body': 'node_group', - }, - ] - request, metadata = self._interceptor.pre_create_node_group(request, metadata) - pb_request = node_groups.CreateNodeGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_node_group(resp) - return resp - - class _GetNodeGroup(NodeGroupControllerRestStub): - def __hash__(self): - return hash("GetNodeGroup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: node_groups.GetNodeGroupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> clusters.NodeGroup: - r"""Call the get node group method over HTTP. - - Args: - request (~.node_groups.GetNodeGroupRequest): - The request object. A request to get a node group . - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.clusters.NodeGroup: - Dataproc Node Group. 
**The Dataproc ``NodeGroup`` - resource is not related to the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}', - }, - ] - request, metadata = self._interceptor.pre_get_node_group(request, metadata) - pb_request = node_groups.GetNodeGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = clusters.NodeGroup() - pb_resp = clusters.NodeGroup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_node_group(resp) - return resp - - class _ResizeNodeGroup(NodeGroupControllerRestStub): - def __hash__(self): - return hash("ResizeNodeGroup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: node_groups.ResizeNodeGroupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the resize node group method over HTTP. - - Args: - request (~.node_groups.ResizeNodeGroupRequest): - The request object. A request to resize a node group. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
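A minimal polling sketch for the long-running calls above, assuming the public ``NodeGroupControllerClient`` wrapper over this REST transport; the project, region, cluster, and node group names are placeholders:

.. code-block:: python

    # Hypothetical sketch: resource names and the timeout are illustrative only.
    from google.cloud import dataproc_v1

    client = dataproc_v1.NodeGroupControllerClient(transport="rest")

    request = dataproc_v1.ResizeNodeGroupRequest(
        name="projects/my-project/regions/us-central1/clusters/my-cluster/nodeGroups/my-group",
        size=5,
    )

    # resize_node_group returns an operation future; result() polls the LRO
    # (through the operations_client shown earlier) until it completes.
    operation = client.resize_node_group(request=request)
    node_group = operation.result(timeout=600)
    print(node_group.name)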
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}:resize', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_resize_node_group(request, metadata) - pb_request = node_groups.ResizeNodeGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_resize_node_group(resp) - return resp - - @property - def create_node_group(self) -> Callable[ - [node_groups.CreateNodeGroupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateNodeGroup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_node_group(self) -> Callable[ - [node_groups.GetNodeGroupRequest], - clusters.NodeGroup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetNodeGroup(self._session, self._host, self._interceptor) # type: ignore - - @property - def resize_node_group(self) -> Callable[ - [node_groups.ResizeNodeGroupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ResizeNodeGroup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(NodeGroupControllerRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(NodeGroupControllerRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(NodeGroupControllerRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
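A rough sketch of reaching these IAM mixins from user code; the resource path and permission string are placeholders, and it assumes the public client surfaces the mixins as methods, as the stubs here suggest:

.. code-block:: python

    # Hypothetical sketch: the resource path and permission are illustrative.
    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2

    client = dataproc_v1.NodeGroupControllerClient(transport="rest")
    resource = "projects/my-project/regions/us-central1/clusters/my-cluster"

    # Read the IAM policy attached to a cluster resource.
    policy = client.get_iam_policy(
        iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )

    # Ask which of the listed permissions the caller actually holds.
    response = client.test_iam_permissions(
        iam_policy_pb2.TestIamPermissionsRequest(
            resource=resource,
            permissions=["dataproc.clusters.get"],
        )
    )
    print(policy.etag, list(response.permissions))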
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(NodeGroupControllerRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(NodeGroupControllerRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(NodeGroupControllerRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(NodeGroupControllerRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
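The operations mixins can be exercised the same way; a short sketch with a placeholder region path, again assuming the public client exposes them:

.. code-block:: python

    # Hypothetical sketch: the region path is illustrative.
    from google.cloud import dataproc_v1
    from google.longrunning import operations_pb2

    client = dataproc_v1.NodeGroupControllerClient(transport="rest")

    # List operations under a region, then re-fetch each one by name.
    listing = client.list_operations(
        operations_pb2.ListOperationsRequest(
            name="projects/my-project/regions/us-central1/operations"
        )
    )
    for op in listing.operations:
        latest = client.get_operation(
            operations_pb2.GetOperationRequest(name=op.name)
        )
        print(latest.name, latest.done)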
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'NodeGroupControllerRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py deleted file mode 100644 index 947e61ae..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import WorkflowTemplateServiceClient -from .async_client import WorkflowTemplateServiceAsyncClient - -__all__ = ( - 'WorkflowTemplateServiceClient', - 'WorkflowTemplateServiceAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py deleted file mode 100644 index 141ad5e7..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ /dev/null @@ -1,1680 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.workflow_template_service import pagers -from google.cloud.dataproc_v1.types import workflow_templates -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport -from .client import WorkflowTemplateServiceClient - - -class WorkflowTemplateServiceAsyncClient: - """The API interface for managing Workflow Templates in the - Dataproc API. 
- """ - - _client: WorkflowTemplateServiceClient - - DEFAULT_ENDPOINT = WorkflowTemplateServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = WorkflowTemplateServiceClient.DEFAULT_MTLS_ENDPOINT - - node_group_path = staticmethod(WorkflowTemplateServiceClient.node_group_path) - parse_node_group_path = staticmethod(WorkflowTemplateServiceClient.parse_node_group_path) - service_path = staticmethod(WorkflowTemplateServiceClient.service_path) - parse_service_path = staticmethod(WorkflowTemplateServiceClient.parse_service_path) - workflow_template_path = staticmethod(WorkflowTemplateServiceClient.workflow_template_path) - parse_workflow_template_path = staticmethod(WorkflowTemplateServiceClient.parse_workflow_template_path) - common_billing_account_path = staticmethod(WorkflowTemplateServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(WorkflowTemplateServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(WorkflowTemplateServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(WorkflowTemplateServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(WorkflowTemplateServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(WorkflowTemplateServiceClient.parse_common_organization_path) - common_project_path = staticmethod(WorkflowTemplateServiceClient.common_project_path) - parse_common_project_path = staticmethod(WorkflowTemplateServiceClient.parse_common_project_path) - common_location_path = staticmethod(WorkflowTemplateServiceClient.common_location_path) - parse_common_location_path = staticmethod(WorkflowTemplateServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - WorkflowTemplateServiceAsyncClient: The constructed client. - """ - return WorkflowTemplateServiceClient.from_service_account_info.__func__(WorkflowTemplateServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - WorkflowTemplateServiceAsyncClient: The constructed client. - """ - return WorkflowTemplateServiceClient.from_service_account_file.__func__(WorkflowTemplateServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. 
-
- The API endpoint is determined in the following order:
- (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
- default mTLS endpoint; if the environment variable is "never", use the default API
- endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
- use the default API endpoint.
-
- More details can be found at https://google.aip.dev/auth/4114.
-
- Args:
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client. Only the `api_endpoint` and `client_cert_source` properties may be used
- in this method.
-
- Returns:
- Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
- client cert source to use.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If any errors happen.
- """
- return WorkflowTemplateServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
-
- @property
- def transport(self) -> WorkflowTemplateServiceTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- WorkflowTemplateServiceTransport: The transport used by the client instance.
- """
- return self._client.transport
-
- get_transport_class = functools.partial(type(WorkflowTemplateServiceClient).get_transport_class, type(WorkflowTemplateServiceClient))
-
- def __init__(self, *,
- credentials: Optional[ga_credentials.Credentials] = None,
- transport: Union[str, WorkflowTemplateServiceTransport] = "grpc_asyncio",
- client_options: Optional[ClientOptions] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- ) -> None:
- """Instantiates the workflow template service client.
-
- Args:
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- transport (Union[str, ~.WorkflowTemplateServiceTransport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
-
- Raises:
- google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
- creation failed for any reason.
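For example, a minimal construction sketch following the endpoint rules above; the regional endpoint value is a placeholder:

.. code-block:: python

    # Hypothetical sketch: the regional endpoint is illustrative.
    from google.api_core.client_options import ClientOptions
    from google.cloud import dataproc_v1

    # api_endpoint takes precedence over GOOGLE_API_USE_MTLS_ENDPOINT,
    # as described above.
    options = ClientOptions(api_endpoint="us-central1-dataproc.googleapis.com")
    client = dataproc_v1.WorkflowTemplateServiceAsyncClient(client_options=options)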
- """ - self._client = WorkflowTemplateServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_workflow_template(self, - request: Optional[Union[workflow_templates.CreateWorkflowTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - template: Optional[workflow_templates.WorkflowTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> workflow_templates.WorkflowTemplate: - r"""Creates new workflow template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_create_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceAsyncClient() - - # Initialize request argument(s) - template = dataproc_v1.WorkflowTemplate() - template.id = "id_value" - template.placement.managed_cluster.cluster_name = "cluster_name_value" - template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - template.jobs.step_id = "step_id_value" - - request = dataproc_v1.CreateWorkflowTemplateRequest( - parent="parent_value", - template=template, - ) - - # Make the request - response = await client.create_workflow_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest, dict]]): - The request object. A request to create a workflow - template. - parent (:class:`str`): - Required. The resource name of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.create``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.create``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): - Required. The Dataproc workflow - template to create. - - This corresponds to the ``template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.WorkflowTemplate: - A Dataproc workflow template - resource. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = workflow_templates.CreateWorkflowTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if template is not None: - request.template = template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_workflow_template(self, - request: Optional[Union[workflow_templates.GetWorkflowTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> workflow_templates.WorkflowTemplate: - r"""Retrieves the latest workflow template. - Can retrieve previously instantiated template by - specifying optional version parameter. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_get_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetWorkflowTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_workflow_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest, dict]]): - The request object. A request to fetch a workflow - template. - name (:class:`str`): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.get``, the - resource name of the template has the following - format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.get``, the - resource name of the template has the following - format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.types.WorkflowTemplate: - A Dataproc workflow template - resource. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = workflow_templates.GetWorkflowTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def instantiate_workflow_template(self, - request: Optional[Union[workflow_templates.InstantiateWorkflowTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - parameters: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Instantiates a template and begins execution. - - The returned Operation can be used to track execution of - workflow by polling - [operations.get][google.longrunning.Operations.GetOperation]. - The Operation will complete when entire workflow is finished. - - The running workflow can be aborted via - [operations.cancel][google.longrunning.Operations.CancelOperation]. - This will cause any inflight jobs to be cancelled and - workflow-owned clusters to be deleted. - - The [Operation.metadata][google.longrunning.Operation.metadata] - will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, - [Operation.response][google.longrunning.Operation.response] will - be [Empty][google.protobuf.Empty]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_instantiate_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.InstantiateWorkflowTemplateRequest( - name="name_value", - ) - - # Make the request - operation = client.instantiate_workflow_template(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest, dict]]): - The request object. A request to instantiate a workflow - template. - name (:class:`str`): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For - ``projects.regions.workflowTemplates.instantiate``, - the resource name of the template has the following - format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For - ``projects.locations.workflowTemplates.instantiate``, - the resource name of the template has the following - format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - parameters (:class:`MutableMapping[str, str]`): - Optional. Map from parameter names to - values that should be used for those - parameters. Values may not exceed 1000 - characters. - - This corresponds to the ``parameters`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, parameters]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = workflow_templates.InstantiateWorkflowTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - if parameters: - request.parameters.update(parameters) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.instantiate_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=workflow_templates.WorkflowMetadata, - ) - - # Done; return the response. - return response - - async def instantiate_inline_workflow_template(self, - request: Optional[Union[workflow_templates.InstantiateInlineWorkflowTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - template: Optional[workflow_templates.WorkflowTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operation_async.AsyncOperation: - r"""Instantiates a template and begins execution. - - This method is equivalent to executing the sequence - [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], - [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], - [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. - - The returned Operation can be used to track execution of - workflow by polling - [operations.get][google.longrunning.Operations.GetOperation]. - The Operation will complete when entire workflow is finished. - - The running workflow can be aborted via - [operations.cancel][google.longrunning.Operations.CancelOperation]. - This will cause any inflight jobs to be cancelled and - workflow-owned clusters to be deleted. - - The [Operation.metadata][google.longrunning.Operation.metadata] - will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, - [Operation.response][google.longrunning.Operation.response] will - be [Empty][google.protobuf.Empty]. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_instantiate_inline_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceAsyncClient() - - # Initialize request argument(s) - template = dataproc_v1.WorkflowTemplate() - template.id = "id_value" - template.placement.managed_cluster.cluster_name = "cluster_name_value" - template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - template.jobs.step_id = "step_id_value" - - request = dataproc_v1.InstantiateInlineWorkflowTemplateRequest( - parent="parent_value", - template=template, - ) - - # Make the request - operation = client.instantiate_inline_workflow_template(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest, dict]]): - The request object. A request to instantiate an inline - workflow template. - parent (:class:`str`): - Required. The resource name of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For - ``projects.regions.workflowTemplates,instantiateinline``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For - ``projects.locations.workflowTemplates.instantiateinline``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): - Required. The workflow template to - instantiate. - - This corresponds to the ``template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = workflow_templates.InstantiateInlineWorkflowTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if template is not None: - request.template = template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.instantiate_inline_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=workflow_templates.WorkflowMetadata, - ) - - # Done; return the response. - return response - - async def update_workflow_template(self, - request: Optional[Union[workflow_templates.UpdateWorkflowTemplateRequest, dict]] = None, - *, - template: Optional[workflow_templates.WorkflowTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> workflow_templates.WorkflowTemplate: - r"""Updates (replaces) workflow template. The updated - template must contain version that matches the current - server version. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_update_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceAsyncClient() - - # Initialize request argument(s) - template = dataproc_v1.WorkflowTemplate() - template.id = "id_value" - template.placement.managed_cluster.cluster_name = "cluster_name_value" - template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - template.jobs.step_id = "step_id_value" - - request = dataproc_v1.UpdateWorkflowTemplateRequest( - template=template, - ) - - # Make the request - response = await client.update_workflow_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest, dict]]): - The request object. A request to update a workflow - template. - template (:class:`google.cloud.dataproc_v1.types.WorkflowTemplate`): - Required. The updated workflow template. - - The ``template.version`` field must match the current - version. - - This corresponds to the ``template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
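Because ``template.version`` must match the server's current version, a plausible read-modify-write sketch (the template name and label are placeholders):

.. code-block:: python

    # Hypothetical sketch: the template name and label values are illustrative.
    import asyncio

    from google.cloud import dataproc_v1

    async def bump_template() -> None:
        client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
        name = (
            "projects/my-project/regions/us-central1/"
            "workflowTemplates/my-template"
        )

        # Fetch the latest version so template.version matches the server's.
        template = await client.get_workflow_template(name=name)
        template.labels["env"] = "staging"

        # The update is rejected if another writer bumped the version meanwhile.
        updated = await client.update_workflow_template(template=template)
        print(updated.version)

    asyncio.run(bump_template())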
- - Returns: - google.cloud.dataproc_v1.types.WorkflowTemplate: - A Dataproc workflow template - resource. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = workflow_templates.UpdateWorkflowTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if template is not None: - request.template = template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("template.name", request.template.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_workflow_templates(self, - request: Optional[Union[workflow_templates.ListWorkflowTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListWorkflowTemplatesAsyncPager: - r"""Lists workflows that match the specified filter in - the request. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_list_workflow_templates(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ListWorkflowTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workflow_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest, dict]]): - The request object. A request to list workflow templates - in a project. - parent (:class:`str`): - Required. The resource name of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. 
- - - For ``projects.regions.workflowTemplates,list``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.list``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesAsyncPager: - A response to a request to list - workflow templates in a project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = workflow_templates.ListWorkflowTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_workflow_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListWorkflowTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_workflow_template(self, - request: Optional[Union[workflow_templates.DeleteWorkflowTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a workflow template. It does not cancel - in-progress workflows. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - async def sample_delete_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteWorkflowTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_workflow_template(request=request) - - Args: - request (Optional[Union[google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest, dict]]): - The request object. A request to delete a workflow - template. - Currently started workflows will remain - running. - name (:class:`str`): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.delete``, - the resource name of the template has the following - format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For - ``projects.locations.workflowTemplates.instantiate``, - the resource name of the template has the following - format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = workflow_templates.DeleteWorkflowTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. 
Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. 
Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._client._transport.set_iam_policy,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
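The IAM helpers in this mixin ship without generated samples. A minimal sketch of reading a template's policy with the async client; the project, region, and template names are placeholders, not values taken from this diff:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.iam.v1 import iam_policy_pb2

    async def show_template_policy():
        client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
        resource = "projects/my-project/regions/us-central1/workflowTemplates/my-template"

        # An empty policy is returned if the resource has no policy set.
        policy = await client.get_iam_policy(
            request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
        )
        for binding in policy.bindings:
            print(binding.role, list(binding.members))
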
-    async def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method.wrap_method(
-            self._client._transport.get_iam_policy,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, str]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a function.
-
-        If the function does not exist, this will return an empty set
-        of permissions, not a NOT_FOUND error.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
-                The request object. Request message for
-                `TestIamPermissions` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "WorkflowTemplateServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "WorkflowTemplateServiceAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/client.py deleted file mode 100644 index 3a8b41bc..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ /dev/null @@ -1,1852 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
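The async client above closes with an ``__aenter__``/``__aexit__`` pair, so it can be driven as an async context manager. A minimal sketch, assuming default credentials; the project and region are placeholders:

.. code-block:: python

    import asyncio

    from google.cloud import dataproc_v1

    async def main():
        # The context manager closes the underlying transport on exit.
        async with dataproc_v1.WorkflowTemplateServiceAsyncClient() as client:
            pager = await client.list_workflow_templates(
                parent="projects/my-project/regions/us-central1"
            )
            async for template in pager:
                print(template.id)

    asyncio.run(main())
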
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataproc_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.dataproc_v1.services.workflow_template_service import pagers -from google.cloud.dataproc_v1.types import workflow_templates -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import WorkflowTemplateServiceGrpcTransport -from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport -from .transports.rest import WorkflowTemplateServiceRestTransport - - -class WorkflowTemplateServiceClientMeta(type): - """Metaclass for the WorkflowTemplateService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[WorkflowTemplateServiceTransport]] - _transport_registry["grpc"] = WorkflowTemplateServiceGrpcTransport - _transport_registry["grpc_asyncio"] = WorkflowTemplateServiceGrpcAsyncIOTransport - _transport_registry["rest"] = WorkflowTemplateServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[WorkflowTemplateServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class WorkflowTemplateServiceClient(metaclass=WorkflowTemplateServiceClientMeta): - """The API interface for managing Workflow Templates in the - Dataproc API. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. 
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "dataproc.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            WorkflowTemplateServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            WorkflowTemplateServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> WorkflowTemplateServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            WorkflowTemplateServiceTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
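A short sketch of the two credential constructors above; the key-file path is illustrative only:

.. code-block:: python

    import json

    from google.cloud import dataproc_v1

    # Build a client from a service account key file on disk.
    client = dataproc_v1.WorkflowTemplateServiceClient.from_service_account_file(
        "service-account.json"
    )

    # from_service_account_info accepts the parsed key JSON instead.
    with open("service-account.json") as f:
        client = dataproc_v1.WorkflowTemplateServiceClient.from_service_account_info(
            json.load(f)
        )
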
-    @staticmethod
-    def node_group_path(project: str,region: str,cluster: str,node_group: str,) -> str:
-        """Returns a fully-qualified node_group string."""
-        return "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(project=project, region=region, cluster=cluster, node_group=node_group, )
-
-    @staticmethod
-    def parse_node_group_path(path: str) -> Dict[str,str]:
-        """Parses a node_group path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/clusters/(?P<cluster>.+?)/nodeGroups/(?P<node_group>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def service_path(project: str,location: str,service: str,) -> str:
-        """Returns a fully-qualified service string."""
-        return "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, )
-
-    @staticmethod
-    def parse_service_path(path: str) -> Dict[str,str]:
-        """Parses a service path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/services/(?P<service>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def workflow_template_path(project: str,region: str,workflow_template: str,) -> str:
-        """Returns a fully-qualified workflow_template string."""
-        return "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format(project=project, region=region, workflow_template=workflow_template, )
-
-    @staticmethod
-    def parse_workflow_template_path(path: str) -> Dict[str,str]:
-        """Parses a workflow_template path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/workflowTemplates/(?P<workflow_template>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
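The path helpers above are pure string utilities, so they can be exercised without credentials. A small round-trip sketch with placeholder values:

.. code-block:: python

    from google.cloud import dataproc_v1

    Client = dataproc_v1.WorkflowTemplateServiceClient

    name = Client.workflow_template_path("my-project", "us-central1", "my-template")
    # -> 'projects/my-project/regions/us-central1/workflowTemplates/my-template'

    assert Client.parse_workflow_template_path(name) == {
        "project": "my-project",
        "region": "us-central1",
        "workflow_template": "my-template",
    }
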
string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, WorkflowTemplateServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the workflow template service client. 
- - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, WorkflowTemplateServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, WorkflowTemplateServiceTransport): - # transport is a WorkflowTemplateServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_workflow_template(self, - request: Optional[Union[workflow_templates.CreateWorkflowTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - template: Optional[workflow_templates.WorkflowTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> workflow_templates.WorkflowTemplate: - r"""Creates new workflow template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_create_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Initialize request argument(s) - template = dataproc_v1.WorkflowTemplate() - template.id = "id_value" - template.placement.managed_cluster.cluster_name = "cluster_name_value" - template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - template.jobs.step_id = "step_id_value" - - request = dataproc_v1.CreateWorkflowTemplateRequest( - parent="parent_value", - template=template, - ) - - # Make the request - response = client.create_workflow_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest, dict]): - The request object. A request to create a workflow - template. - parent (str): - Required. The resource name of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.create``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.create``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - template (google.cloud.dataproc_v1.types.WorkflowTemplate): - Required. The Dataproc workflow - template to create. - - This corresponds to the ``template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataproc_v1.types.WorkflowTemplate: - A Dataproc workflow template - resource. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a workflow_templates.CreateWorkflowTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, workflow_templates.CreateWorkflowTemplateRequest): - request = workflow_templates.CreateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if template is not None: - request.template = template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_workflow_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_workflow_template(self, - request: Optional[Union[workflow_templates.GetWorkflowTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> workflow_templates.WorkflowTemplate: - r"""Retrieves the latest workflow template. - Can retrieve previously instantiated template by - specifying optional version parameter. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_get_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.GetWorkflowTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_workflow_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest, dict]): - The request object. A request to fetch a workflow - template. - name (str): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. 
-
-                -  For ``projects.regions.workflowTemplates.get``, the
-                   resource name of the template has the following
-                   format:
-                   ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
-
-                -  For ``projects.locations.workflowTemplates.get``, the
-                   resource name of the template has the following
-                   format:
-                   ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataproc_v1.types.WorkflowTemplate:
-                A Dataproc workflow template
-                resource.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a workflow_templates.GetWorkflowTemplateRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, workflow_templates.GetWorkflowTemplateRequest):
-            request = workflow_templates.GetWorkflowTemplateRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_workflow_template]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def instantiate_workflow_template(self,
-            request: Optional[Union[workflow_templates.InstantiateWorkflowTemplateRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            parameters: Optional[MutableMapping[str, str]] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation.Operation:
-        r"""Instantiates a template and begins execution.
-
-        The returned Operation can be used to track execution of
-        workflow by polling
-        [operations.get][google.longrunning.Operations.GetOperation].
-        The Operation will complete when entire workflow is finished.
-
-        The running workflow can be aborted via
-        [operations.cancel][google.longrunning.Operations.CancelOperation].
-        This will cause any inflight jobs to be cancelled and
-        workflow-owned clusters to be deleted.
-
-        The [Operation.metadata][google.longrunning.Operation.metadata]
-        will be
-        `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
-        Also see `Using
-        WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
-
-        On successful completion,
-        [Operation.response][google.longrunning.Operation.response] will
-        be [Empty][google.protobuf.Empty].
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataproc_v1
-
-            def sample_instantiate_workflow_template():
-                # Create a client
-                client = dataproc_v1.WorkflowTemplateServiceClient()
-
-                # Initialize request argument(s)
-                request = dataproc_v1.InstantiateWorkflowTemplateRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.instantiate_workflow_template(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest, dict]):
-                The request object. A request to instantiate a workflow
-                template.
-            name (str):
-                Required. The resource name of the workflow template, as
-                described in
-                https://cloud.google.com/apis/design/resource_names.
-
-                -  For
-                   ``projects.regions.workflowTemplates.instantiate``,
-                   the resource name of the template has the following
-                   format:
-                   ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
-
-                -  For
-                   ``projects.locations.workflowTemplates.instantiate``,
-                   the resource name of the template has the following
-                   format:
-                   ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            parameters (MutableMapping[str, str]):
-                Optional. Map from parameter names to
-                values that should be used for those
-                parameters. Values may not exceed 1000
-                characters.
-
-                This corresponds to the ``parameters`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # Quick check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name, parameters])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a workflow_templates.InstantiateWorkflowTemplateRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, workflow_templates.InstantiateWorkflowTemplateRequest):
-            request = workflow_templates.InstantiateWorkflowTemplateRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-        if parameters is not None:
-            request.parameters = parameters
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.instantiate_workflow_template]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=workflow_templates.WorkflowMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
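Beyond the generated snippet, the returned future carries the ``WorkflowMetadata`` described above. A hedged sketch of inspecting it while the workflow runs; all resource names and parameter values are placeholders:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()

    operation = client.instantiate_workflow_template(
        name="projects/my-project/regions/us-central1/workflowTemplates/my-template",
        parameters={"CLUSTER": "demo-cluster"},
    )

    # operation.metadata deserializes to WorkflowMetadata (it may be None
    # until the server has populated it).
    if operation.metadata is not None:
        print(operation.metadata.state)

    operation.result()  # blocks until the entire workflow finishes
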
-    def instantiate_inline_workflow_template(self,
-            request: Optional[Union[workflow_templates.InstantiateInlineWorkflowTemplateRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            template: Optional[workflow_templates.WorkflowTemplate] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> operation.Operation:
-        r"""Instantiates a template and begins execution.
-
-        This method is equivalent to executing the sequence
-        [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate],
-        [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
-        [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
-
-        The returned Operation can be used to track execution of
-        workflow by polling
-        [operations.get][google.longrunning.Operations.GetOperation].
-        The Operation will complete when entire workflow is finished.
-
-        The running workflow can be aborted via
-        [operations.cancel][google.longrunning.Operations.CancelOperation].
-        This will cause any inflight jobs to be cancelled and
-        workflow-owned clusters to be deleted.
-
-        The [Operation.metadata][google.longrunning.Operation.metadata]
-        will be
-        `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
-        Also see `Using
-        WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
-
-        On successful completion,
-        [Operation.response][google.longrunning.Operation.response] will
-        be [Empty][google.protobuf.Empty].
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_instantiate_inline_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Initialize request argument(s) - template = dataproc_v1.WorkflowTemplate() - template.id = "id_value" - template.placement.managed_cluster.cluster_name = "cluster_name_value" - template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - template.jobs.step_id = "step_id_value" - - request = dataproc_v1.InstantiateInlineWorkflowTemplateRequest( - parent="parent_value", - template=template, - ) - - # Make the request - operation = client.instantiate_inline_workflow_template(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest, dict]): - The request object. A request to instantiate an inline - workflow template. - parent (str): - Required. The resource name of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. - - - For - ``projects.regions.workflowTemplates,instantiateinline``, - the resource name of the region has the following - format: ``projects/{project_id}/regions/{region}`` - - - For - ``projects.locations.workflowTemplates.instantiateinline``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - template (google.cloud.dataproc_v1.types.WorkflowTemplate): - Required. The workflow template to - instantiate. - - This corresponds to the ``template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a workflow_templates.InstantiateInlineWorkflowTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, workflow_templates.InstantiateInlineWorkflowTemplateRequest): - request = workflow_templates.InstantiateInlineWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if template is not None: - request.template = template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.instantiate_inline_workflow_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=workflow_templates.WorkflowMetadata, - ) - - # Done; return the response. - return response - - def update_workflow_template(self, - request: Optional[Union[workflow_templates.UpdateWorkflowTemplateRequest, dict]] = None, - *, - template: Optional[workflow_templates.WorkflowTemplate] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> workflow_templates.WorkflowTemplate: - r"""Updates (replaces) workflow template. The updated - template must contain version that matches the current - server version. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_update_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Initialize request argument(s) - template = dataproc_v1.WorkflowTemplate() - template.id = "id_value" - template.placement.managed_cluster.cluster_name = "cluster_name_value" - template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value" - template.jobs.step_id = "step_id_value" - - request = dataproc_v1.UpdateWorkflowTemplateRequest( - template=template, - ) - - # Make the request - response = client.update_workflow_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest, dict]): - The request object. A request to update a workflow - template. - template (google.cloud.dataproc_v1.types.WorkflowTemplate): - Required. The updated workflow template. - - The ``template.version`` field must match the current - version. - - This corresponds to the ``template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataproc_v1.types.WorkflowTemplate: - A Dataproc workflow template - resource. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([template]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a workflow_templates.UpdateWorkflowTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, workflow_templates.UpdateWorkflowTemplateRequest): - request = workflow_templates.UpdateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if template is not None: - request.template = template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_workflow_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("template.name", request.template.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_workflow_templates(self, - request: Optional[Union[workflow_templates.ListWorkflowTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListWorkflowTemplatesPager: - r"""Lists workflows that match the specified filter in - the request. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_list_workflow_templates(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.ListWorkflowTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workflow_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest, dict]): - The request object. A request to list workflow templates - in a project. - parent (str): - Required. The resource name of the region or location, - as described in - https://cloud.google.com/apis/design/resource_names. 
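Because ``update_workflow_template`` (shown above) requires ``template.version`` to match the server's current version, updates are naturally written as a read-modify-write cycle. A minimal sketch, assuming a hypothetical template name:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()

    name = "projects/my-project/regions/us-central1/workflowTemplates/my-template"

    # get_workflow_template returns the current version, so sending the
    # mutated object back satisfies the optimistic-concurrency check.
    template = client.get_workflow_template(name=name)
    template.labels["env"] = "prod"
    template = client.update_workflow_template(template=template)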
- - - For ``projects.regions.workflowTemplates,list``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.list``, - the resource name of the location has the following - format: - ``projects/{project_id}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesPager: - A response to a request to list - workflow templates in a project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a workflow_templates.ListWorkflowTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, workflow_templates.ListWorkflowTemplatesRequest): - request = workflow_templates.ListWorkflowTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_workflow_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListWorkflowTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_workflow_template(self, - request: Optional[Union[workflow_templates.DeleteWorkflowTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a workflow template. It does not cancel - in-progress workflows. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataproc_v1 - - def sample_delete_workflow_template(): - # Create a client - client = dataproc_v1.WorkflowTemplateServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteWorkflowTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_workflow_template(request=request) - - Args: - request (Union[google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest, dict]): - The request object. A request to delete a workflow - template. - Currently started workflows will remain - running. - name (str): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.delete``, - the resource name of the template has the following - format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For - ``projects.locations.workflowTemplates.instantiate``, - the resource name of the template has the following - format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a workflow_templates.DeleteWorkflowTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, workflow_templates.DeleteWorkflowTemplateRequest): - request = workflow_templates.DeleteWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_workflow_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "WorkflowTemplateServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
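Given that warning, the context-manager form is the safest way to release a short-lived client's transport, since ``__exit__`` closes it automatically. A minimal sketch with a hypothetical template name:

.. code-block:: python

    from google.cloud import dataproc_v1

    with dataproc_v1.WorkflowTemplateServiceClient() as client:
        client.delete_workflow_template(
            name="projects/my-project/regions/us-central1/workflowTemplates/old-template",
        )
    # The transport, and the channel behind it, is closed here.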
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Send the request. 
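The operations helpers above accept either a protobuf request or a plain dict, which they expand via keyword arguments. A minimal polling-and-cancel sketch, with a hypothetical operation name:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()

    # Hypothetical; a real name can be read from operation.operation.name
    # after starting a workflow instantiation.
    op_name = "projects/my-project/regions/us-central1/operations/op-123"

    op = client.get_operation({"name": op_name})
    if not op.done:
        # Best effort only; the server does not guarantee cancellation.
        client.cancel_operation({"name": op_name})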
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
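The two IAM methods above pair naturally: read the policy, mutate it locally, and write it back. The returned policy carries an ``etag``, so the write can fail cleanly if someone else changed the policy in between. A minimal sketch against a hypothetical workflow-template resource:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()

    resource = "projects/my-project/regions/us-central1/workflowTemplates/my-template"

    policy = client.get_iam_policy({"resource": resource})
    policy.bindings.add(
        role="roles/viewer",
        members=["user:eve@example.com"],
    )
    client.set_iam_policy({"resource": resource, "policy": policy})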
- return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "WorkflowTemplateServiceClient", -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py deleted file mode 100644 index 0c495b7a..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataproc_v1.types import workflow_templates - - -class ListWorkflowTemplatesPager: - """A pager for iterating through ``list_workflow_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``templates`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListWorkflowTemplates`` requests and continue to iterate - through the ``templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., workflow_templates.ListWorkflowTemplatesResponse], - request: workflow_templates.ListWorkflowTemplatesRequest, - response: workflow_templates.ListWorkflowTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = workflow_templates.ListWorkflowTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[workflow_templates.ListWorkflowTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[workflow_templates.WorkflowTemplate]: - for page in self.pages: - yield from page.templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListWorkflowTemplatesAsyncPager: - """A pager for iterating through ``list_workflow_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListWorkflowTemplates`` requests and continue to iterate - through the ``templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[workflow_templates.ListWorkflowTemplatesResponse]], - request: workflow_templates.ListWorkflowTemplatesRequest, - response: workflow_templates.ListWorkflowTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest): - The initial request object. - response (google.cloud.dataproc_v1.types.ListWorkflowTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
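In practice the pager above is rarely constructed directly; it comes back from ``list_workflow_templates`` and can be consumed per item or per page. A minimal sketch with a hypothetical parent:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()
    parent = "projects/my-project/regions/us-central1"

    # Item-level iteration; further ListWorkflowTemplates calls are made
    # lazily as each page is exhausted.
    for template in client.list_workflow_templates(parent=parent):
        print(template.id)

    # Page-level iteration, when per-response fields are needed.
    for page in client.list_workflow_templates(parent=parent).pages:
        print(len(page.templates), page.next_page_token)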
- """ - self._method = method - self._request = workflow_templates.ListWorkflowTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[workflow_templates.ListWorkflowTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[workflow_templates.WorkflowTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py deleted file mode 100644 index 7b92bbd4..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import WorkflowTemplateServiceTransport -from .grpc import WorkflowTemplateServiceGrpcTransport -from .grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport -from .rest import WorkflowTemplateServiceRestTransport -from .rest import WorkflowTemplateServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[WorkflowTemplateServiceTransport]] -_transport_registry['grpc'] = WorkflowTemplateServiceGrpcTransport -_transport_registry['grpc_asyncio'] = WorkflowTemplateServiceGrpcAsyncIOTransport -_transport_registry['rest'] = WorkflowTemplateServiceRestTransport - -__all__ = ( - 'WorkflowTemplateServiceTransport', - 'WorkflowTemplateServiceGrpcTransport', - 'WorkflowTemplateServiceGrpcAsyncIOTransport', - 'WorkflowTemplateServiceRestTransport', - 'WorkflowTemplateServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py deleted file mode 100644 index 90509104..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py +++ /dev/null @@ -1,355 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataproc_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataproc_v1.types import workflow_templates -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class WorkflowTemplateServiceTransport(abc.ABC): - """Abstract transport class for WorkflowTemplateService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dataproc.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
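Most callers never construct a transport directly; they rely on this default plumbing and, at most, point the client at a regional endpoint, which is the caveat the generated samples keep repeating. A minimal sketch, with the region a hypothetical choice:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import dataproc_v1

    # Dataproc is a regional service; <region>-dataproc.googleapis.com
    # routes requests to that region instead of the global default.
    options = ClientOptions(api_endpoint="us-central1-dataproc.googleapis.com:443")
    client = dataproc_v1.WorkflowTemplateServiceClient(client_options=options)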
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_workflow_template: gapic_v1.method.wrap_method( - self.create_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.get_workflow_template: gapic_v1.method.wrap_method( - self.get_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.instantiate_workflow_template: gapic_v1.method.wrap_method( - self.instantiate_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.instantiate_inline_workflow_template: gapic_v1.method.wrap_method( - self.instantiate_inline_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.update_workflow_template: gapic_v1.method.wrap_method( - self.update_workflow_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.list_workflow_templates: gapic_v1.method.wrap_method( - self.list_workflow_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - self.delete_workflow_template: gapic_v1.method.wrap_method( - self.delete_workflow_template, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=600.0, - ), - default_timeout=600.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_workflow_template(self) -> Callable[ - [workflow_templates.CreateWorkflowTemplateRequest], - Union[ - workflow_templates.WorkflowTemplate, - Awaitable[workflow_templates.WorkflowTemplate] - ]]: - raise NotImplementedError() - - @property - def get_workflow_template(self) -> Callable[ - [workflow_templates.GetWorkflowTemplateRequest], - Union[ - workflow_templates.WorkflowTemplate, - Awaitable[workflow_templates.WorkflowTemplate] - ]]: - raise NotImplementedError() - - @property - def instantiate_workflow_template(self) -> Callable[ - [workflow_templates.InstantiateWorkflowTemplateRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def instantiate_inline_workflow_template(self) -> Callable[ - [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_workflow_template(self) -> Callable[ - [workflow_templates.UpdateWorkflowTemplateRequest], - Union[ - workflow_templates.WorkflowTemplate, - Awaitable[workflow_templates.WorkflowTemplate] - ]]: - raise NotImplementedError() - - @property - def list_workflow_templates(self) -> Callable[ - [workflow_templates.ListWorkflowTemplatesRequest], - Union[ - workflow_templates.ListWorkflowTemplatesResponse, - Awaitable[workflow_templates.ListWorkflowTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_workflow_template(self) -> Callable[ - [workflow_templates.DeleteWorkflowTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - 
[iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'WorkflowTemplateServiceTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py deleted file mode 100644 index c7f94706..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py +++ /dev/null @@ -1,647 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataproc_v1.types import workflow_templates -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO - - -class WorkflowTemplateServiceGrpcTransport(WorkflowTemplateServiceTransport): - """gRPC backend transport for WorkflowTemplateService. - - The API interface for managing Workflow Templates in the - Dataproc API. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. 
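For callers who need full control over the channel (custom options, interceptors, and so on), ``create_channel`` above can be combined with the transport and client constructors. A sketch under the assumption that the staging package layout shown in ``transports/__init__.py`` is importable as-is:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.services.workflow_template_service.transports import (
        WorkflowTemplateServiceGrpcTransport,
    )

    # create_channel resolves application default credentials and the
    # cloud-platform scope; the host here is a hypothetical regional one.
    channel = WorkflowTemplateServiceGrpcTransport.create_channel(
        "us-central1-dataproc.googleapis.com:443",
    )

    # As __init__ above notes, credentials are ignored once a channel is
    # supplied, so the channel must already be fully configured.
    transport = WorkflowTemplateServiceGrpcTransport(channel=channel)
    client = dataproc_v1.WorkflowTemplateServiceClient(transport=transport)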
-        if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsClient(
-                self.grpc_channel
-            )
-
-        # Return the client from cache.
-        return self._operations_client
-
-    @property
-    def create_workflow_template(self) -> Callable[
-            [workflow_templates.CreateWorkflowTemplateRequest],
-            workflow_templates.WorkflowTemplate]:
-        r"""Return a callable for the create workflow template method over gRPC.
-
-        Creates new workflow template.
-
-        Returns:
-            Callable[[~.CreateWorkflowTemplateRequest],
-                    ~.WorkflowTemplate]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_workflow_template' not in self._stubs:
-            self._stubs['create_workflow_template'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate',
-                request_serializer=workflow_templates.CreateWorkflowTemplateRequest.serialize,
-                response_deserializer=workflow_templates.WorkflowTemplate.deserialize,
-            )
-        return self._stubs['create_workflow_template']
-
-    @property
-    def get_workflow_template(self) -> Callable[
-            [workflow_templates.GetWorkflowTemplateRequest],
-            workflow_templates.WorkflowTemplate]:
-        r"""Return a callable for the get workflow template method over gRPC.
-
-        Retrieves the latest workflow template.
-        Can retrieve previously instantiated template by
-        specifying optional version parameter.
-
-        Returns:
-            Callable[[~.GetWorkflowTemplateRequest],
-                    ~.WorkflowTemplate]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_workflow_template' not in self._stubs:
-            self._stubs['get_workflow_template'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate',
-                request_serializer=workflow_templates.GetWorkflowTemplateRequest.serialize,
-                response_deserializer=workflow_templates.WorkflowTemplate.deserialize,
-            )
-        return self._stubs['get_workflow_template']
-
-    @property
-    def instantiate_workflow_template(self) -> Callable[
-            [workflow_templates.InstantiateWorkflowTemplateRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the instantiate workflow template method over gRPC.
-
-        Instantiates a template and begins execution.
-
-        The returned Operation can be used to track execution of
-        workflow by polling
-        [operations.get][google.longrunning.Operations.GetOperation].
-        The Operation will complete when entire workflow is finished.
-
-        The running workflow can be aborted via
-        [operations.cancel][google.longrunning.Operations.CancelOperation].
-        This will cause any inflight jobs to be cancelled and
-        workflow-owned clusters to be deleted.
-
-        The [Operation.metadata][google.longrunning.Operation.metadata]
-        will be
-        `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
-        Also see `Using
-        WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
-
-        On successful completion,
-        [Operation.response][google.longrunning.Operation.response] will
-        be [Empty][google.protobuf.Empty].
-
-        Returns:
-            Callable[[~.InstantiateWorkflowTemplateRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
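For reference, a minimal usage sketch of this RPC through the public client (the project, region, and template name below are hypothetical); ``instantiate_workflow_template`` returns a ``google.api_core.operation.Operation`` whose ``result()`` polls the operation until the workflow finishes:

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()
    # Hypothetical fully qualified template name.
    name = "projects/my-project/regions/us-central1/workflowTemplates/my-template"

    # The RPC starts a long-running operation; result() blocks until the
    # workflow completes, and the operation resolves to Empty on success.
    operation = client.instantiate_workflow_template(name=name)
    operation.result()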
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'instantiate_workflow_template' not in self._stubs: - self._stubs['instantiate_workflow_template'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate', - request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['instantiate_workflow_template'] - - @property - def instantiate_inline_workflow_template(self) -> Callable[ - [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - operations_pb2.Operation]: - r"""Return a callable for the instantiate inline workflow - template method over gRPC. - - Instantiates a template and begins execution. - - This method is equivalent to executing the sequence - [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], - [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], - [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. - - The returned Operation can be used to track execution of - workflow by polling - [operations.get][google.longrunning.Operations.GetOperation]. - The Operation will complete when entire workflow is finished. - - The running workflow can be aborted via - [operations.cancel][google.longrunning.Operations.CancelOperation]. - This will cause any inflight jobs to be cancelled and - workflow-owned clusters to be deleted. - - The [Operation.metadata][google.longrunning.Operation.metadata] - will be - `WorkflowMetadata `__. - Also see `Using - WorkflowMetadata `__. - - On successful completion, - [Operation.response][google.longrunning.Operation.response] will - be [Empty][google.protobuf.Empty]. - - Returns: - Callable[[~.InstantiateInlineWorkflowTemplateRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'instantiate_inline_workflow_template' not in self._stubs: - self._stubs['instantiate_inline_workflow_template'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate', - request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['instantiate_inline_workflow_template'] - - @property - def update_workflow_template(self) -> Callable[ - [workflow_templates.UpdateWorkflowTemplateRequest], - workflow_templates.WorkflowTemplate]: - r"""Return a callable for the update workflow template method over gRPC. - - Updates (replaces) workflow template. The updated - template must contain version that matches the current - server version. - - Returns: - Callable[[~.UpdateWorkflowTemplateRequest], - ~.WorkflowTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_workflow_template' not in self._stubs: - self._stubs['update_workflow_template'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate', - request_serializer=workflow_templates.UpdateWorkflowTemplateRequest.serialize, - response_deserializer=workflow_templates.WorkflowTemplate.deserialize, - ) - return self._stubs['update_workflow_template'] - - @property - def list_workflow_templates(self) -> Callable[ - [workflow_templates.ListWorkflowTemplatesRequest], - workflow_templates.ListWorkflowTemplatesResponse]: - r"""Return a callable for the list workflow templates method over gRPC. - - Lists workflows that match the specified filter in - the request. - - Returns: - Callable[[~.ListWorkflowTemplatesRequest], - ~.ListWorkflowTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_workflow_templates' not in self._stubs: - self._stubs['list_workflow_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates', - request_serializer=workflow_templates.ListWorkflowTemplatesRequest.serialize, - response_deserializer=workflow_templates.ListWorkflowTemplatesResponse.deserialize, - ) - return self._stubs['list_workflow_templates'] - - @property - def delete_workflow_template(self) -> Callable[ - [workflow_templates.DeleteWorkflowTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete workflow template method over gRPC. - - Deletes a workflow template. It does not cancel - in-progress workflows. - - Returns: - Callable[[~.DeleteWorkflowTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_workflow_template' not in self._stubs: - self._stubs['delete_workflow_template'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate', - request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_workflow_template'] - - def close(self): - self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'WorkflowTemplateServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py deleted file mode 100644 index 6d7e7ed0..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,646 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataproc_v1.types import workflow_templates -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import WorkflowTemplateServiceGrpcTransport - - -class WorkflowTemplateServiceGrpcAsyncIOTransport(WorkflowTemplateServiceTransport): - """gRPC AsyncIO backend transport for WorkflowTemplateService. - - The API interface for managing Workflow Templates in the - Dataproc API. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
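As a rough usage sketch (the host value is the documented default; nothing here goes beyond the signatures shown above), a channel can be built explicitly with this classmethod and handed to the transport constructor, which then skips its own channel creation; see ``__init__`` below:

.. code-block:: python

    from google.cloud.dataproc_v1.services.workflow_template_service.transports.grpc_asyncio import (
        WorkflowTemplateServiceGrpcAsyncIOTransport as Transport,
    )

    # With no explicit credentials, create_channel falls back to
    # google.auth.default() via grpc_helpers_async.create_channel.
    channel = Transport.create_channel("dataproc.googleapis.com")

    # Passing a ready-made channel causes the transport to ignore
    # credentials, credentials_file, and scopes, as documented below.
    transport = Transport(channel=channel)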
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataproc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_workflow_template(self) -> Callable[ - [workflow_templates.CreateWorkflowTemplateRequest], - Awaitable[workflow_templates.WorkflowTemplate]]: - r"""Return a callable for the create workflow template method over gRPC. - - Creates new workflow template. - - Returns: - Callable[[~.CreateWorkflowTemplateRequest], - Awaitable[~.WorkflowTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_workflow_template' not in self._stubs:
-            self._stubs['create_workflow_template'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate',
-                request_serializer=workflow_templates.CreateWorkflowTemplateRequest.serialize,
-                response_deserializer=workflow_templates.WorkflowTemplate.deserialize,
-            )
-        return self._stubs['create_workflow_template']
-
-    @property
-    def get_workflow_template(self) -> Callable[
-            [workflow_templates.GetWorkflowTemplateRequest],
-            Awaitable[workflow_templates.WorkflowTemplate]]:
-        r"""Return a callable for the get workflow template method over gRPC.
-
-        Retrieves the latest workflow template.
-        Can retrieve previously instantiated template by
-        specifying optional version parameter.
-
-        Returns:
-            Callable[[~.GetWorkflowTemplateRequest],
-                    Awaitable[~.WorkflowTemplate]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_workflow_template' not in self._stubs:
-            self._stubs['get_workflow_template'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate',
-                request_serializer=workflow_templates.GetWorkflowTemplateRequest.serialize,
-                response_deserializer=workflow_templates.WorkflowTemplate.deserialize,
-            )
-        return self._stubs['get_workflow_template']
-
-    @property
-    def instantiate_workflow_template(self) -> Callable[
-            [workflow_templates.InstantiateWorkflowTemplateRequest],
-            Awaitable[operations_pb2.Operation]]:
-        r"""Return a callable for the instantiate workflow template method over gRPC.
-
-        Instantiates a template and begins execution.
-
-        The returned Operation can be used to track execution of
-        workflow by polling
-        [operations.get][google.longrunning.Operations.GetOperation].
-        The Operation will complete when entire workflow is finished.
-
-        The running workflow can be aborted via
-        [operations.cancel][google.longrunning.Operations.CancelOperation].
-        This will cause any inflight jobs to be cancelled and
-        workflow-owned clusters to be deleted.
-
-        The [Operation.metadata][google.longrunning.Operation.metadata]
-        will be
-        `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
-        Also see `Using
-        WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
-
-        On successful completion,
-        [Operation.response][google.longrunning.Operation.response] will
-        be [Empty][google.protobuf.Empty].
-
-        Returns:
-            Callable[[~.InstantiateWorkflowTemplateRequest],
-                    Awaitable[~.Operation]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'instantiate_workflow_template' not in self._stubs:
-            self._stubs['instantiate_workflow_template'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate',
-                request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['instantiate_workflow_template']
-
-    @property
-    def instantiate_inline_workflow_template(self) -> Callable[
-            [workflow_templates.InstantiateInlineWorkflowTemplateRequest],
-            Awaitable[operations_pb2.Operation]]:
-        r"""Return a callable for the instantiate inline workflow
-        template method over gRPC.
-
-        Instantiates a template and begins execution.
-
-        This method is equivalent to executing the sequence
-        [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate],
-        [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
-        [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
-
-        The returned Operation can be used to track execution of
-        workflow by polling
-        [operations.get][google.longrunning.Operations.GetOperation].
-        The Operation will complete when entire workflow is finished.
-
-        The running workflow can be aborted via
-        [operations.cancel][google.longrunning.Operations.CancelOperation].
-        This will cause any inflight jobs to be cancelled and
-        workflow-owned clusters to be deleted.
-
-        The [Operation.metadata][google.longrunning.Operation.metadata]
-        will be
-        `WorkflowMetadata <https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata>`__.
-        Also see `Using
-        WorkflowMetadata <https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata>`__.
-
-        On successful completion,
-        [Operation.response][google.longrunning.Operation.response] will
-        be [Empty][google.protobuf.Empty].
-
-        Returns:
-            Callable[[~.InstantiateInlineWorkflowTemplateRequest],
-                    Awaitable[~.Operation]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'instantiate_inline_workflow_template' not in self._stubs:
-            self._stubs['instantiate_inline_workflow_template'] = self.grpc_channel.unary_unary(
-                '/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate',
-                request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['instantiate_inline_workflow_template']
-
-    @property
-    def update_workflow_template(self) -> Callable[
-            [workflow_templates.UpdateWorkflowTemplateRequest],
-            Awaitable[workflow_templates.WorkflowTemplate]]:
-        r"""Return a callable for the update workflow template method over gRPC.
-
-        Updates (replaces) workflow template. The updated
-        template must contain version that matches the current
-        server version.
-
-        Returns:
-            Callable[[~.UpdateWorkflowTemplateRequest],
-                    Awaitable[~.WorkflowTemplate]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'update_workflow_template' not in self._stubs: - self._stubs['update_workflow_template'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate', - request_serializer=workflow_templates.UpdateWorkflowTemplateRequest.serialize, - response_deserializer=workflow_templates.WorkflowTemplate.deserialize, - ) - return self._stubs['update_workflow_template'] - - @property - def list_workflow_templates(self) -> Callable[ - [workflow_templates.ListWorkflowTemplatesRequest], - Awaitable[workflow_templates.ListWorkflowTemplatesResponse]]: - r"""Return a callable for the list workflow templates method over gRPC. - - Lists workflows that match the specified filter in - the request. - - Returns: - Callable[[~.ListWorkflowTemplatesRequest], - Awaitable[~.ListWorkflowTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_workflow_templates' not in self._stubs: - self._stubs['list_workflow_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates', - request_serializer=workflow_templates.ListWorkflowTemplatesRequest.serialize, - response_deserializer=workflow_templates.ListWorkflowTemplatesResponse.deserialize, - ) - return self._stubs['list_workflow_templates'] - - @property - def delete_workflow_template(self) -> Callable[ - [workflow_templates.DeleteWorkflowTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete workflow template method over gRPC. - - Deletes a workflow template. It does not cancel - in-progress workflows. - - Returns: - Callable[[~.DeleteWorkflowTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_workflow_template' not in self._stubs: - self._stubs['delete_workflow_template'] = self.grpc_channel.unary_unary( - '/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate', - request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_workflow_template'] - - def close(self): - return self.grpc_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'WorkflowTemplateServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py deleted file mode 100644 index 2020a048..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py +++ /dev/null @@ -1,1785 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataproc_v1.types import workflow_templates -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -from .base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class WorkflowTemplateServiceRestInterceptor: - """Interceptor for WorkflowTemplateService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the WorkflowTemplateServiceRestTransport. - - .. 
code-block:: python - class MyCustomWorkflowTemplateServiceInterceptor(WorkflowTemplateServiceRestInterceptor): - def pre_create_workflow_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_workflow_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_workflow_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_workflow_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_workflow_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_instantiate_inline_workflow_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_instantiate_inline_workflow_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_instantiate_workflow_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_instantiate_workflow_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_workflow_templates(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_workflow_templates(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_workflow_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_workflow_template(self, response): - logging.log(f"Received response: {response}") - return response - - transport = WorkflowTemplateServiceRestTransport(interceptor=MyCustomWorkflowTemplateServiceInterceptor()) - client = WorkflowTemplateServiceClient(transport=transport) - - - """ - def pre_create_workflow_template(self, request: workflow_templates.CreateWorkflowTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[workflow_templates.CreateWorkflowTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_workflow_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_create_workflow_template(self, response: workflow_templates.WorkflowTemplate) -> workflow_templates.WorkflowTemplate: - """Post-rpc interceptor for create_workflow_template - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_delete_workflow_template(self, request: workflow_templates.DeleteWorkflowTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[workflow_templates.DeleteWorkflowTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_workflow_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. 
- """ - return request, metadata - - def pre_get_workflow_template(self, request: workflow_templates.GetWorkflowTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[workflow_templates.GetWorkflowTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_workflow_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_get_workflow_template(self, response: workflow_templates.WorkflowTemplate) -> workflow_templates.WorkflowTemplate: - """Post-rpc interceptor for get_workflow_template - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_instantiate_inline_workflow_template(self, request: workflow_templates.InstantiateInlineWorkflowTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[workflow_templates.InstantiateInlineWorkflowTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for instantiate_inline_workflow_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_instantiate_inline_workflow_template(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for instantiate_inline_workflow_template - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_instantiate_workflow_template(self, request: workflow_templates.InstantiateWorkflowTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[workflow_templates.InstantiateWorkflowTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for instantiate_workflow_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_instantiate_workflow_template(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for instantiate_workflow_template - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_list_workflow_templates(self, request: workflow_templates.ListWorkflowTemplatesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[workflow_templates.ListWorkflowTemplatesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_workflow_templates - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_list_workflow_templates(self, response: workflow_templates.ListWorkflowTemplatesResponse) -> workflow_templates.ListWorkflowTemplatesResponse: - """Post-rpc interceptor for list_workflow_templates - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. 
- """ - return response - def pre_update_workflow_template(self, request: workflow_templates.UpdateWorkflowTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[workflow_templates.UpdateWorkflowTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_workflow_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_update_workflow_template(self, response: workflow_templates.WorkflowTemplate) -> workflow_templates.WorkflowTemplate: - """Post-rpc interceptor for update_workflow_template - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. 
- """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the WorkflowTemplateService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the WorkflowTemplateService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class WorkflowTemplateServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: WorkflowTemplateServiceRestInterceptor - - -class WorkflowTemplateServiceRestTransport(WorkflowTemplateServiceTransport): - """REST backend transport for WorkflowTemplateService. - - The API interface for managing Workflow Templates in the - Dataproc API. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-
-    """
-
-    def __init__(self, *,
-            host: str = 'dataproc.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-                ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[WorkflowTemplateServiceRestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or WorkflowTemplateServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
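-        # The http_options mapping assembled below mirrors the
-        # google.longrunning HTTP bindings, registered for both the
-        # regions/ and locations/ variants of operation resource names;
-        # the resulting client is then cached on the instance.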
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateWorkflowTemplate(WorkflowTemplateServiceRestStub): - def __hash__(self): - return hash("CreateWorkflowTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: workflow_templates.CreateWorkflowTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> workflow_templates.WorkflowTemplate: - r"""Call the create workflow template method over HTTP. - - Args: - request (~.workflow_templates.CreateWorkflowTemplateRequest): - The request object. A request to create a workflow - template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.workflow_templates.WorkflowTemplate: - A Dataproc workflow template - resource. 
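-
-            A minimal sketch, assuming a configured ``transport`` instance;
-            the ``parent`` and ``id`` values are placeholders::
-
-                template = transport.create_workflow_template(
-                    workflow_templates.CreateWorkflowTemplateRequest(
-                        parent="projects/my-project/regions/us-central1",
-                        template=workflow_templates.WorkflowTemplate(id="my-template"),
-                    )
-                )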
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/workflowTemplates', - 'body': 'template', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/regions/*}/workflowTemplates', - 'body': 'template', - }, - ] - request, metadata = self._interceptor.pre_create_workflow_template(request, metadata) - pb_request = workflow_templates.CreateWorkflowTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = workflow_templates.WorkflowTemplate() - pb_resp = workflow_templates.WorkflowTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_workflow_template(resp) - return resp - - class _DeleteWorkflowTemplate(WorkflowTemplateServiceRestStub): - def __hash__(self): - return hash("DeleteWorkflowTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: workflow_templates.DeleteWorkflowTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete workflow template method over HTTP. - - Args: - request (~.workflow_templates.DeleteWorkflowTemplateRequest): - The request object. A request to delete a workflow - template. - Currently started workflows will remain - running. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/workflowTemplates/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/workflowTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_workflow_template(request, metadata) - pb_request = workflow_templates.DeleteWorkflowTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetWorkflowTemplate(WorkflowTemplateServiceRestStub): - def __hash__(self): - return hash("GetWorkflowTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: workflow_templates.GetWorkflowTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> workflow_templates.WorkflowTemplate: - r"""Call the get workflow template method over HTTP. - - Args: - request (~.workflow_templates.GetWorkflowTemplateRequest): - The request object. A request to fetch a workflow - template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.workflow_templates.WorkflowTemplate: - A Dataproc workflow template - resource. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/workflowTemplates/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/workflowTemplates/*}', - }, - ] - request, metadata = self._interceptor.pre_get_workflow_template(request, metadata) - pb_request = workflow_templates.GetWorkflowTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = workflow_templates.WorkflowTemplate() - pb_resp = workflow_templates.WorkflowTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_workflow_template(resp) - return resp - - class _InstantiateInlineWorkflowTemplate(WorkflowTemplateServiceRestStub): - def __hash__(self): - return hash("InstantiateInlineWorkflowTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: workflow_templates.InstantiateInlineWorkflowTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the instantiate inline - workflow template method over HTTP. - - Args: - request (~.workflow_templates.InstantiateInlineWorkflowTemplateRequest): - The request object. A request to instantiate an inline - workflow template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline', - 'body': 'template', - }, -{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline', - 'body': 'template', - }, - ] - request, metadata = self._interceptor.pre_instantiate_inline_workflow_template(request, metadata) - pb_request = workflow_templates.InstantiateInlineWorkflowTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_instantiate_inline_workflow_template(resp) - return resp - - class _InstantiateWorkflowTemplate(WorkflowTemplateServiceRestStub): - def __hash__(self): - return hash("InstantiateWorkflowTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: workflow_templates.InstantiateWorkflowTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the instantiate workflow - template method over HTTP. - - Args: - request (~.workflow_templates.InstantiateWorkflowTemplateRequest): - The request object. A request to instantiate a workflow - template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_instantiate_workflow_template(request, metadata) - pb_request = workflow_templates.InstantiateWorkflowTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_instantiate_workflow_template(resp) - return resp - - class _ListWorkflowTemplates(WorkflowTemplateServiceRestStub): - def __hash__(self): - return hash("ListWorkflowTemplates") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: workflow_templates.ListWorkflowTemplatesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> workflow_templates.ListWorkflowTemplatesResponse: - r"""Call the list workflow templates method over HTTP. - - Args: - request (~.workflow_templates.ListWorkflowTemplatesRequest): - The request object. A request to list workflow templates - in a project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.workflow_templates.ListWorkflowTemplatesResponse: - A response to a request to list - workflow templates in a project. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/workflowTemplates', - }, -{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/regions/*}/workflowTemplates', - }, - ] - request, metadata = self._interceptor.pre_list_workflow_templates(request, metadata) - pb_request = workflow_templates.ListWorkflowTemplatesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = workflow_templates.ListWorkflowTemplatesResponse() - pb_resp = workflow_templates.ListWorkflowTemplatesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_workflow_templates(resp) - return resp - - class _UpdateWorkflowTemplate(WorkflowTemplateServiceRestStub): - def __hash__(self): - return hash("UpdateWorkflowTemplate") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: workflow_templates.UpdateWorkflowTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> workflow_templates.WorkflowTemplate: - r"""Call the update workflow template method over HTTP. - - Args: - request (~.workflow_templates.UpdateWorkflowTemplateRequest): - The request object. A request to update a workflow - template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.workflow_templates.WorkflowTemplate: - A Dataproc workflow template - resource. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v1/{template.name=projects/*/locations/*/workflowTemplates/*}', - 'body': 'template', - }, -{ - 'method': 'put', - 'uri': '/v1/{template.name=projects/*/regions/*/workflowTemplates/*}', - 'body': 'template', - }, - ] - request, metadata = self._interceptor.pre_update_workflow_template(request, metadata) - pb_request = workflow_templates.UpdateWorkflowTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = workflow_templates.WorkflowTemplate() - pb_resp = workflow_templates.WorkflowTemplate.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_workflow_template(resp) - return resp - - @property - def create_workflow_template(self) -> Callable[ - [workflow_templates.CreateWorkflowTemplateRequest], - workflow_templates.WorkflowTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_workflow_template(self) -> Callable[ - [workflow_templates.DeleteWorkflowTemplateRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_workflow_template(self) -> Callable[ - [workflow_templates.GetWorkflowTemplateRequest], - workflow_templates.WorkflowTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def instantiate_inline_workflow_template(self) -> Callable[ - [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._InstantiateInlineWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def instantiate_workflow_template(self) -> Callable[ - [workflow_templates.InstantiateWorkflowTemplateRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._InstantiateWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_workflow_templates(self) -> Callable[ - [workflow_templates.ListWorkflowTemplatesRequest], - workflow_templates.ListWorkflowTemplatesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListWorkflowTemplates(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_workflow_template(self) -> Callable[ - [workflow_templates.UpdateWorkflowTemplateRequest], - workflow_templates.WorkflowTemplate]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateWorkflowTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(WorkflowTemplateServiceRestStub): - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. 
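-
-            A minimal sketch, assuming a configured ``transport`` instance;
-            the ``resource`` path is a placeholder::
-
-                policy = transport.get_iam_policy(
-                    iam_policy_pb2.GetIamPolicyRequest(
-                        resource="projects/my-project/regions/us-central1/workflowTemplates/my-template",
-                    )
-                )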
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(WorkflowTemplateServiceRestStub): - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(WorkflowTemplateServiceRestStub): - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions', - 'body': '*', - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(WorkflowTemplateServiceRestStub): - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}:cancel', - }, -{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(WorkflowTemplateServiceRestStub): - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(WorkflowTemplateServiceRestStub): - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations/*}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(WorkflowTemplateServiceRestStub): - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/regions/*/operations}', - }, -{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations}', - }, - ] - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_list_operations(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'WorkflowTemplateServiceRestTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/__init__.py deleted file mode 100644 index 0cbed877..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/__init__.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .autoscaling_policies import ( - AutoscalingPolicy, - BasicAutoscalingAlgorithm, - BasicYarnAutoscalingConfig, - CreateAutoscalingPolicyRequest, - DeleteAutoscalingPolicyRequest, - GetAutoscalingPolicyRequest, - InstanceGroupAutoscalingPolicyConfig, - ListAutoscalingPoliciesRequest, - ListAutoscalingPoliciesResponse, - UpdateAutoscalingPolicyRequest, -) -from .batches import ( - Batch, - CreateBatchRequest, - DeleteBatchRequest, - GetBatchRequest, - ListBatchesRequest, - ListBatchesResponse, - PySparkBatch, - SparkBatch, - SparkRBatch, - SparkSqlBatch, -) -from .clusters import ( - AcceleratorConfig, - AutoscalingConfig, - AuxiliaryNodeGroup, - AuxiliaryServicesConfig, - Cluster, - ClusterConfig, - ClusterMetrics, - ClusterStatus, - ConfidentialInstanceConfig, - CreateClusterRequest, - DataprocMetricConfig, - DeleteClusterRequest, - DiagnoseClusterRequest, - DiagnoseClusterResults, - DiskConfig, - EncryptionConfig, - EndpointConfig, - GceClusterConfig, - GetClusterRequest, - IdentityConfig, - InstanceGroupConfig, - KerberosConfig, - LifecycleConfig, - ListClustersRequest, - ListClustersResponse, - ManagedGroupConfig, - MetastoreConfig, - NodeGroup, - NodeGroupAffinity, - NodeInitializationAction, - ReservationAffinity, - SecurityConfig, - ShieldedInstanceConfig, - SoftwareConfig, - StartClusterRequest, - StopClusterRequest, - UpdateClusterRequest, - VirtualClusterConfig, -) -from .jobs import ( - CancelJobRequest, - DeleteJobRequest, - DriverSchedulingConfig, - GetJobRequest, - HadoopJob, - HiveJob, - Job, - JobMetadata, - JobPlacement, - JobReference, - JobScheduling, - JobStatus, - ListJobsRequest, - ListJobsResponse, - LoggingConfig, - PigJob, - PrestoJob, - PySparkJob, - QueryList, - SparkJob, - SparkRJob, - SparkSqlJob, - SubmitJobRequest, - TrinoJob, - UpdateJobRequest, - YarnApplication, -) -from .node_groups import ( - CreateNodeGroupRequest, - GetNodeGroupRequest, - ResizeNodeGroupRequest, -) -from .operations import ( - BatchOperationMetadata, - ClusterOperationMetadata, - ClusterOperationStatus, - NodeGroupOperationMetadata, -) -from .shared import ( - EnvironmentConfig, - ExecutionConfig, - GkeClusterConfig, - GkeNodePoolConfig, - GkeNodePoolTarget, - KubernetesClusterConfig, - KubernetesSoftwareConfig, - PeripheralsConfig, - RuntimeConfig, - RuntimeInfo, - SparkHistoryServerConfig, - UsageMetrics, - UsageSnapshot, - Component, - FailureAction, -) -from .workflow_templates import ( - ClusterOperation, - ClusterSelector, - CreateWorkflowTemplateRequest, - DeleteWorkflowTemplateRequest, - GetWorkflowTemplateRequest, - InstantiateInlineWorkflowTemplateRequest, - InstantiateWorkflowTemplateRequest, - ListWorkflowTemplatesRequest, - ListWorkflowTemplatesResponse, - ManagedCluster, - OrderedJob, - ParameterValidation, - RegexValidation, - TemplateParameter, - UpdateWorkflowTemplateRequest, - ValueValidation, - WorkflowGraph, - WorkflowMetadata, - WorkflowNode, - WorkflowTemplate, - WorkflowTemplatePlacement, -) - -__all__ = ( - 'AutoscalingPolicy', - 'BasicAutoscalingAlgorithm', - 'BasicYarnAutoscalingConfig', - 'CreateAutoscalingPolicyRequest', - 'DeleteAutoscalingPolicyRequest', - 'GetAutoscalingPolicyRequest', - 'InstanceGroupAutoscalingPolicyConfig', - 'ListAutoscalingPoliciesRequest', - 'ListAutoscalingPoliciesResponse', - 'UpdateAutoscalingPolicyRequest', - 'Batch', - 'CreateBatchRequest', - 'DeleteBatchRequest', - 'GetBatchRequest', - 'ListBatchesRequest', - 'ListBatchesResponse', - 'PySparkBatch', - 'SparkBatch', - 'SparkRBatch', - 'SparkSqlBatch', - 
'AcceleratorConfig', - 'AutoscalingConfig', - 'AuxiliaryNodeGroup', - 'AuxiliaryServicesConfig', - 'Cluster', - 'ClusterConfig', - 'ClusterMetrics', - 'ClusterStatus', - 'ConfidentialInstanceConfig', - 'CreateClusterRequest', - 'DataprocMetricConfig', - 'DeleteClusterRequest', - 'DiagnoseClusterRequest', - 'DiagnoseClusterResults', - 'DiskConfig', - 'EncryptionConfig', - 'EndpointConfig', - 'GceClusterConfig', - 'GetClusterRequest', - 'IdentityConfig', - 'InstanceGroupConfig', - 'KerberosConfig', - 'LifecycleConfig', - 'ListClustersRequest', - 'ListClustersResponse', - 'ManagedGroupConfig', - 'MetastoreConfig', - 'NodeGroup', - 'NodeGroupAffinity', - 'NodeInitializationAction', - 'ReservationAffinity', - 'SecurityConfig', - 'ShieldedInstanceConfig', - 'SoftwareConfig', - 'StartClusterRequest', - 'StopClusterRequest', - 'UpdateClusterRequest', - 'VirtualClusterConfig', - 'CancelJobRequest', - 'DeleteJobRequest', - 'DriverSchedulingConfig', - 'GetJobRequest', - 'HadoopJob', - 'HiveJob', - 'Job', - 'JobMetadata', - 'JobPlacement', - 'JobReference', - 'JobScheduling', - 'JobStatus', - 'ListJobsRequest', - 'ListJobsResponse', - 'LoggingConfig', - 'PigJob', - 'PrestoJob', - 'PySparkJob', - 'QueryList', - 'SparkJob', - 'SparkRJob', - 'SparkSqlJob', - 'SubmitJobRequest', - 'TrinoJob', - 'UpdateJobRequest', - 'YarnApplication', - 'CreateNodeGroupRequest', - 'GetNodeGroupRequest', - 'ResizeNodeGroupRequest', - 'BatchOperationMetadata', - 'ClusterOperationMetadata', - 'ClusterOperationStatus', - 'NodeGroupOperationMetadata', - 'EnvironmentConfig', - 'ExecutionConfig', - 'GkeClusterConfig', - 'GkeNodePoolConfig', - 'GkeNodePoolTarget', - 'KubernetesClusterConfig', - 'KubernetesSoftwareConfig', - 'PeripheralsConfig', - 'RuntimeConfig', - 'RuntimeInfo', - 'SparkHistoryServerConfig', - 'UsageMetrics', - 'UsageSnapshot', - 'Component', - 'FailureAction', - 'ClusterOperation', - 'ClusterSelector', - 'CreateWorkflowTemplateRequest', - 'DeleteWorkflowTemplateRequest', - 'GetWorkflowTemplateRequest', - 'InstantiateInlineWorkflowTemplateRequest', - 'InstantiateWorkflowTemplateRequest', - 'ListWorkflowTemplatesRequest', - 'ListWorkflowTemplatesResponse', - 'ManagedCluster', - 'OrderedJob', - 'ParameterValidation', - 'RegexValidation', - 'TemplateParameter', - 'UpdateWorkflowTemplateRequest', - 'ValueValidation', - 'WorkflowGraph', - 'WorkflowMetadata', - 'WorkflowNode', - 'WorkflowTemplate', - 'WorkflowTemplatePlacement', -) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/autoscaling_policies.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/autoscaling_policies.py deleted file mode 100644 index ee8f21f3..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/autoscaling_policies.py +++ /dev/null @@ -1,449 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.protobuf import duration_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
-    package='google.cloud.dataproc.v1',
-    manifest={
-        'AutoscalingPolicy',
-        'BasicAutoscalingAlgorithm',
-        'BasicYarnAutoscalingConfig',
-        'InstanceGroupAutoscalingPolicyConfig',
-        'CreateAutoscalingPolicyRequest',
-        'GetAutoscalingPolicyRequest',
-        'UpdateAutoscalingPolicyRequest',
-        'DeleteAutoscalingPolicyRequest',
-        'ListAutoscalingPoliciesRequest',
-        'ListAutoscalingPoliciesResponse',
-    },
-)
-
-
-class AutoscalingPolicy(proto.Message):
-    r"""Describes an autoscaling policy for Dataproc cluster
-    autoscaler.
-
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        id (str):
-            Required. The policy id.
-
-            The id must contain only letters (a-z, A-Z), numbers (0-9),
-            underscores (_), and hyphens (-). Cannot begin or end with
-            underscore or hyphen. Must consist of between 3 and 50
-            characters.
-        name (str):
-            Output only. The "resource name" of the autoscaling policy,
-            as described in
-            https://cloud.google.com/apis/design/resource_names.
-
-            -  For ``projects.regions.autoscalingPolicies``, the
-               resource name of the policy has the following format:
-               ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}``
-
-            -  For ``projects.locations.autoscalingPolicies``, the
-               resource name of the policy has the following format:
-               ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}``
-        basic_algorithm (google.cloud.dataproc_v1.types.BasicAutoscalingAlgorithm):
-
-            This field is a member of `oneof`_ ``algorithm``.
-        worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig):
-            Required. Describes how the autoscaler will
-            operate for primary workers.
-        secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupAutoscalingPolicyConfig):
-            Optional. Describes how the autoscaler will
-            operate for secondary workers.
-        labels (MutableMapping[str, str]):
-            Optional. The labels to associate with this autoscaling
-            policy. Label **keys** must contain 1 to 63 characters, and
-            must conform to `RFC 1035
-            <https://www.ietf.org/rfc/rfc1035.txt>`__. Label
-            **values** may be empty, but, if present, must contain 1 to
-            63 characters, and must conform to `RFC 1035
-            <https://www.ietf.org/rfc/rfc1035.txt>`__. No more than
-            32 labels can be associated with an autoscaling policy.
-    """
-
-    id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    name: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    basic_algorithm: 'BasicAutoscalingAlgorithm' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        oneof='algorithm',
-        message='BasicAutoscalingAlgorithm',
-    )
-    worker_config: 'InstanceGroupAutoscalingPolicyConfig' = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message='InstanceGroupAutoscalingPolicyConfig',
-    )
-    secondary_worker_config: 'InstanceGroupAutoscalingPolicyConfig' = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message='InstanceGroupAutoscalingPolicyConfig',
-    )
-    labels: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=6,
-    )
-
-
-class BasicAutoscalingAlgorithm(proto.Message):
-    r"""Basic algorithm for autoscaling.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        yarn_config (google.cloud.dataproc_v1.types.BasicYarnAutoscalingConfig):
-            Required. YARN autoscaling configuration.
- - This field is a member of `oneof`_ ``config``. - cooldown_period (google.protobuf.duration_pb2.Duration): - Optional. Duration between scaling events. A scaling period - starts after the update operation from the previous event - has completed. - - Bounds: [2m, 1d]. Default: 2m. - """ - - yarn_config: 'BasicYarnAutoscalingConfig' = proto.Field( - proto.MESSAGE, - number=1, - oneof='config', - message='BasicYarnAutoscalingConfig', - ) - cooldown_period: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - -class BasicYarnAutoscalingConfig(proto.Message): - r"""Basic autoscaling configurations for YARN. - - Attributes: - graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): - Required. Timeout for YARN graceful decommissioning of Node - Managers. Specifies the duration to wait for jobs to - complete before forcefully removing workers (and potentially - interrupting jobs). Only applicable to downscaling - operations. - - Bounds: [0s, 1d]. - scale_up_factor (float): - Required. Fraction of average YARN pending memory in the - last cooldown period for which to add workers. A scale-up - factor of 1.0 will result in scaling up so that there is no - pending memory remaining after the update (more aggressive - scaling). A scale-up factor closer to 0 will result in a - smaller magnitude of scaling up (less aggressive scaling). - See `How autoscaling - works `__ - for more information. - - Bounds: [0.0, 1.0]. - scale_down_factor (float): - Required. Fraction of average YARN pending memory in the - last cooldown period for which to remove workers. A - scale-down factor of 1 will result in scaling down so that - there is no available memory remaining after the update - (more aggressive scaling). A scale-down factor of 0 disables - removing workers, which can be beneficial for autoscaling a - single job. See `How autoscaling - works `__ - for more information. - - Bounds: [0.0, 1.0]. - scale_up_min_worker_fraction (float): - Optional. Minimum scale-up threshold as a fraction of total - cluster size before scaling occurs. For example, in a - 20-worker cluster, a threshold of 0.1 means the autoscaler - must recommend at least a 2-worker scale-up for the cluster - to scale. A threshold of 0 means the autoscaler will scale - up on any recommended change. - - Bounds: [0.0, 1.0]. Default: 0.0. - scale_down_min_worker_fraction (float): - Optional. Minimum scale-down threshold as a fraction of - total cluster size before scaling occurs. For example, in a - 20-worker cluster, a threshold of 0.1 means the autoscaler - must recommend at least a 2 worker scale-down for the - cluster to scale. A threshold of 0 means the autoscaler will - scale down on any recommended change. - - Bounds: [0.0, 1.0]. Default: 0.0. - """ - - graceful_decommission_timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=5, - message=duration_pb2.Duration, - ) - scale_up_factor: float = proto.Field( - proto.DOUBLE, - number=1, - ) - scale_down_factor: float = proto.Field( - proto.DOUBLE, - number=2, - ) - scale_up_min_worker_fraction: float = proto.Field( - proto.DOUBLE, - number=3, - ) - scale_down_min_worker_fraction: float = proto.Field( - proto.DOUBLE, - number=4, - ) - - -class InstanceGroupAutoscalingPolicyConfig(proto.Message): - r"""Configuration for the size bounds of an instance group, - including its proportional size to other groups. - - Attributes: - min_instances (int): - Optional. Minimum number of instances for this group. 
- - Primary workers - Bounds: [2, max_instances]. Default: 2. - Secondary workers - Bounds: [0, max_instances]. Default: 0. - max_instances (int): - Required. Maximum number of instances for this group. - Required for primary workers. Note that by default, clusters - will not use secondary workers. Required for secondary - workers if the minimum secondary instances is set. - - Primary workers - Bounds: [min_instances, ). Secondary - workers - Bounds: [min_instances, ). Default: 0. - weight (int): - Optional. Weight for the instance group, which is used to - determine the fraction of total workers in the cluster from - this instance group. For example, if primary workers have - weight 2, and secondary workers have weight 1, the cluster - will have approximately 2 primary workers for each secondary - worker. - - The cluster may not reach the specified balance if - constrained by min/max bounds or other autoscaling settings. - For example, if ``max_instances`` for secondary workers is - 0, then only primary workers will be added. The cluster can - also be out of balance when created. - - If weight is not set on any instance group, the cluster will - default to equal weight for all groups: the cluster will - attempt to maintain an equal number of workers in each group - within the configured size bounds for each group. If weight - is set for one group only, the cluster will default to zero - weight on the unset group. For example if weight is set only - on primary workers, the cluster will use primary workers - only and no secondary workers. - """ - - min_instances: int = proto.Field( - proto.INT32, - number=1, - ) - max_instances: int = proto.Field( - proto.INT32, - number=2, - ) - weight: int = proto.Field( - proto.INT32, - number=3, - ) - - -class CreateAutoscalingPolicyRequest(proto.Message): - r"""A request to create an autoscaling policy. - - Attributes: - parent (str): - Required. The "resource name" of the region or location, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.create``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.autoscalingPolicies.create``, - the resource name of the location has the following - format: ``projects/{project_id}/locations/{location}`` - policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): - Required. The autoscaling policy to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - policy: 'AutoscalingPolicy' = proto.Field( - proto.MESSAGE, - number=2, - message='AutoscalingPolicy', - ) - - -class GetAutoscalingPolicyRequest(proto.Message): - r"""A request to fetch an autoscaling policy. - - Attributes: - name (str): - Required. The "resource name" of the autoscaling policy, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.get``, the - resource name of the policy has the following format: - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` - - - For ``projects.locations.autoscalingPolicies.get``, the - resource name of the policy has the following format: - ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateAutoscalingPolicyRequest(proto.Message): - r"""A request to update an autoscaling policy. 
- - Attributes: - policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): - Required. The updated autoscaling policy. - """ - - policy: 'AutoscalingPolicy' = proto.Field( - proto.MESSAGE, - number=1, - message='AutoscalingPolicy', - ) - - -class DeleteAutoscalingPolicyRequest(proto.Message): - r"""A request to delete an autoscaling policy. - Autoscaling policies in use by one or more clusters will not be - deleted. - - Attributes: - name (str): - Required. The "resource name" of the autoscaling policy, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.delete``, the - resource name of the policy has the following format: - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` - - - For ``projects.locations.autoscalingPolicies.delete``, - the resource name of the policy has the following format: - ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListAutoscalingPoliciesRequest(proto.Message): - r"""A request to list autoscaling policies in a project. - - Attributes: - parent (str): - Required. The "resource name" of the region or location, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.autoscalingPolicies.list``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.autoscalingPolicies.list``, the - resource name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - page_size (int): - Optional. The maximum number of results to - return in each response. Must be less than or - equal to 1000. Defaults to 100. - page_token (str): - Optional. The page token, returned by a - previous call, to request the next page of - results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListAutoscalingPoliciesResponse(proto.Message): - r"""A response to a request to list autoscaling policies in a - project. - - Attributes: - policies (MutableSequence[google.cloud.dataproc_v1.types.AutoscalingPolicy]): - Output only. Autoscaling policies list. - next_page_token (str): - Output only. This token is included in the - response if there are more results to fetch. - """ - - @property - def raw_page(self): - return self - - policies: MutableSequence['AutoscalingPolicy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AutoscalingPolicy', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/batches.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/batches.py deleted file mode 100644 index ab47c470..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/batches.py +++ /dev/null @@ -1,633 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
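Taken together, the autoscaling messages above compose into a policy like the following. This is a hedged sketch of building and creating a policy with the v1 client; the project, region, and policy id are placeholders, and the regional ``api_endpoint`` follows the client library's usual convention:

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.protobuf import duration_pb2

    project, region = "my-project", "us-central1"  # placeholders

    # Region-scoped Dataproc calls go through the regional endpoint.
    client = dataproc_v1.AutoscalingPolicyServiceClient(
        client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
    )

    policy = dataproc_v1.AutoscalingPolicy(
        id="scale-on-yarn-memory",  # placeholder policy id
        basic_algorithm=dataproc_v1.BasicAutoscalingAlgorithm(
            yarn_config=dataproc_v1.BasicYarnAutoscalingConfig(
                graceful_decommission_timeout=duration_pb2.Duration(seconds=3600),
                scale_up_factor=0.5,   # add workers for half of pending memory
                scale_down_factor=0.5,
            ),
            cooldown_period=duration_pb2.Duration(seconds=120),  # the 2m default
        ),
        worker_config=dataproc_v1.InstanceGroupAutoscalingPolicyConfig(
            min_instances=2, max_instances=10
        ),
    )

    created = client.create_autoscaling_policy(
        parent=f"projects/{project}/regions/{region}", policy=policy
    )
    print(created.name)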
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataproc_v1.types import shared -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataproc.v1', - manifest={ - 'CreateBatchRequest', - 'GetBatchRequest', - 'ListBatchesRequest', - 'ListBatchesResponse', - 'DeleteBatchRequest', - 'Batch', - 'PySparkBatch', - 'SparkBatch', - 'SparkRBatch', - 'SparkSqlBatch', - }, -) - - -class CreateBatchRequest(proto.Message): - r"""A request to create a batch workload. - - Attributes: - parent (str): - Required. The parent resource where this - batch will be created. - batch (google.cloud.dataproc_v1.types.Batch): - Required. The batch to create. - batch_id (str): - Optional. The ID to use for the batch, which will become the - final component of the batch's resource name. - - This value must be 4-63 characters. Valid characters are - ``/[a-z][0-9]-/``. - request_id (str): - Optional. A unique ID used to identify the request. If the - service receives two - `CreateBatchRequest `__\ s - with the same request_id, the second request is ignored and - the Operation that corresponds to the first Batch created - and stored in the backend is returned. - - Recommendation: Set this value to a - `UUID `__. - - The value must contain only letters (a-z, A-Z), numbers - (0-9), underscores (_), and hyphens (-). The maximum length - is 40 characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - batch: 'Batch' = proto.Field( - proto.MESSAGE, - number=2, - message='Batch', - ) - batch_id: str = proto.Field( - proto.STRING, - number=3, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class GetBatchRequest(proto.Message): - r"""A request to get the resource representation for a batch - workload. - - Attributes: - name (str): - Required. The fully qualified name of the batch to retrieve - in the format - "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID". - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBatchesRequest(proto.Message): - r"""A request to list batch workloads in a project. - - Attributes: - parent (str): - Required. The parent, which owns this - collection of batches. - page_size (int): - Optional. The maximum number of batches to - return in each response. The service may return - fewer than this value. The default page size is - 20; the maximum page size is 1000. - page_token (str): - Optional. A page token received from a previous - ``ListBatches`` call. Provide this token to retrieve the - subsequent page. - filter (str): - Optional. A filter for the batches to return in the - response. - - A filter is a logical expression constraining the values of - various fields in each batch resource. Filters are case - sensitive, and may contain multiple clauses combined with - logical operators (AND/OR). Supported fields are - ``batch_id``, ``batch_uuid``, ``state``, and - ``create_time``. - - e.g. 
- ``state = RUNNING and create_time < "2023-01-01T00:00:00Z"`` - filters for batches in state RUNNING that were created - before 2023-01-01 - - See https://google.aip.dev/assets/misc/ebnf-filtering.txt - for a detailed description of the filter syntax and a list - of supported comparisons. - order_by (str): - Optional. Field(s) on which to sort the list of batches. - - Currently the only supported sort orders are unspecified - (empty) and ``create_time desc`` to sort by most recently - created batches first. - - See https://google.aip.dev/132#ordering for more details. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListBatchesResponse(proto.Message): - r"""A list of batch workloads. - - Attributes: - batches (MutableSequence[google.cloud.dataproc_v1.types.Batch]): - The batches from the specified collection. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - batches: MutableSequence['Batch'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Batch', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteBatchRequest(proto.Message): - r"""A request to delete a batch workload. - - Attributes: - name (str): - Required. The fully qualified name of the batch to delete - in the format - "projects/PROJECT_ID/locations/DATAPROC_REGION/batches/BATCH_ID". - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Batch(proto.Message): - r"""A representation of a batch workload in the service. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The resource name of the batch. - uuid (str): - Output only. A batch UUID (Unique Universal - Identifier). The service generates this value - when it creates the batch. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the batch was - created. - pyspark_batch (google.cloud.dataproc_v1.types.PySparkBatch): - Optional. PySpark batch config. - - This field is a member of `oneof`_ ``batch_config``. - spark_batch (google.cloud.dataproc_v1.types.SparkBatch): - Optional. Spark batch config. - - This field is a member of `oneof`_ ``batch_config``. - spark_r_batch (google.cloud.dataproc_v1.types.SparkRBatch): - Optional. SparkR batch config. - - This field is a member of `oneof`_ ``batch_config``. - spark_sql_batch (google.cloud.dataproc_v1.types.SparkSqlBatch): - Optional. SparkSql batch config. - - This field is a member of `oneof`_ ``batch_config``. - runtime_info (google.cloud.dataproc_v1.types.RuntimeInfo): - Output only. Runtime information about batch - execution. - state (google.cloud.dataproc_v1.types.Batch.State): - Output only. The state of the batch. - state_message (str): - Output only. Batch state details, such as a failure - description if the state is ``FAILED``.
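As a usage sketch of the filter and ordering semantics just described (project and region are placeholders):

.. code-block:: python

    from google.cloud import dataproc_v1

    project, region = "my-project", "us-central1"  # placeholders

    client = dataproc_v1.BatchControllerClient(
        client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
    )

    # The returned pager iterates across pages transparently, following
    # next_page_token from each ListBatchesResponse.
    request = dataproc_v1.ListBatchesRequest(
        parent=f"projects/{project}/locations/{region}",
        filter='state = RUNNING and create_time < "2023-01-01T00:00:00Z"',
        order_by="create_time desc",
    )
    for batch in client.list_batches(request=request):
        print(batch.name, batch.state.name)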
- state_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the batch entered - a current state. - creator (str): - Output only. The email address of the user - who created the batch. - labels (MutableMapping[str, str]): - Optional. The labels to associate with this batch. Label - **keys** must contain 1 to 63 characters, and must conform - to `RFC 1035 `__. - Label **values** may be empty, but, if present, must contain - 1 to 63 characters, and must conform to `RFC - 1035 `__. No more than - 32 labels can be associated with a batch. - runtime_config (google.cloud.dataproc_v1.types.RuntimeConfig): - Optional. Runtime configuration for the batch - execution. - environment_config (google.cloud.dataproc_v1.types.EnvironmentConfig): - Optional. Environment configuration for the - batch execution. - operation (str): - Output only. The resource name of the - operation associated with this batch. - state_history (MutableSequence[google.cloud.dataproc_v1.types.Batch.StateHistory]): - Output only. Historical state information for - the batch. - """ - class State(proto.Enum): - r"""The batch state. - - Values: - STATE_UNSPECIFIED (0): - The batch state is unknown. - PENDING (1): - The batch is created before running. - RUNNING (2): - The batch is running. - CANCELLING (3): - The batch is cancelling. - CANCELLED (4): - The batch cancellation was successful. - SUCCEEDED (5): - The batch completed successfully. - FAILED (6): - The batch is no longer running due to an - error. - """ - STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - CANCELLING = 3 - CANCELLED = 4 - SUCCEEDED = 5 - FAILED = 6 - - class StateHistory(proto.Message): - r"""Historical state information. - - Attributes: - state (google.cloud.dataproc_v1.types.Batch.State): - Output only. The state of the batch at this - point in history. - state_message (str): - Output only. Details about the state at this - point in history. - state_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the batch entered - the historical state. 
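For example, once a batch reaches a terminal state, ``state_history`` carries each prior state with its start time. A sketch (the batch resource name is a placeholder):

.. code-block:: python

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    batch = client.get_batch(
        name="projects/my-project/locations/us-central1/batches/my-batch"
    )

    # CANCELLED, SUCCEEDED, and FAILED are the terminal states above.
    terminal = {
        dataproc_v1.Batch.State.CANCELLED,
        dataproc_v1.Batch.State.SUCCEEDED,
        dataproc_v1.Batch.State.FAILED,
    }
    if batch.state in terminal:
        for entry in batch.state_history:
            print(entry.state.name, entry.state_start_time)
        print("final:", batch.state.name, batch.state_message)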
- """ - - state: 'Batch.State' = proto.Field( - proto.ENUM, - number=1, - enum='Batch.State', - ) - state_message: str = proto.Field( - proto.STRING, - number=2, - ) - state_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uuid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - pyspark_batch: 'PySparkBatch' = proto.Field( - proto.MESSAGE, - number=4, - oneof='batch_config', - message='PySparkBatch', - ) - spark_batch: 'SparkBatch' = proto.Field( - proto.MESSAGE, - number=5, - oneof='batch_config', - message='SparkBatch', - ) - spark_r_batch: 'SparkRBatch' = proto.Field( - proto.MESSAGE, - number=6, - oneof='batch_config', - message='SparkRBatch', - ) - spark_sql_batch: 'SparkSqlBatch' = proto.Field( - proto.MESSAGE, - number=7, - oneof='batch_config', - message='SparkSqlBatch', - ) - runtime_info: shared.RuntimeInfo = proto.Field( - proto.MESSAGE, - number=8, - message=shared.RuntimeInfo, - ) - state: State = proto.Field( - proto.ENUM, - number=9, - enum=State, - ) - state_message: str = proto.Field( - proto.STRING, - number=10, - ) - state_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - creator: str = proto.Field( - proto.STRING, - number=12, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=13, - ) - runtime_config: shared.RuntimeConfig = proto.Field( - proto.MESSAGE, - number=14, - message=shared.RuntimeConfig, - ) - environment_config: shared.EnvironmentConfig = proto.Field( - proto.MESSAGE, - number=15, - message=shared.EnvironmentConfig, - ) - operation: str = proto.Field( - proto.STRING, - number=16, - ) - state_history: MutableSequence[StateHistory] = proto.RepeatedField( - proto.MESSAGE, - number=17, - message=StateHistory, - ) - - -class PySparkBatch(proto.Message): - r"""A configuration for running an `Apache - PySpark `__ - batch workload. - - Attributes: - main_python_file_uri (str): - Required. The HCFS URI of the main Python - file to use as the Spark driver. Must be a .py - file. - args (MutableSequence[str]): - Optional. The arguments to pass to the driver. Do not - include arguments that can be set as batch properties, such - as ``--conf``, since a collision can occur that causes an - incorrect batch submission. - python_file_uris (MutableSequence[str]): - Optional. HCFS file URIs of Python files to pass to the - PySpark framework. Supported file types: ``.py``, ``.egg``, - and ``.zip``. - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to add to - the classpath of the Spark driver and tasks. - file_uris (MutableSequence[str]): - Optional. HCFS URIs of files to be placed in - the working directory of each executor. - archive_uris (MutableSequence[str]): - Optional. HCFS URIs of archives to be extracted into the - working directory of each executor. Supported file types: - ``.jar``, ``.tar``, ``.tar.gz``, ``.tgz``, and ``.zip``. 
- """ - - main_python_file_uri: str = proto.Field( - proto.STRING, - number=1, - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - python_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - -class SparkBatch(proto.Message): - r"""A configuration for running an `Apache - Spark `__ batch workload. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - main_jar_file_uri (str): - Optional. The HCFS URI of the jar file that - contains the main class. - - This field is a member of `oneof`_ ``driver``. - main_class (str): - Optional. The name of the driver main class. The jar file - that contains the class must be in the classpath or - specified in ``jar_file_uris``. - - This field is a member of `oneof`_ ``driver``. - args (MutableSequence[str]): - Optional. The arguments to pass to the driver. Do not - include arguments that can be set as batch properties, such - as ``--conf``, since a collision can occur that causes an - incorrect batch submission. - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to add to - the classpath of the Spark driver and tasks. - file_uris (MutableSequence[str]): - Optional. HCFS URIs of files to be placed in - the working directory of each executor. - archive_uris (MutableSequence[str]): - Optional. HCFS URIs of archives to be extracted into the - working directory of each executor. Supported file types: - ``.jar``, ``.tar``, ``.tar.gz``, ``.tgz``, and ``.zip``. - """ - - main_jar_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='driver', - ) - main_class: str = proto.Field( - proto.STRING, - number=2, - oneof='driver', - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - -class SparkRBatch(proto.Message): - r"""A configuration for running an `Apache - SparkR `__ batch - workload. - - Attributes: - main_r_file_uri (str): - Required. The HCFS URI of the main R file to use as the - driver. Must be a ``.R`` or ``.r`` file. - args (MutableSequence[str]): - Optional. The arguments to pass to the Spark driver. Do not - include arguments that can be set as batch properties, such - as ``--conf``, since a collision can occur that causes an - incorrect batch submission. - file_uris (MutableSequence[str]): - Optional. HCFS URIs of files to be placed in - the working directory of each executor. - archive_uris (MutableSequence[str]): - Optional. HCFS URIs of archives to be extracted into the - working directory of each executor. Supported file types: - ``.jar``, ``.tar``, ``.tar.gz``, ``.tgz``, and ``.zip``. 
- """ - - main_r_file_uri: str = proto.Field( - proto.STRING, - number=1, - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class SparkSqlBatch(proto.Message): - r"""A configuration for running `Apache Spark - SQL `__ queries as a batch workload. - - Attributes: - query_file_uri (str): - Required. The HCFS URI of the script that - contains Spark SQL queries to execute. - query_variables (MutableMapping[str, str]): - Optional. Mapping of query variable names to values - (equivalent to the Spark SQL command: - ``SET name="value";``). - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to be added - to the Spark CLASSPATH. - """ - - query_file_uri: str = proto.Field( - proto.STRING, - number=1, - ) - query_variables: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/clusters.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/clusters.py deleted file mode 100644 index c00f9f8a..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/clusters.py +++ /dev/null @@ -1,2262 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataproc_v1.types import shared -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataproc.v1', - manifest={ - 'Cluster', - 'ClusterConfig', - 'VirtualClusterConfig', - 'AuxiliaryServicesConfig', - 'EndpointConfig', - 'AutoscalingConfig', - 'EncryptionConfig', - 'GceClusterConfig', - 'NodeGroupAffinity', - 'ShieldedInstanceConfig', - 'ConfidentialInstanceConfig', - 'InstanceGroupConfig', - 'ManagedGroupConfig', - 'AcceleratorConfig', - 'DiskConfig', - 'AuxiliaryNodeGroup', - 'NodeGroup', - 'NodeInitializationAction', - 'ClusterStatus', - 'SecurityConfig', - 'KerberosConfig', - 'IdentityConfig', - 'SoftwareConfig', - 'LifecycleConfig', - 'MetastoreConfig', - 'ClusterMetrics', - 'DataprocMetricConfig', - 'CreateClusterRequest', - 'UpdateClusterRequest', - 'StopClusterRequest', - 'StartClusterRequest', - 'DeleteClusterRequest', - 'GetClusterRequest', - 'ListClustersRequest', - 'ListClustersResponse', - 'DiagnoseClusterRequest', - 'DiagnoseClusterResults', - 'ReservationAffinity', - }, -) - - -class Cluster(proto.Message): - r"""Describes the identifying information, config, and status of - a Dataproc cluster - - Attributes: - project_id (str): - Required. The Google Cloud Platform project - ID that the cluster belongs to. - cluster_name (str): - Required. The cluster name, which must be - unique within a project. The name must start - with a lowercase letter, and can contain up to - 51 lowercase letters, numbers, and hyphens. It - cannot end with a hyphen. The name of a deleted - cluster can be reused. - config (google.cloud.dataproc_v1.types.ClusterConfig): - Optional. The cluster config for a cluster of - Compute Engine Instances. Note that Dataproc may - set default values, and values may change when - clusters are updated. - - Exactly one of ClusterConfig or - VirtualClusterConfig must be specified. - virtual_cluster_config (google.cloud.dataproc_v1.types.VirtualClusterConfig): - Optional. The virtual cluster config is used when creating a - Dataproc cluster that does not directly control the - underlying compute resources, for example, when creating a - `Dataproc-on-GKE - cluster `__. - Dataproc may set default values, and values may change when - clusters are updated. Exactly one of - [config][google.cloud.dataproc.v1.Cluster.config] or - [virtual_cluster_config][google.cloud.dataproc.v1.Cluster.virtual_cluster_config] - must be specified. - labels (MutableMapping[str, str]): - Optional. The labels to associate with this cluster. Label - **keys** must contain 1 to 63 characters, and must conform - to `RFC 1035 `__. - Label **values** may be empty, but, if present, must contain - 1 to 63 characters, and must conform to `RFC - 1035 `__. No more than - 32 labels can be associated with a cluster. - status (google.cloud.dataproc_v1.types.ClusterStatus): - Output only. Cluster status. - status_history (MutableSequence[google.cloud.dataproc_v1.types.ClusterStatus]): - Output only. The previous cluster status. - cluster_uuid (str): - Output only. A cluster UUID (Unique Universal - Identifier). Dataproc generates this value when - it creates the cluster. - metrics (google.cloud.dataproc_v1.types.ClusterMetrics): - Output only. 
Contains cluster daemon metrics such as HDFS - and YARN stats. - - **Beta Feature**: This report is available for testing - purposes only. It may be changed before final release. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=2, - ) - config: 'ClusterConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='ClusterConfig', - ) - virtual_cluster_config: 'VirtualClusterConfig' = proto.Field( - proto.MESSAGE, - number=10, - message='VirtualClusterConfig', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - status: 'ClusterStatus' = proto.Field( - proto.MESSAGE, - number=4, - message='ClusterStatus', - ) - status_history: MutableSequence['ClusterStatus'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ClusterStatus', - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=6, - ) - metrics: 'ClusterMetrics' = proto.Field( - proto.MESSAGE, - number=9, - message='ClusterMetrics', - ) - - -class ClusterConfig(proto.Message): - r"""The cluster config. - - Attributes: - config_bucket (str): - Optional. A Cloud Storage bucket used to stage job - dependencies, config files, and job driver console output. - If you do not specify a staging bucket, Cloud Dataproc will - determine a Cloud Storage location (US, ASIA, or EU) for - your cluster's staging bucket according to the Compute - Engine zone where your cluster is deployed, and then create - and manage this project-level, per-location bucket (see - `Dataproc staging and temp - buckets `__). - **This field requires a Cloud Storage bucket name, not a - ``gs://...`` URI to a Cloud Storage bucket.** - temp_bucket (str): - Optional. A Cloud Storage bucket used to store ephemeral - cluster and jobs data, such as Spark and MapReduce history - files. If you do not specify a temp bucket, Dataproc will - determine a Cloud Storage location (US, ASIA, or EU) for - your cluster's temp bucket according to the Compute Engine - zone where your cluster is deployed, and then create and - manage this project-level, per-location bucket. The default - bucket has a TTL of 90 days, but you can use any TTL (or - none) if you specify a bucket (see `Dataproc staging and - temp - buckets `__). - **This field requires a Cloud Storage bucket name, not a - ``gs://...`` URI to a Cloud Storage bucket.** - gce_cluster_config (google.cloud.dataproc_v1.types.GceClusterConfig): - Optional. The shared Compute Engine config - settings for all instances in a cluster. - master_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): - Optional. The Compute Engine config settings - for the cluster's master instance. - worker_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): - Optional. The Compute Engine config settings - for the cluster's worker instances. - secondary_worker_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): - Optional. The Compute Engine config settings - for a cluster's secondary worker instances - software_config (google.cloud.dataproc_v1.types.SoftwareConfig): - Optional. The config settings for cluster - software. - initialization_actions (MutableSequence[google.cloud.dataproc_v1.types.NodeInitializationAction]): - Optional. Commands to execute on each node after config is - completed. By default, executables are run on master and all - worker nodes. 
You can test a node's ``role`` metadata to run - an executable on a master or worker node, as shown below - using ``curl`` (you can also use ``wget``): - - :: - - ROLE=$(curl -H Metadata-Flavor:Google - http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) - if [[ "${ROLE}" == 'Master' ]]; then - ... master specific actions ... - else - ... worker specific actions ... - fi - encryption_config (google.cloud.dataproc_v1.types.EncryptionConfig): - Optional. Encryption settings for the - cluster. - autoscaling_config (google.cloud.dataproc_v1.types.AutoscalingConfig): - Optional. Autoscaling config for the policy - associated with the cluster. Cluster does not - autoscale if this field is unset. - security_config (google.cloud.dataproc_v1.types.SecurityConfig): - Optional. Security settings for the cluster. - lifecycle_config (google.cloud.dataproc_v1.types.LifecycleConfig): - Optional. Lifecycle setting for the cluster. - endpoint_config (google.cloud.dataproc_v1.types.EndpointConfig): - Optional. Port/endpoint configuration for - this cluster - metastore_config (google.cloud.dataproc_v1.types.MetastoreConfig): - Optional. Metastore configuration. - dataproc_metric_config (google.cloud.dataproc_v1.types.DataprocMetricConfig): - Optional. The config for Dataproc metrics. - auxiliary_node_groups (MutableSequence[google.cloud.dataproc_v1.types.AuxiliaryNodeGroup]): - Optional. The node group settings. - """ - - config_bucket: str = proto.Field( - proto.STRING, - number=1, - ) - temp_bucket: str = proto.Field( - proto.STRING, - number=2, - ) - gce_cluster_config: 'GceClusterConfig' = proto.Field( - proto.MESSAGE, - number=8, - message='GceClusterConfig', - ) - master_config: 'InstanceGroupConfig' = proto.Field( - proto.MESSAGE, - number=9, - message='InstanceGroupConfig', - ) - worker_config: 'InstanceGroupConfig' = proto.Field( - proto.MESSAGE, - number=10, - message='InstanceGroupConfig', - ) - secondary_worker_config: 'InstanceGroupConfig' = proto.Field( - proto.MESSAGE, - number=12, - message='InstanceGroupConfig', - ) - software_config: 'SoftwareConfig' = proto.Field( - proto.MESSAGE, - number=13, - message='SoftwareConfig', - ) - initialization_actions: MutableSequence['NodeInitializationAction'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='NodeInitializationAction', - ) - encryption_config: 'EncryptionConfig' = proto.Field( - proto.MESSAGE, - number=15, - message='EncryptionConfig', - ) - autoscaling_config: 'AutoscalingConfig' = proto.Field( - proto.MESSAGE, - number=18, - message='AutoscalingConfig', - ) - security_config: 'SecurityConfig' = proto.Field( - proto.MESSAGE, - number=16, - message='SecurityConfig', - ) - lifecycle_config: 'LifecycleConfig' = proto.Field( - proto.MESSAGE, - number=17, - message='LifecycleConfig', - ) - endpoint_config: 'EndpointConfig' = proto.Field( - proto.MESSAGE, - number=19, - message='EndpointConfig', - ) - metastore_config: 'MetastoreConfig' = proto.Field( - proto.MESSAGE, - number=20, - message='MetastoreConfig', - ) - dataproc_metric_config: 'DataprocMetricConfig' = proto.Field( - proto.MESSAGE, - number=23, - message='DataprocMetricConfig', - ) - auxiliary_node_groups: MutableSequence['AuxiliaryNodeGroup'] = proto.RepeatedField( - proto.MESSAGE, - number=25, - message='AuxiliaryNodeGroup', - ) - - -class VirtualClusterConfig(proto.Message): - r"""The Dataproc cluster config for a cluster that does not directly - control the underlying compute resources, such as a `Dataproc-on-GKE - cluster `__. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - staging_bucket (str): - Optional. A Cloud Storage bucket used to stage job - dependencies, config files, and job driver console output. - If you do not specify a staging bucket, Cloud Dataproc will - determine a Cloud Storage location (US, ASIA, or EU) for - your cluster's staging bucket according to the Compute - Engine zone where your cluster is deployed, and then create - and manage this project-level, per-location bucket (see - `Dataproc staging and temp - buckets `__). - **This field requires a Cloud Storage bucket name, not a - ``gs://...`` URI to a Cloud Storage bucket.** - kubernetes_cluster_config (google.cloud.dataproc_v1.types.KubernetesClusterConfig): - Required. The configuration for running the - Dataproc cluster on Kubernetes. - - This field is a member of `oneof`_ ``infrastructure_config``. - auxiliary_services_config (google.cloud.dataproc_v1.types.AuxiliaryServicesConfig): - Optional. Configuration of auxiliary services - used by this cluster. - """ - - staging_bucket: str = proto.Field( - proto.STRING, - number=1, - ) - kubernetes_cluster_config: shared.KubernetesClusterConfig = proto.Field( - proto.MESSAGE, - number=6, - oneof='infrastructure_config', - message=shared.KubernetesClusterConfig, - ) - auxiliary_services_config: 'AuxiliaryServicesConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='AuxiliaryServicesConfig', - ) - - -class AuxiliaryServicesConfig(proto.Message): - r"""Auxiliary services configuration for a Cluster. - - Attributes: - metastore_config (google.cloud.dataproc_v1.types.MetastoreConfig): - Optional. The Hive Metastore configuration - for this workload. - spark_history_server_config (google.cloud.dataproc_v1.types.SparkHistoryServerConfig): - Optional. The Spark History Server - configuration for the workload. - """ - - metastore_config: 'MetastoreConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='MetastoreConfig', - ) - spark_history_server_config: shared.SparkHistoryServerConfig = proto.Field( - proto.MESSAGE, - number=2, - message=shared.SparkHistoryServerConfig, - ) - - -class EndpointConfig(proto.Message): - r"""Endpoint config for this cluster - - Attributes: - http_ports (MutableMapping[str, str]): - Output only. The map of port descriptions to URLs. Will only - be populated if enable_http_port_access is true. - enable_http_port_access (bool): - Optional. If true, enable http access to - specific ports on the cluster from external - sources. Defaults to false. - """ - - http_ports: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - enable_http_port_access: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class AutoscalingConfig(proto.Message): - r"""Autoscaling Policy config associated with the cluster. - - Attributes: - policy_uri (str): - Optional. The autoscaling policy used by the cluster. - - Only resource names including projectid and location - (region) are valid. Examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]`` - - ``projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]`` - - Note that the policy must be in the same project and - Dataproc region. - """ - - policy_uri: str = proto.Field( - proto.STRING, - number=1, - ) - - -class EncryptionConfig(proto.Message): - r"""Encryption settings for the cluster. 
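To tie the last few messages together, a sketch wiring an autoscaling policy and HTTP port access into a ``ClusterConfig`` (project, region, and policy id are placeholders; per the note above, the policy must live in the same project and Dataproc region as the cluster):

.. code-block:: python

    from google.cloud import dataproc_v1

    project, region = "my-project", "us-central1"  # placeholders

    config = dataproc_v1.ClusterConfig(
        autoscaling_config=dataproc_v1.AutoscalingConfig(
            policy_uri=(
                f"projects/{project}/locations/{region}"
                "/autoscalingPolicies/scale-on-yarn-memory"
            )
        ),
        # Expose specific cluster ports to external sources; the resulting
        # URLs are reported back in EndpointConfig.http_ports.
        endpoint_config=dataproc_v1.EndpointConfig(
            enable_http_port_access=True
        ),
    )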
- - Attributes: - gce_pd_kms_key_name (str): - Optional. The Cloud KMS key name to use for - PD disk encryption for all instances in the - cluster. - """ - - gce_pd_kms_key_name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GceClusterConfig(proto.Message): - r"""Common config settings for resources of Compute Engine - cluster instances, applicable to all instances in the cluster. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - zone_uri (str): - Optional. The Compute Engine zone where the Dataproc cluster - will be located. If omitted, the service will pick a zone in - the cluster's Compute Engine region. On a get request, zone - will always be present. - - A full URL, partial URI, or short name are valid. Examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]`` - - ``projects/[project_id]/zones/[zone]`` - - ``[zone]`` - network_uri (str): - Optional. The Compute Engine network to be used for machine - communications. Cannot be specified with subnetwork_uri. If - neither ``network_uri`` nor ``subnetwork_uri`` is specified, - the "default" network of the project is used, if it exists. - Cannot be a "Custom Subnet Network" (see `Using - Subnetworks `__ - for more information). - - A full URL, partial URI, or short name are valid. Examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/global/networks/default`` - - ``projects/[project_id]/global/networks/default`` - - ``default`` - subnetwork_uri (str): - Optional. The Compute Engine subnetwork to be used for - machine communications. Cannot be specified with - network_uri. - - A full URL, partial URI, or short name are valid. Examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/regions/[region]/subnetworks/sub0`` - - ``projects/[project_id]/regions/[region]/subnetworks/sub0`` - - ``sub0`` - internal_ip_only (bool): - Optional. If true, all instances in the cluster will only - have internal IP addresses. By default, clusters are not - restricted to internal IP addresses, and will have ephemeral - external IP addresses assigned to each instance. This - ``internal_ip_only`` restriction can only be enabled for - subnetwork enabled networks, and all off-cluster - dependencies must be configured to be accessible without - external IP addresses. - - This field is a member of `oneof`_ ``_internal_ip_only``. - private_ipv6_google_access (google.cloud.dataproc_v1.types.GceClusterConfig.PrivateIpv6GoogleAccess): - Optional. The type of IPv6 access for a - cluster. - service_account (str): - Optional. The `Dataproc service - account `__ - (also see `VM Data Plane - identity `__) - used by Dataproc cluster VM instances to access Google Cloud - Platform services. - - If not specified, the `Compute Engine default service - account `__ - is used. - service_account_scopes (MutableSequence[str]): - Optional. The URIs of service account scopes to be included - in Compute Engine instances. 
The following base set of - scopes is always included: - - - https://www.googleapis.com/auth/cloud.useraccounts.readonly - - https://www.googleapis.com/auth/devstorage.read_write - - https://www.googleapis.com/auth/logging.write - - If no scopes are specified, the following defaults are also - provided: - - - https://www.googleapis.com/auth/bigquery - - https://www.googleapis.com/auth/bigtable.admin.table - - https://www.googleapis.com/auth/bigtable.data - - https://www.googleapis.com/auth/devstorage.full_control - tags (MutableSequence[str]): - The Compute Engine tags to add to all instances (see - `Tagging - instances `__). - metadata (MutableMapping[str, str]): - The Compute Engine metadata entries to add to all instances - (see `Project and instance - metadata `__). - reservation_affinity (google.cloud.dataproc_v1.types.ReservationAffinity): - Optional. Reservation Affinity for consuming - Zonal reservation. - node_group_affinity (google.cloud.dataproc_v1.types.NodeGroupAffinity): - Optional. Node Group Affinity for sole-tenant - clusters. - shielded_instance_config (google.cloud.dataproc_v1.types.ShieldedInstanceConfig): - Optional. Shielded Instance Config for clusters using - `Compute Engine Shielded - VMs `__. - confidential_instance_config (google.cloud.dataproc_v1.types.ConfidentialInstanceConfig): - Optional. Confidential Instance Config for clusters using - `Confidential - VMs `__. - """ - class PrivateIpv6GoogleAccess(proto.Enum): - r"""``PrivateIpv6GoogleAccess`` controls whether and how Dataproc - cluster nodes can communicate with Google Services through gRPC over - IPv6. These values are directly mapped to corresponding values in - the `Compute Engine Instance - fields `__. - - Values: - PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): - If unspecified, Compute Engine default behavior will apply, - which is the same as - [INHERIT_FROM_SUBNETWORK][google.cloud.dataproc.v1.GceClusterConfig.PrivateIpv6GoogleAccess.INHERIT_FROM_SUBNETWORK]. - INHERIT_FROM_SUBNETWORK (1): - Private access to and from Google Services - configuration inherited from the subnetwork - configuration. This is the default Compute - Engine behavior. - OUTBOUND (2): - Enables outbound private IPv6 access to - Google Services from the Dataproc cluster. - BIDIRECTIONAL (3): - Enables bidirectional private IPv6 access - between Google Services and the Dataproc - cluster. 
- """ - PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 - INHERIT_FROM_SUBNETWORK = 1 - OUTBOUND = 2 - BIDIRECTIONAL = 3 - - zone_uri: str = proto.Field( - proto.STRING, - number=1, - ) - network_uri: str = proto.Field( - proto.STRING, - number=2, - ) - subnetwork_uri: str = proto.Field( - proto.STRING, - number=6, - ) - internal_ip_only: bool = proto.Field( - proto.BOOL, - number=7, - optional=True, - ) - private_ipv6_google_access: PrivateIpv6GoogleAccess = proto.Field( - proto.ENUM, - number=12, - enum=PrivateIpv6GoogleAccess, - ) - service_account: str = proto.Field( - proto.STRING, - number=8, - ) - service_account_scopes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - metadata: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - reservation_affinity: 'ReservationAffinity' = proto.Field( - proto.MESSAGE, - number=11, - message='ReservationAffinity', - ) - node_group_affinity: 'NodeGroupAffinity' = proto.Field( - proto.MESSAGE, - number=13, - message='NodeGroupAffinity', - ) - shielded_instance_config: 'ShieldedInstanceConfig' = proto.Field( - proto.MESSAGE, - number=14, - message='ShieldedInstanceConfig', - ) - confidential_instance_config: 'ConfidentialInstanceConfig' = proto.Field( - proto.MESSAGE, - number=15, - message='ConfidentialInstanceConfig', - ) - - -class NodeGroupAffinity(proto.Message): - r"""Node Group Affinity for clusters using sole-tenant node groups. - **The Dataproc ``NodeGroupAffinity`` resource is not related to the - Dataproc [NodeGroup][google.cloud.dataproc.v1.NodeGroup] resource.** - - Attributes: - node_group_uri (str): - Required. The URI of a sole-tenant `node group - resource `__ - that the cluster will be created on. - - A full URL, partial URI, or node group name are valid. - Examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/nodeGroups/node-group-1`` - - ``projects/[project_id]/zones/[zone]/nodeGroups/node-group-1`` - - ``node-group-1`` - """ - - node_group_uri: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ShieldedInstanceConfig(proto.Message): - r"""Shielded Instance Config for clusters using `Compute Engine Shielded - VMs `__. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enable_secure_boot (bool): - Optional. Defines whether instances have - Secure Boot enabled. - - This field is a member of `oneof`_ ``_enable_secure_boot``. - enable_vtpm (bool): - Optional. Defines whether instances have the - vTPM enabled. - - This field is a member of `oneof`_ ``_enable_vtpm``. - enable_integrity_monitoring (bool): - Optional. Defines whether instances have - integrity monitoring enabled. - - This field is a member of `oneof`_ ``_enable_integrity_monitoring``. - """ - - enable_secure_boot: bool = proto.Field( - proto.BOOL, - number=1, - optional=True, - ) - enable_vtpm: bool = proto.Field( - proto.BOOL, - number=2, - optional=True, - ) - enable_integrity_monitoring: bool = proto.Field( - proto.BOOL, - number=3, - optional=True, - ) - - -class ConfidentialInstanceConfig(proto.Message): - r"""Confidential Instance Config for clusters using `Confidential - VMs `__ - - Attributes: - enable_confidential_compute (bool): - Optional. Defines whether the instance should - have confidential compute enabled. 
- """ - - enable_confidential_compute: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class InstanceGroupConfig(proto.Message): - r"""The config settings for Compute Engine resources in - an instance group, such as a master or worker group. - - Attributes: - num_instances (int): - Optional. The number of VM instances in the instance group. - For `HA - cluster `__ - `master_config <#FIELDS.master_config>`__ groups, **must be - set to 3**. For standard cluster - `master_config <#FIELDS.master_config>`__ groups, **must be - set to 1**. - instance_names (MutableSequence[str]): - Output only. The list of instance names. Dataproc derives - the names from ``cluster_name``, ``num_instances``, and the - instance group. - image_uri (str): - Optional. The Compute Engine image resource used for cluster - instances. - - The URI can represent an image or image family. - - Image examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/global/images/[image-id]`` - - ``projects/[project_id]/global/images/[image-id]`` - - ``image-id`` - - Image family examples. Dataproc will use the most recent - image from the family: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/global/images/family/[custom-image-family-name]`` - - ``projects/[project_id]/global/images/family/[custom-image-family-name]`` - - If the URI is unspecified, it will be inferred from - ``SoftwareConfig.image_version`` or the system default. - machine_type_uri (str): - Optional. The Compute Engine machine type used for cluster - instances. - - A full URL, partial URI, or short name are valid. Examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/machineTypes/n1-standard-2`` - - ``projects/[project_id]/zones/[zone]/machineTypes/n1-standard-2`` - - ``n1-standard-2`` - - **Auto Zone Exception**: If you are using the Dataproc `Auto - Zone - Placement `__ - feature, you must use the short name of the machine type - resource, for example, ``n1-standard-2``. - disk_config (google.cloud.dataproc_v1.types.DiskConfig): - Optional. Disk option config settings. - is_preemptible (bool): - Output only. Specifies that this instance - group contains preemptible instances. - preemptibility (google.cloud.dataproc_v1.types.InstanceGroupConfig.Preemptibility): - Optional. Specifies the preemptibility of the instance - group. - - The default value for master and worker groups is - ``NON_PREEMPTIBLE``. This default cannot be changed. - - The default value for secondary instances is - ``PREEMPTIBLE``. - managed_group_config (google.cloud.dataproc_v1.types.ManagedGroupConfig): - Output only. The config for Compute Engine - Instance Group Manager that manages this group. - This is only used for preemptible instance - groups. - accelerators (MutableSequence[google.cloud.dataproc_v1.types.AcceleratorConfig]): - Optional. The Compute Engine accelerator - configuration for these instances. - min_cpu_platform (str): - Optional. Specifies the minimum cpu platform for the - Instance Group. See `Dataproc -> Minimum CPU - Platform `__. - """ - class Preemptibility(proto.Enum): - r"""Controls the use of preemptible instances within the group. - - Values: - PREEMPTIBILITY_UNSPECIFIED (0): - Preemptibility is unspecified, the system - will choose the appropriate setting for each - instance group. - NON_PREEMPTIBLE (1): - Instances are non-preemptible. - This option is allowed for all instance groups - and is the only valid value for Master and - Worker instance groups. 
- PREEMPTIBLE (2): - Instances are [preemptible] - (https://cloud.google.com/compute/docs/instances/preemptible). - - This option is allowed only for [secondary worker] - (https://cloud.google.com/dataproc/docs/concepts/compute/secondary-vms) - groups. - SPOT (3): - Instances are [Spot VMs] - (https://cloud.google.com/compute/docs/instances/spot). - - This option is allowed only for [secondary worker] - (https://cloud.google.com/dataproc/docs/concepts/compute/secondary-vms) - groups. Spot VMs are the latest version of [preemptible VMs] - (https://cloud.google.com/compute/docs/instances/preemptible), - and provide additional features. - """ - PREEMPTIBILITY_UNSPECIFIED = 0 - NON_PREEMPTIBLE = 1 - PREEMPTIBLE = 2 - SPOT = 3 - - num_instances: int = proto.Field( - proto.INT32, - number=1, - ) - instance_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - image_uri: str = proto.Field( - proto.STRING, - number=3, - ) - machine_type_uri: str = proto.Field( - proto.STRING, - number=4, - ) - disk_config: 'DiskConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='DiskConfig', - ) - is_preemptible: bool = proto.Field( - proto.BOOL, - number=6, - ) - preemptibility: Preemptibility = proto.Field( - proto.ENUM, - number=10, - enum=Preemptibility, - ) - managed_group_config: 'ManagedGroupConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='ManagedGroupConfig', - ) - accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='AcceleratorConfig', - ) - min_cpu_platform: str = proto.Field( - proto.STRING, - number=9, - ) - - -class ManagedGroupConfig(proto.Message): - r"""Specifies the resources used to actively manage an instance - group. - - Attributes: - instance_template_name (str): - Output only. The name of the Instance - Template used for the Managed Instance Group. - instance_group_manager_name (str): - Output only. The name of the Instance Group - Manager for this group. - """ - - instance_template_name: str = proto.Field( - proto.STRING, - number=1, - ) - instance_group_manager_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AcceleratorConfig(proto.Message): - r"""Specifies the type and number of accelerator cards attached to the - instances of an instance. See `GPUs on Compute - Engine `__. - - Attributes: - accelerator_type_uri (str): - Full URL, partial URI, or short name of the accelerator type - resource to expose to this instance. See `Compute Engine - AcceleratorTypes `__. - - Examples: - - - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``projects/[project_id]/zones/[zone]/acceleratorTypes/nvidia-tesla-k80`` - - ``nvidia-tesla-k80`` - - **Auto Zone Exception**: If you are using the Dataproc `Auto - Zone - Placement `__ - feature, you must use the short name of the accelerator type - resource, for example, ``nvidia-tesla-k80``. - accelerator_count (int): - The number of the accelerator cards of this - type exposed to this instance. - """ - - accelerator_type_uri: str = proto.Field( - proto.STRING, - number=1, - ) - accelerator_count: int = proto.Field( - proto.INT32, - number=2, - ) - - -class DiskConfig(proto.Message): - r"""Specifies the config of disk options for a group of VM - instances. - - Attributes: - boot_disk_type (str): - Optional. Type of the boot disk (default is "pd-standard"). 
- Valid values: "pd-balanced" (Persistent Disk Balanced Solid - State Drive), "pd-ssd" (Persistent Disk Solid State Drive), - or "pd-standard" (Persistent Disk Hard Disk Drive). See - `Disk - types `__. - boot_disk_size_gb (int): - Optional. Size in GB of the boot disk - (default is 500GB). - num_local_ssds (int): - Optional. Number of attached SSDs, from 0 to 8 (default is - 0). If SSDs are not attached, the boot disk is used to store - runtime logs and - `HDFS `__ - data. If one or more SSDs are attached, this runtime bulk - data is spread across them, and the boot disk contains only - basic config and installed binaries. - - Note: Local SSD options may vary by machine type and number - of vCPUs selected. - local_ssd_interface (str): - Optional. Interface type of local SSDs (default is "scsi"). - Valid values: "scsi" (Small Computer System Interface), - "nvme" (Non-Volatile Memory Express). See `local SSD - performance `__. - """ - - boot_disk_type: str = proto.Field( - proto.STRING, - number=3, - ) - boot_disk_size_gb: int = proto.Field( - proto.INT32, - number=1, - ) - num_local_ssds: int = proto.Field( - proto.INT32, - number=2, - ) - local_ssd_interface: str = proto.Field( - proto.STRING, - number=4, - ) - - -class AuxiliaryNodeGroup(proto.Message): - r"""Node group identification and configuration information. - - Attributes: - node_group (google.cloud.dataproc_v1.types.NodeGroup): - Required. Node group configuration. - node_group_id (str): - Optional. A node group ID. Generated if not specified. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). Cannot begin or end with - underscore or hyphen. Must consist of from 3 to 33 - characters. - """ - - node_group: 'NodeGroup' = proto.Field( - proto.MESSAGE, - number=1, - message='NodeGroup', - ) - node_group_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class NodeGroup(proto.Message): - r"""Dataproc Node Group. **The Dataproc ``NodeGroup`` resource is not - related to the Dataproc - [NodeGroupAffinity][google.cloud.dataproc.v1.NodeGroupAffinity] - resource.** - - Attributes: - name (str): - The Node group `resource name `__. - roles (MutableSequence[google.cloud.dataproc_v1.types.NodeGroup.Role]): - Required. Node group roles. - node_group_config (google.cloud.dataproc_v1.types.InstanceGroupConfig): - Optional. The node group instance group - configuration. - labels (MutableMapping[str, str]): - Optional. Node group labels. - - - Label **keys** must consist of from 1 to 63 characters - and conform to `RFC - 1035 `__. - - Label **values** can be empty. If specified, they must - consist of from 1 to 63 characters and conform to [RFC - 1035] (https://www.ietf.org/rfc/rfc1035.txt). - - The node group must have no more than 32 labels. - """ - class Role(proto.Enum): - r"""Node group roles. - - Values: - ROLE_UNSPECIFIED (0): - Required unspecified role. - DRIVER (1): - Job drivers run on the node group. 
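Tying the last few messages together, a hedged sketch of an auxiliary driver node group (the group ID and sizes are illustrative) that also exercises ``DiskConfig``:

.. code-block:: python

    from google.cloud import dataproc_v1

    driver_group = dataproc_v1.AuxiliaryNodeGroup(
        node_group=dataproc_v1.NodeGroup(
            roles=[dataproc_v1.NodeGroup.Role.DRIVER],
            node_group_config=dataproc_v1.InstanceGroupConfig(
                num_instances=2,
                disk_config=dataproc_v1.DiskConfig(
                    boot_disk_type="pd-ssd",
                    boot_disk_size_gb=500,
                    num_local_ssds=1,
                    local_ssd_interface="nvme",
                ),
            ),
        ),
        node_group_id="driver-pool-1",  # 3-33 chars, letters/digits/_/-
    )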
- """ - ROLE_UNSPECIFIED = 0 - DRIVER = 1 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - roles: MutableSequence[Role] = proto.RepeatedField( - proto.ENUM, - number=2, - enum=Role, - ) - node_group_config: 'InstanceGroupConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='InstanceGroupConfig', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - -class NodeInitializationAction(proto.Message): - r"""Specifies an executable to run on a fully configured node and - a timeout period for executable completion. - - Attributes: - executable_file (str): - Required. Cloud Storage URI of executable - file. - execution_timeout (google.protobuf.duration_pb2.Duration): - Optional. Amount of time executable has to complete. Default - is 10 minutes (see JSON representation of - `Duration `__). - - Cluster creation fails with an explanatory error message - (the name of the executable that caused the error and the - exceeded timeout period) if the executable is not completed - at end of the timeout period. - """ - - executable_file: str = proto.Field( - proto.STRING, - number=1, - ) - execution_timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - -class ClusterStatus(proto.Message): - r"""The status of a cluster and its instances. - - Attributes: - state (google.cloud.dataproc_v1.types.ClusterStatus.State): - Output only. The cluster's state. - detail (str): - Optional. Output only. Details of cluster's - state. - state_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when this state was entered (see JSON - representation of - `Timestamp `__). - substate (google.cloud.dataproc_v1.types.ClusterStatus.Substate): - Output only. Additional state information - that includes status reported by the agent. - """ - class State(proto.Enum): - r"""The cluster state. - - Values: - UNKNOWN (0): - The cluster state is unknown. - CREATING (1): - The cluster is being created and set up. It - is not ready for use. - RUNNING (2): - The cluster is currently running and healthy. It is ready - for use. - - **Note:** The cluster state changes from "creating" to - "running" status after the master node(s), first two primary - worker nodes (and the last primary worker node if primary - workers > 2) are running. - ERROR (3): - The cluster encountered an error. It is not - ready for use. - ERROR_DUE_TO_UPDATE (9): - The cluster has encountered an error while - being updated. Jobs can be submitted to the - cluster, but the cluster cannot be updated. - DELETING (4): - The cluster is being deleted. It cannot be - used. - UPDATING (5): - The cluster is being updated. It continues to - accept and process jobs. - STOPPING (6): - The cluster is being stopped. It cannot be - used. - STOPPED (7): - The cluster is currently stopped. It is not - ready for use. - STARTING (8): - The cluster is being started. It is not ready - for use. - """ - UNKNOWN = 0 - CREATING = 1 - RUNNING = 2 - ERROR = 3 - ERROR_DUE_TO_UPDATE = 9 - DELETING = 4 - UPDATING = 5 - STOPPING = 6 - STOPPED = 7 - STARTING = 8 - - class Substate(proto.Enum): - r"""The cluster substate. - - Values: - UNSPECIFIED (0): - The cluster substate is unknown. - UNHEALTHY (1): - The cluster is known to be in an unhealthy - state (for example, critical daemons are not - running or HDFS capacity is exhausted). - - Applies to RUNNING state. 
- STALE_STATUS (2): - The agent-reported status is out of date (may - occur if Dataproc loses communication with - Agent). - Applies to RUNNING state. - """ - UNSPECIFIED = 0 - UNHEALTHY = 1 - STALE_STATUS = 2 - - state: State = proto.Field( - proto.ENUM, - number=1, - enum=State, - ) - detail: str = proto.Field( - proto.STRING, - number=2, - ) - state_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - substate: Substate = proto.Field( - proto.ENUM, - number=4, - enum=Substate, - ) - - -class SecurityConfig(proto.Message): - r"""Security related configuration, including encryption, - Kerberos, etc. - - Attributes: - kerberos_config (google.cloud.dataproc_v1.types.KerberosConfig): - Optional. Kerberos related configuration. - identity_config (google.cloud.dataproc_v1.types.IdentityConfig): - Optional. Identity related configuration, - including service account based secure - multi-tenancy user mappings. - """ - - kerberos_config: 'KerberosConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='KerberosConfig', - ) - identity_config: 'IdentityConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='IdentityConfig', - ) - - -class KerberosConfig(proto.Message): - r"""Specifies Kerberos related configuration. - - Attributes: - enable_kerberos (bool): - Optional. Flag to indicate whether to - Kerberize the cluster (default: - - false). Set this field to true to enable - Kerberos on a cluster. - root_principal_password_uri (str): - Optional. The Cloud Storage URI of a KMS - encrypted file containing the root principal - password. - kms_key_uri (str): - Optional. The uri of the KMS key used to - encrypt various sensitive files. - keystore_uri (str): - Optional. The Cloud Storage URI of the - keystore file used for SSL encryption. If not - provided, Dataproc will provide a self-signed - certificate. - truststore_uri (str): - Optional. The Cloud Storage URI of the - truststore file used for SSL encryption. If not - provided, Dataproc will provide a self-signed - certificate. - keystore_password_uri (str): - Optional. The Cloud Storage URI of a KMS - encrypted file containing the password to the - user provided keystore. For the self-signed - certificate, this password is generated by - Dataproc. - key_password_uri (str): - Optional. The Cloud Storage URI of a KMS - encrypted file containing the password to the - user provided key. For the self-signed - certificate, this password is generated by - Dataproc. - truststore_password_uri (str): - Optional. The Cloud Storage URI of a KMS - encrypted file containing the password to the - user provided truststore. For the self-signed - certificate, this password is generated by - Dataproc. - cross_realm_trust_realm (str): - Optional. The remote realm the Dataproc - on-cluster KDC will trust, should the user - enable cross realm trust. - cross_realm_trust_kdc (str): - Optional. The KDC (IP or hostname) for the - remote trusted realm in a cross realm trust - relationship. - cross_realm_trust_admin_server (str): - Optional. The admin server (IP or hostname) - for the remote trusted realm in a cross realm - trust relationship. - cross_realm_trust_shared_password_uri (str): - Optional. The Cloud Storage URI of a KMS - encrypted file containing the shared password - between the on-cluster Kerberos realm and the - remote trusted realm, in a cross realm trust - relationship. - kdc_db_key_uri (str): - Optional. 
The Cloud Storage URI of a KMS - encrypted file containing the master key of the - KDC database. - tgt_lifetime_hours (int): - Optional. The lifetime of the ticket granting - ticket, in hours. If not specified, or user - specifies 0, then default value 10 will be used. - realm (str): - Optional. The name of the on-cluster Kerberos - realm. If not specified, the uppercased domain - of hostnames will be the realm. - """ - - enable_kerberos: bool = proto.Field( - proto.BOOL, - number=1, - ) - root_principal_password_uri: str = proto.Field( - proto.STRING, - number=2, - ) - kms_key_uri: str = proto.Field( - proto.STRING, - number=3, - ) - keystore_uri: str = proto.Field( - proto.STRING, - number=4, - ) - truststore_uri: str = proto.Field( - proto.STRING, - number=5, - ) - keystore_password_uri: str = proto.Field( - proto.STRING, - number=6, - ) - key_password_uri: str = proto.Field( - proto.STRING, - number=7, - ) - truststore_password_uri: str = proto.Field( - proto.STRING, - number=8, - ) - cross_realm_trust_realm: str = proto.Field( - proto.STRING, - number=9, - ) - cross_realm_trust_kdc: str = proto.Field( - proto.STRING, - number=10, - ) - cross_realm_trust_admin_server: str = proto.Field( - proto.STRING, - number=11, - ) - cross_realm_trust_shared_password_uri: str = proto.Field( - proto.STRING, - number=12, - ) - kdc_db_key_uri: str = proto.Field( - proto.STRING, - number=13, - ) - tgt_lifetime_hours: int = proto.Field( - proto.INT32, - number=14, - ) - realm: str = proto.Field( - proto.STRING, - number=15, - ) - - -class IdentityConfig(proto.Message): - r"""Identity related configuration, including service account - based secure multi-tenancy user mappings. - - Attributes: - user_service_account_mapping (MutableMapping[str, str]): - Required. Map of user to service account. - """ - - user_service_account_mapping: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - - -class SoftwareConfig(proto.Message): - r"""Specifies the selection and config of software inside the - cluster. - - Attributes: - image_version (str): - Optional. The version of software inside the cluster. It - must be one of the supported `Dataproc - Versions `__, - such as "1.2" (including a subminor version, such as - "1.2.29"), or the `"preview" - version `__. - If unspecified, it defaults to the latest Debian version. - properties (MutableMapping[str, str]): - Optional. The properties to set on daemon config files. - - Property keys are specified in ``prefix:property`` format, - for example ``core:hadoop.tmp.dir``. The following are - supported prefixes and their mappings: - - - capacity-scheduler: ``capacity-scheduler.xml`` - - core: ``core-site.xml`` - - distcp: ``distcp-default.xml`` - - hdfs: ``hdfs-site.xml`` - - hive: ``hive-site.xml`` - - mapred: ``mapred-site.xml`` - - pig: ``pig.properties`` - - spark: ``spark-defaults.conf`` - - yarn: ``yarn-site.xml`` - - For more information, see `Cluster - properties `__. - optional_components (MutableSequence[google.cloud.dataproc_v1.types.Component]): - Optional. The set of components to activate - on the cluster. - """ - - image_version: str = proto.Field( - proto.STRING, - number=1, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - optional_components: MutableSequence[shared.Component] = proto.RepeatedField( - proto.ENUM, - number=3, - enum=shared.Component, - ) - - -class LifecycleConfig(proto.Message): - r"""Specifies the cluster auto-delete schedule configuration. 
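``SoftwareConfig`` and the ``LifecycleConfig`` introduced here both hang off ``ClusterConfig``; a sketch with example values (the ``ttl`` oneof semantics are spelled out below):

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.protobuf import duration_pb2

    config = dataproc_v1.ClusterConfig(
        software_config=dataproc_v1.SoftwareConfig(
            image_version="2.1",
            properties={"spark:spark.executor.memory": "4g"},  # prefix:property form
            optional_components=[dataproc_v1.Component.JUPYTER],
        ),
        lifecycle_config=dataproc_v1.LifecycleConfig(
            idle_delete_ttl=duration_pb2.Duration(seconds=3600),  # delete after 1 h idle
        ),
    )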
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - idle_delete_ttl (google.protobuf.duration_pb2.Duration): - Optional. The duration to keep the cluster alive while - idling (when no jobs are running). Passing this threshold - will cause the cluster to be deleted. Minimum value is 5 - minutes; maximum value is 14 days (see JSON representation - of - `Duration `__). - auto_delete_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The time when cluster will be auto-deleted (see - JSON representation of - `Timestamp `__). - - This field is a member of `oneof`_ ``ttl``. - auto_delete_ttl (google.protobuf.duration_pb2.Duration): - Optional. The lifetime duration of cluster. The cluster will - be auto-deleted at the end of this period. Minimum value is - 10 minutes; maximum value is 14 days (see JSON - representation of - `Duration `__). - - This field is a member of `oneof`_ ``ttl``. - idle_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when cluster became idle (most recent - job finished) and became eligible for deletion due to - idleness (see JSON representation of - `Timestamp `__). - """ - - idle_delete_ttl: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - auto_delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - oneof='ttl', - message=timestamp_pb2.Timestamp, - ) - auto_delete_ttl: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - oneof='ttl', - message=duration_pb2.Duration, - ) - idle_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class MetastoreConfig(proto.Message): - r"""Specifies a Metastore configuration. - - Attributes: - dataproc_metastore_service (str): - Required. Resource name of an existing Dataproc Metastore - service. - - Example: - - - ``projects/[project_id]/locations/[dataproc_region]/services/[service-name]`` - """ - - dataproc_metastore_service: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ClusterMetrics(proto.Message): - r"""Contains cluster daemon metrics, such as HDFS and YARN stats. - - **Beta Feature**: This report is available for testing purposes - only. It may be changed before final release. - - Attributes: - hdfs_metrics (MutableMapping[str, int]): - The HDFS metrics. - yarn_metrics (MutableMapping[str, int]): - YARN metrics. - """ - - hdfs_metrics: MutableMapping[str, int] = proto.MapField( - proto.STRING, - proto.INT64, - number=1, - ) - yarn_metrics: MutableMapping[str, int] = proto.MapField( - proto.STRING, - proto.INT64, - number=2, - ) - - -class DataprocMetricConfig(proto.Message): - r"""Dataproc metric config. - - Attributes: - metrics (MutableSequence[google.cloud.dataproc_v1.types.DataprocMetricConfig.Metric]): - Required. Metrics sources to enable. - """ - class MetricSource(proto.Enum): - r"""A source for the collection of Dataproc OSS metrics (see [available - OSS metrics] - (https://cloud.google.com//dataproc/docs/guides/monitoring#available_oss_metrics)). - - Values: - METRIC_SOURCE_UNSPECIFIED (0): - Required unspecified metric source. - MONITORING_AGENT_DEFAULTS (1): - Default monitoring agent metrics. 
If this source is enabled,
-                Dataproc enables the monitoring agent in Compute Engine, and
-                collects default monitoring agent metrics, which are
-                published with an ``agent.googleapis.com`` prefix.
-            HDFS (2):
-                HDFS metric source.
-            SPARK (3):
-                Spark metric source.
-            YARN (4):
-                YARN metric source.
-            SPARK_HISTORY_SERVER (5):
-                Spark History Server metric source.
-            HIVESERVER2 (6):
-                HiveServer2 metric source.
-            HIVEMETASTORE (7):
-                Hive Metastore metric source.
-        """
-        METRIC_SOURCE_UNSPECIFIED = 0
-        MONITORING_AGENT_DEFAULTS = 1
-        HDFS = 2
-        SPARK = 3
-        YARN = 4
-        SPARK_HISTORY_SERVER = 5
-        HIVESERVER2 = 6
-        HIVEMETASTORE = 7
-
-    class Metric(proto.Message):
-        r"""A Dataproc OSS metric.
-
-        Attributes:
-            metric_source (google.cloud.dataproc_v1.types.DataprocMetricConfig.MetricSource):
-                Required. Default metrics are collected unless
-                ``metricOverrides`` are specified for the metric source (see
-                [Available OSS metrics]
-                (https://cloud.google.com/dataproc/docs/guides/monitoring#available_oss_metrics)
-                for more information).
-            metric_overrides (MutableSequence[str]):
-                Optional. Specify one or more [available OSS metrics]
-                (https://cloud.google.com/dataproc/docs/guides/monitoring#available_oss_metrics)
-                to collect for the metric source (for the ``SPARK`` metric
-                source, any [Spark metric]
-                (https://spark.apache.org/docs/latest/monitoring.html#metrics)
-                can be specified).
-
-                Provide metrics in the following format:
-                ``METRIC_SOURCE:INSTANCE:GROUP:METRIC``. Use camelcase as
-                appropriate.
-
-                Examples:
-
-                ::
-
-                   yarn:ResourceManager:QueueMetrics:AppsCompleted
-                   spark:driver:DAGScheduler:job.allJobs
-                   sparkHistoryServer:JVM:Memory:NonHeapMemoryUsage.committed
-                   hiveserver2:JVM:Memory:NonHeapMemoryUsage.used
-
-                Notes:
-
-                -  Only the specified overridden metrics will be collected
-                   for the metric source. For example, if one or more
-                   ``spark:executor`` metrics are listed as metric
-                   overrides, other ``SPARK`` metrics will not be collected.
-                   The collection of the default metrics for other OSS
-                   metric sources is unaffected. For example, if both
-                   ``SPARK`` and ``YARN`` metric sources are enabled, and
-                   overrides are provided for Spark metrics only, all
-                   default YARN metrics will be collected.
-        """
-
-        metric_source: 'DataprocMetricConfig.MetricSource' = proto.Field(
-            proto.ENUM,
-            number=1,
-            enum='DataprocMetricConfig.MetricSource',
-        )
-        metric_overrides: MutableSequence[str] = proto.RepeatedField(
-            proto.STRING,
-            number=2,
-        )
-
-    metrics: MutableSequence[Metric] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=Metric,
-    )
-
-
-class CreateClusterRequest(proto.Message):
-    r"""A request to create a cluster.
-
-    Attributes:
-        project_id (str):
-            Required. The ID of the Google Cloud Platform
-            project that the cluster belongs to.
-        region (str):
-            Required. The Dataproc region in which to
-            handle the request.
-        cluster (google.cloud.dataproc_v1.types.Cluster):
-            Required. The cluster to create.
-        request_id (str):
-            Optional. A unique ID used to identify the request. If the
-            server receives two
-            `CreateClusterRequest `__\ s
-            with the same id, then the second request will be ignored
-            and the first
-            [google.longrunning.Operation][google.longrunning.Operation]
-            created and stored in the backend is returned.
-
-            It is recommended to always set this value to a
-            `UUID `__.
-
-            The ID must contain only letters (a-z, A-Z), numbers (0-9),
-            underscores (_), and hyphens (-). The maximum length is 40
-            characters.
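A hedged end-to-end sketch (project, region, and resource names are placeholders): creating a cluster that enables the metric config above, with a UUID ``request_id`` so a retried request returns the original operation instead of creating a second cluster. ``request_id`` is not among ``create_cluster``'s flattened parameters, so the request is passed as a dict:

.. code-block:: python

    import uuid

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    cluster = dataproc_v1.Cluster(
        cluster_name="my-cluster",
        config=dataproc_v1.ClusterConfig(
            dataproc_metric_config=dataproc_v1.DataprocMetricConfig(
                metrics=[{
                    "metric_source": dataproc_v1.DataprocMetricConfig.MetricSource.SPARK,
                    "metric_overrides": ["spark:driver:DAGScheduler:job.allJobs"],
                }]
            ),
        ),
    )
    operation = client.create_cluster(request={
        "project_id": "my-project",
        "region": "us-central1",
        "cluster": cluster,
        "request_id": str(uuid.uuid4()),  # a duplicate submit returns the first Operation
    })
    operation.result()  # block until creation finishes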
- action_on_failed_primary_workers (google.cloud.dataproc_v1.types.FailureAction): - Optional. Failure action when primary worker - creation fails. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=3, - ) - cluster: 'Cluster' = proto.Field( - proto.MESSAGE, - number=2, - message='Cluster', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - action_on_failed_primary_workers: shared.FailureAction = proto.Field( - proto.ENUM, - number=5, - enum=shared.FailureAction, - ) - - -class UpdateClusterRequest(proto.Message): - r"""A request to update a cluster. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project the cluster belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - cluster_name (str): - Required. The cluster name. - cluster (google.cloud.dataproc_v1.types.Cluster): - Required. The changes to the cluster. - graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): - Optional. Timeout for graceful YARN decommissioning. - Graceful decommissioning allows removing nodes from the - cluster without interrupting jobs in progress. Timeout - specifies how long to wait for jobs in progress to finish - before forcefully removing nodes (and potentially - interrupting jobs). Default timeout is 0 (for forceful - decommission), and the maximum allowed timeout is 1 day. - (see JSON representation of - `Duration `__). - - Only supported on Dataproc image versions 1.2 and higher. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Specifies the path, relative to ``Cluster``, of - the field to update. For example, to change the number of - workers in a cluster to 5, the ``update_mask`` parameter - would be specified as - ``config.worker_config.num_instances``, and the ``PATCH`` - request body would specify the new value, as follows: - - :: - - { - "config":{ - "workerConfig":{ - "numInstances":"5" - } - } - } - - Similarly, to change the number of preemptible workers in a - cluster to 5, the ``update_mask`` parameter would be - ``config.secondary_worker_config.num_instances``, and the - ``PATCH`` request body would be set as follows: - - :: - - { - "config":{ - "secondaryWorkerConfig":{ - "numInstances":"5" - } - } - } - - Note: Currently, only the following fields can be updated: - - .. raw:: html - - - - - - - - - - - - - - - - - - - - - - - -
-             <tr>
-             <td><strong>Mask</strong></td><td><strong>Purpose</strong></td>
-             </tr>
-             <tr>
-             <td>labels</td><td>Update labels</td>
-             </tr>
-             <tr>
-             <td>config.worker_config.num_instances</td><td>Resize primary worker group</td>
-             </tr>
-             <tr>
-             <td>config.secondary_worker_config.num_instances</td><td>Resize secondary worker group</td>
-             </tr>
-             <tr>
-             <td>config.autoscaling_config.policy_uri</td><td>Use, stop using, or
-             change autoscaling policies</td>
-             </tr>
-             </tbody>
-             </table>
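Resizing the primary worker group, per the first table row, then looks roughly like this (IDs are placeholders; ``project_id``, ``region``, ``cluster_name``, ``cluster``, and ``update_mask`` are the method's flattened parameters):

.. code-block:: python

    from google.cloud import dataproc_v1
    from google.protobuf import field_mask_pb2

    client = dataproc_v1.ClusterControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    operation = client.update_cluster(
        project_id="my-project",
        region="us-central1",
        cluster_name="my-cluster",
        # Only the masked path is read from this partial Cluster.
        cluster=dataproc_v1.Cluster(
            config={"worker_config": {"num_instances": 5}}
        ),
        update_mask=field_mask_pb2.FieldMask(
            paths=["config.worker_config.num_instances"]
        ),
    )
    operation.result()  # a graceful_decommission_timeout could be added here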
- request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two - `UpdateClusterRequest `__\ s - with the same id, then the second request will be ignored - and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. - - It is recommended to always set this value to a - `UUID `__. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=5, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=2, - ) - cluster: 'Cluster' = proto.Field( - proto.MESSAGE, - number=3, - message='Cluster', - ) - graceful_decommission_timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=6, - message=duration_pb2.Duration, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask_pb2.FieldMask, - ) - request_id: str = proto.Field( - proto.STRING, - number=7, - ) - - -class StopClusterRequest(proto.Message): - r"""A request to stop a cluster. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project the cluster belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - cluster_name (str): - Required. The cluster name. - cluster_uuid (str): - Optional. Specifying the ``cluster_uuid`` means the RPC will - fail (with error NOT_FOUND) if a cluster with the specified - UUID does not exist. - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two - `StopClusterRequest `__\ s - with the same id, then the second request will be ignored - and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. - - Recommendation: Set this value to a - `UUID `__. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=2, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=3, - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=4, - ) - request_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class StartClusterRequest(proto.Message): - r"""A request to start a cluster. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project the cluster belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - cluster_name (str): - Required. The cluster name. - cluster_uuid (str): - Optional. Specifying the ``cluster_uuid`` means the RPC will - fail (with error NOT_FOUND) if a cluster with the specified - UUID does not exist. - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two - `StartClusterRequest `__\ s - with the same id, then the second request will be ignored - and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. - - Recommendation: Set this value to a - `UUID `__. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. 
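A stop/start round trip under the semantics above, sketched with placeholder IDs; reading ``cluster_uuid`` from a ``get_cluster`` call makes the stop fail with ``NOT_FOUND`` if the name has since been reused:

.. code-block:: python

    import uuid

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    cluster = client.get_cluster(
        project_id="my-project", region="us-central1", cluster_name="my-cluster"
    )
    client.stop_cluster(request={
        "project_id": "my-project",
        "region": "us-central1",
        "cluster_name": "my-cluster",
        "cluster_uuid": cluster.cluster_uuid,  # guards against name reuse
        "request_id": str(uuid.uuid4()),       # retry-safe, 36 chars
    }).result()
    client.start_cluster(request={
        "project_id": "my-project",
        "region": "us-central1",
        "cluster_name": "my-cluster",
    }).result()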
- """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=2, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=3, - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=4, - ) - request_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class DeleteClusterRequest(proto.Message): - r"""A request to delete a cluster. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the cluster belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - cluster_name (str): - Required. The cluster name. - cluster_uuid (str): - Optional. Specifying the ``cluster_uuid`` means the RPC - should fail (with error NOT_FOUND) if cluster with specified - UUID does not exist. - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two - `DeleteClusterRequest `__\ s - with the same id, then the second request will be ignored - and the first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. - - It is recommended to always set this value to a - `UUID `__. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=3, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=2, - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=4, - ) - request_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class GetClusterRequest(proto.Message): - r"""Request to get the resource representation for a cluster in a - project. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the cluster belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - cluster_name (str): - Required. The cluster name. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=3, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListClustersRequest(proto.Message): - r"""A request to list the clusters in a project. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the cluster belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - filter (str): - Optional. A filter constraining the clusters to list. - Filters are case-sensitive and have the following syntax: - - field = value [AND [field = value]] ... - - where **field** is one of ``status.state``, ``clusterName``, - or ``labels.[KEY]``, and ``[KEY]`` is a label key. **value** - can be ``*`` to match all values. ``status.state`` can be - one of the following: ``ACTIVE``, ``INACTIVE``, - ``CREATING``, ``RUNNING``, ``ERROR``, ``DELETING``, or - ``UPDATING``. ``ACTIVE`` contains the ``CREATING``, - ``UPDATING``, and ``RUNNING`` states. ``INACTIVE`` contains - the ``DELETING`` and ``ERROR`` states. ``clusterName`` is - the name of the cluster provided at creation time. Only the - logical ``AND`` operator is supported; space-separated items - are treated as having an implicit ``AND`` operator. 
-
-            Example filter:
-
-            status.state = ACTIVE AND clusterName = mycluster AND
-            labels.env = staging AND labels.starred = \*
-        page_size (int):
-            Optional. The standard List page size.
-        page_token (str):
-            Optional. The standard List page token.
-    """
-
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    region: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-
-
-class ListClustersResponse(proto.Message):
-    r"""The list of all clusters in a project.
-
-    Attributes:
-        clusters (MutableSequence[google.cloud.dataproc_v1.types.Cluster]):
-            Output only. The clusters in the project.
-        next_page_token (str):
-            Output only. This token is included in the response if there
-            are more results to fetch. To fetch additional results,
-            provide this value as the ``page_token`` in a subsequent
-            ``ListClustersRequest``.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    clusters: MutableSequence['Cluster'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Cluster',
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class DiagnoseClusterRequest(proto.Message):
-    r"""A request to collect cluster diagnostic information.
-
-    Attributes:
-        project_id (str):
-            Required. The ID of the Google Cloud Platform
-            project that the cluster belongs to.
-        region (str):
-            Required. The Dataproc region in which to
-            handle the request.
-        cluster_name (str):
-            Required. The cluster name.
-    """
-
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    region: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    cluster_name: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class DiagnoseClusterResults(proto.Message):
-    r"""The location of diagnostic output.
-
-    Attributes:
-        output_uri (str):
-            Output only. The Cloud Storage URI of the
-            diagnostic output. The output report is a plain
-            text file with a summary of collected
-            diagnostics.
-    """
-
-    output_uri: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class ReservationAffinity(proto.Message):
-    r"""Reservation Affinity for consuming zonal reservation.
-
-    Attributes:
-        consume_reservation_type (google.cloud.dataproc_v1.types.ReservationAffinity.Type):
-            Optional. Type of reservation to consume.
-        key (str):
-            Optional. Corresponds to the label key of the
-            reservation resource.
-        values (MutableSequence[str]):
-            Optional. Corresponds to the label values of
-            the reservation resource.
-    """
-    class Type(proto.Enum):
-        r"""Indicates whether to consume capacity from a reservation or
-        not.
-
-        Values:
-            TYPE_UNSPECIFIED (0):
-                No description available.
-            NO_RESERVATION (1):
-                Do not consume from any allocated capacity.
-            ANY_RESERVATION (2):
-                Consume any reservation available.
-            SPECIFIC_RESERVATION (3):
-                Must consume from a specific reservation.
-                Must specify the key and values fields that
-                identify the reservation.
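For ``SPECIFIC_RESERVATION``, the key is normally the Compute Engine reservation label key shown below; treat that key and the reservation name as assumptions of this sketch. The affinity attaches to a cluster through ``GceClusterConfig``:

.. code-block:: python

    from google.cloud import dataproc_v1

    affinity = dataproc_v1.ReservationAffinity(
        consume_reservation_type=(
            dataproc_v1.ReservationAffinity.Type.SPECIFIC_RESERVATION
        ),
        key="compute.googleapis.com/reservation-name",  # assumed standard key
        values=["my-reservation"],
    )
    gce_config = dataproc_v1.GceClusterConfig(reservation_affinity=affinity)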
- """ - TYPE_UNSPECIFIED = 0 - NO_RESERVATION = 1 - ANY_RESERVATION = 2 - SPECIFIC_RESERVATION = 3 - - consume_reservation_type: Type = proto.Field( - proto.ENUM, - number=1, - enum=Type, - ) - key: str = proto.Field( - proto.STRING, - number=2, - ) - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/jobs.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/jobs.py deleted file mode 100644 index 1b390de6..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/jobs.py +++ /dev/null @@ -1,1716 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataproc.v1', - manifest={ - 'LoggingConfig', - 'HadoopJob', - 'SparkJob', - 'PySparkJob', - 'QueryList', - 'HiveJob', - 'SparkSqlJob', - 'PigJob', - 'SparkRJob', - 'PrestoJob', - 'TrinoJob', - 'JobPlacement', - 'JobStatus', - 'JobReference', - 'YarnApplication', - 'Job', - 'DriverSchedulingConfig', - 'JobScheduling', - 'SubmitJobRequest', - 'JobMetadata', - 'GetJobRequest', - 'ListJobsRequest', - 'UpdateJobRequest', - 'ListJobsResponse', - 'CancelJobRequest', - 'DeleteJobRequest', - }, -) - - -class LoggingConfig(proto.Message): - r"""The runtime logging config of the job. - - Attributes: - driver_log_levels (MutableMapping[str, google.cloud.dataproc_v1.types.LoggingConfig.Level]): - The per-package log levels for the driver. - This may include "root" package name to - configure rootLogger. Examples: - - - 'com.google = FATAL' - - 'root = INFO' - - 'org.apache = DEBUG' - """ - class Level(proto.Enum): - r"""The Log4j level for job execution. When running an `Apache - Hive `__ job, Cloud Dataproc configures - the Hive client to an equivalent verbosity level. - - Values: - LEVEL_UNSPECIFIED (0): - Level is unspecified. Use default level for - log4j. - ALL (1): - Use ALL level for log4j. - TRACE (2): - Use TRACE level for log4j. - DEBUG (3): - Use DEBUG level for log4j. - INFO (4): - Use INFO level for log4j. - WARN (5): - Use WARN level for log4j. - ERROR (6): - Use ERROR level for log4j. - FATAL (7): - Use FATAL level for log4j. - OFF (8): - Turn off log4j. - """ - LEVEL_UNSPECIFIED = 0 - ALL = 1 - TRACE = 2 - DEBUG = 3 - INFO = 4 - WARN = 5 - ERROR = 6 - FATAL = 7 - OFF = 8 - - driver_log_levels: MutableMapping[str, Level] = proto.MapField( - proto.STRING, - proto.ENUM, - number=2, - enum=Level, - ) - - -class HadoopJob(proto.Message): - r"""A Dataproc job for running `Apache Hadoop - MapReduce `__ - jobs on `Apache Hadoop - YARN `__. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - main_jar_file_uri (str): - The HCFS URI of the jar file containing the - main class. Examples: - - 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' - 'hdfs:/tmp/test-samples/custom-wordcount.jar' - 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar' - - This field is a member of `oneof`_ ``driver``. - main_class (str): - The name of the driver's main class. The jar file containing - the class must be in the default CLASSPATH or specified in - ``jar_file_uris``. - - This field is a member of `oneof`_ ``driver``. - args (MutableSequence[str]): - Optional. The arguments to pass to the driver. Do not - include arguments, such as ``-libjars`` or ``-Dfoo=bar``, - that can be set as job properties, since a collision may - occur that causes an incorrect job submission. - jar_file_uris (MutableSequence[str]): - Optional. Jar file URIs to add to the - CLASSPATHs of the Hadoop driver and tasks. - file_uris (MutableSequence[str]): - Optional. HCFS (Hadoop Compatible Filesystem) - URIs of files to be copied to the working - directory of Hadoop drivers and distributed - tasks. Useful for naively parallel tasks. - archive_uris (MutableSequence[str]): - Optional. HCFS URIs of archives to be - extracted in the working directory of Hadoop - drivers and tasks. Supported file types: - - .jar, .tar, .tar.gz, .tgz, or .zip. - properties (MutableMapping[str, str]): - Optional. A mapping of property names to values, used to - configure Hadoop. Properties that conflict with values set - by the Dataproc API may be overwritten. Can include - properties set in ``/etc/hadoop/conf/*-site`` and classes in - user code. - logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. The runtime log config for job - execution. - """ - - main_jar_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='driver', - ) - main_class: str = proto.Field( - proto.STRING, - number=2, - oneof='driver', - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=8, - message='LoggingConfig', - ) - - -class SparkJob(proto.Message): - r"""A Dataproc job for running `Apache - Spark `__ applications on YARN. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - main_jar_file_uri (str): - The HCFS URI of the jar file that contains - the main class. - - This field is a member of `oneof`_ ``driver``. - main_class (str): - The name of the driver's main class. 
The jar file that - contains the class must be in the default CLASSPATH or - specified in ``jar_file_uris``. - - This field is a member of `oneof`_ ``driver``. - args (MutableSequence[str]): - Optional. The arguments to pass to the driver. Do not - include arguments, such as ``--conf``, that can be set as - job properties, since a collision may occur that causes an - incorrect job submission. - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to add to - the CLASSPATHs of the Spark driver and tasks. - file_uris (MutableSequence[str]): - Optional. HCFS URIs of files to be placed in - the working directory of each executor. Useful - for naively parallel tasks. - archive_uris (MutableSequence[str]): - Optional. HCFS URIs of archives to be - extracted into the working directory of each - executor. Supported file types: - - .jar, .tar, .tar.gz, .tgz, and .zip. - properties (MutableMapping[str, str]): - Optional. A mapping of property names to - values, used to configure Spark. Properties that - conflict with values set by the Dataproc API may - be overwritten. Can include properties set in - /etc/spark/conf/spark-defaults.conf and classes - in user code. - logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. The runtime log config for job - execution. - """ - - main_jar_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='driver', - ) - main_class: str = proto.Field( - proto.STRING, - number=2, - oneof='driver', - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=8, - message='LoggingConfig', - ) - - -class PySparkJob(proto.Message): - r"""A Dataproc job for running `Apache - PySpark `__ - applications on YARN. - - Attributes: - main_python_file_uri (str): - Required. The HCFS URI of the main Python - file to use as the driver. Must be a .py file. - args (MutableSequence[str]): - Optional. The arguments to pass to the driver. Do not - include arguments, such as ``--conf``, that can be set as - job properties, since a collision may occur that causes an - incorrect job submission. - python_file_uris (MutableSequence[str]): - Optional. HCFS file URIs of Python files to - pass to the PySpark framework. Supported file - types: .py, .egg, and .zip. - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to add to - the CLASSPATHs of the Python driver and tasks. - file_uris (MutableSequence[str]): - Optional. HCFS URIs of files to be placed in - the working directory of each executor. Useful - for naively parallel tasks. - archive_uris (MutableSequence[str]): - Optional. HCFS URIs of archives to be - extracted into the working directory of each - executor. Supported file types: - - .jar, .tar, .tar.gz, .tgz, and .zip. - properties (MutableMapping[str, str]): - Optional. A mapping of property names to - values, used to configure PySpark. Properties - that conflict with values set by the Dataproc - API may be overwritten. Can include properties - set in - /etc/spark/conf/spark-defaults.conf and classes - in user code. 
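Submitting one of these driver payloads follows the same pattern for every job type; a hedged PySpark sketch with placeholder URIs and IDs (``project_id``, ``region``, and ``job`` are flattened parameters of ``submit_job``):

.. code-block:: python

    from google.cloud import dataproc_v1

    job_client = dataproc_v1.JobControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    job = dataproc_v1.Job(
        placement=dataproc_v1.JobPlacement(cluster_name="my-cluster"),
        pyspark_job=dataproc_v1.PySparkJob(
            main_python_file_uri="gs://my-bucket/jobs/wordcount.py",
            args=["gs://my-bucket/input/"],
            properties={"spark.executor.memory": "4g"},
        ),
    )
    submitted = job_client.submit_job(
        project_id="my-project", region="us-central1", job=job
    )
    print(submitted.reference.job_id)  # server-assigned if not set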
- logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. The runtime log config for job - execution. - """ - - main_python_file_uri: str = proto.Field( - proto.STRING, - number=1, - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - python_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=8, - message='LoggingConfig', - ) - - -class QueryList(proto.Message): - r"""A list of queries to run on a cluster. - - Attributes: - queries (MutableSequence[str]): - Required. The queries to execute. You do not need to end a - query expression with a semicolon. Multiple queries can be - specified in one string by separating each with a semicolon. - Here is an example of a Dataproc API snippet that uses a - QueryList to specify a HiveJob: - - :: - - "hiveJob": { - "queryList": { - "queries": [ - "query1", - "query2", - "query3;query4", - ] - } - } - """ - - queries: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class HiveJob(proto.Message): - r"""A Dataproc job for running `Apache - Hive `__ queries on YARN. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_file_uri (str): - The HCFS URI of the script that contains Hive - queries. - - This field is a member of `oneof`_ ``queries``. - query_list (google.cloud.dataproc_v1.types.QueryList): - A list of queries. - - This field is a member of `oneof`_ ``queries``. - continue_on_failure (bool): - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` - can be useful when executing independent parallel queries. - script_variables (MutableMapping[str, str]): - Optional. Mapping of query variable names to values - (equivalent to the Hive command: ``SET name="value";``). - properties (MutableMapping[str, str]): - Optional. A mapping of property names and values, used to - configure Hive. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, - /etc/hive/conf/hive-site.xml, and classes in user code. - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to add to - the CLASSPATH of the Hive server and Hadoop - MapReduce (MR) tasks. Can contain Hive SerDes - and UDFs. 
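A ``HiveJob`` built from an inline ``QueryList``; semicolon-separated statements may share a single string, per the ``QueryList`` docs above:

.. code-block:: python

    from google.cloud import dataproc_v1

    hive_job = dataproc_v1.HiveJob(
        query_list=dataproc_v1.QueryList(
            queries=[
                "SHOW DATABASES",
                "USE analytics;SELECT COUNT(*) FROM events",  # two statements, one string
            ]
        ),
        continue_on_failure=False,
        script_variables={"env": "staging"},  # equivalent to SET env="staging";
    )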
- """ - - query_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='queries', - ) - query_list: 'QueryList' = proto.Field( - proto.MESSAGE, - number=2, - oneof='queries', - message='QueryList', - ) - continue_on_failure: bool = proto.Field( - proto.BOOL, - number=3, - ) - script_variables: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - -class SparkSqlJob(proto.Message): - r"""A Dataproc job for running `Apache Spark - SQL `__ queries. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_file_uri (str): - The HCFS URI of the script that contains SQL - queries. - - This field is a member of `oneof`_ ``queries``. - query_list (google.cloud.dataproc_v1.types.QueryList): - A list of queries. - - This field is a member of `oneof`_ ``queries``. - script_variables (MutableMapping[str, str]): - Optional. Mapping of query variable names to values - (equivalent to the Spark SQL command: SET - ``name="value";``). - properties (MutableMapping[str, str]): - Optional. A mapping of property names to - values, used to configure Spark SQL's SparkConf. - Properties that conflict with values set by the - Dataproc API may be overwritten. - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to be added - to the Spark CLASSPATH. - logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. The runtime log config for job - execution. - """ - - query_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='queries', - ) - query_list: 'QueryList' = proto.Field( - proto.MESSAGE, - number=2, - oneof='queries', - message='QueryList', - ) - script_variables: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=56, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='LoggingConfig', - ) - - -class PigJob(proto.Message): - r"""A Dataproc job for running `Apache Pig `__ - queries on YARN. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_file_uri (str): - The HCFS URI of the script that contains the - Pig queries. - - This field is a member of `oneof`_ ``queries``. - query_list (google.cloud.dataproc_v1.types.QueryList): - A list of queries. - - This field is a member of `oneof`_ ``queries``. - continue_on_failure (bool): - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` - can be useful when executing independent parallel queries. - script_variables (MutableMapping[str, str]): - Optional. 
Mapping of query variable names to values - (equivalent to the Pig command: ``name=[value]``). - properties (MutableMapping[str, str]): - Optional. A mapping of property names to values, used to - configure Pig. Properties that conflict with values set by - the Dataproc API may be overwritten. Can include properties - set in ``/etc/hadoop/conf/*-site.xml``, - /etc/pig/conf/pig.properties, and classes in user code. - jar_file_uris (MutableSequence[str]): - Optional. HCFS URIs of jar files to add to - the CLASSPATH of the Pig Client and Hadoop - MapReduce (MR) tasks. Can contain Pig UDFs. - logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. The runtime log config for job - execution. - """ - - query_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='queries', - ) - query_list: 'QueryList' = proto.Field( - proto.MESSAGE, - number=2, - oneof='queries', - message='QueryList', - ) - continue_on_failure: bool = proto.Field( - proto.BOOL, - number=3, - ) - script_variables: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - jar_file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='LoggingConfig', - ) - - -class SparkRJob(proto.Message): - r"""A Dataproc job for running `Apache - SparkR `__ - applications on YARN. - - Attributes: - main_r_file_uri (str): - Required. The HCFS URI of the main R file to - use as the driver. Must be a .R file. - args (MutableSequence[str]): - Optional. The arguments to pass to the driver. Do not - include arguments, such as ``--conf``, that can be set as - job properties, since a collision may occur that causes an - incorrect job submission. - file_uris (MutableSequence[str]): - Optional. HCFS URIs of files to be placed in - the working directory of each executor. Useful - for naively parallel tasks. - archive_uris (MutableSequence[str]): - Optional. HCFS URIs of archives to be - extracted into the working directory of each - executor. Supported file types: - - .jar, .tar, .tar.gz, .tgz, and .zip. - properties (MutableMapping[str, str]): - Optional. A mapping of property names to - values, used to configure SparkR. Properties - that conflict with values set by the Dataproc - API may be overwritten. Can include properties - set in - /etc/spark/conf/spark-defaults.conf and classes - in user code. - logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. The runtime log config for job - execution. - """ - - main_r_file_uri: str = proto.Field( - proto.STRING, - number=1, - ) - args: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - file_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - archive_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=6, - message='LoggingConfig', - ) - - -class PrestoJob(proto.Message): - r"""A Dataproc job for running `Presto `__ - queries. **IMPORTANT**: The `Dataproc Presto Optional - Component `__ - must be enabled when the cluster is created to submit a Presto job - to the cluster. - - This message has `oneof`_ fields (mutually exclusive fields). 
- For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_file_uri (str): - The HCFS URI of the script that contains SQL - queries. - - This field is a member of `oneof`_ ``queries``. - query_list (google.cloud.dataproc_v1.types.QueryList): - A list of queries. - - This field is a member of `oneof`_ ``queries``. - continue_on_failure (bool): - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` - can be useful when executing independent parallel queries. - output_format (str): - Optional. The format in which query output - will be displayed. See the Presto documentation - for supported output formats - client_tags (MutableSequence[str]): - Optional. Presto client tags to attach to - this query - properties (MutableMapping[str, str]): - Optional. A mapping of property names to values. Used to set - Presto `session - properties `__ - Equivalent to using the --session flag in the Presto CLI - logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. The runtime log config for job - execution. - """ - - query_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='queries', - ) - query_list: 'QueryList' = proto.Field( - proto.MESSAGE, - number=2, - oneof='queries', - message='QueryList', - ) - continue_on_failure: bool = proto.Field( - proto.BOOL, - number=3, - ) - output_format: str = proto.Field( - proto.STRING, - number=4, - ) - client_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='LoggingConfig', - ) - - -class TrinoJob(proto.Message): - r"""A Dataproc job for running `Trino `__ queries. - **IMPORTANT**: The `Dataproc Trino Optional - Component `__ - must be enabled when the cluster is created to submit a Trino job to - the cluster. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - query_file_uri (str): - The HCFS URI of the script that contains SQL - queries. - - This field is a member of `oneof`_ ``queries``. - query_list (google.cloud.dataproc_v1.types.QueryList): - A list of queries. - - This field is a member of `oneof`_ ``queries``. - continue_on_failure (bool): - Optional. Whether to continue executing queries if a query - fails. The default value is ``false``. Setting to ``true`` - can be useful when executing independent parallel queries. - output_format (str): - Optional. The format in which query output - will be displayed. See the Trino documentation - for supported output formats - client_tags (MutableSequence[str]): - Optional. Trino client tags to attach to this - query - properties (MutableMapping[str, str]): - Optional. A mapping of property names to values. Used to set - Trino `session - properties `__ - Equivalent to using the --session flag in the Trino CLI - logging_config (google.cloud.dataproc_v1.types.LoggingConfig): - Optional. 
The runtime log config for job - execution. - """ - - query_file_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='queries', - ) - query_list: 'QueryList' = proto.Field( - proto.MESSAGE, - number=2, - oneof='queries', - message='QueryList', - ) - continue_on_failure: bool = proto.Field( - proto.BOOL, - number=3, - ) - output_format: str = proto.Field( - proto.STRING, - number=4, - ) - client_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - logging_config: 'LoggingConfig' = proto.Field( - proto.MESSAGE, - number=7, - message='LoggingConfig', - ) - - -class JobPlacement(proto.Message): - r"""Dataproc job config. - - Attributes: - cluster_name (str): - Required. The name of the cluster where the - job will be submitted. - cluster_uuid (str): - Output only. A cluster UUID generated by the - Dataproc service when the job is submitted. - cluster_labels (MutableMapping[str, str]): - Optional. Cluster labels to identify a - cluster where the job will be submitted. - """ - - cluster_name: str = proto.Field( - proto.STRING, - number=1, - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=2, - ) - cluster_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class JobStatus(proto.Message): - r"""Dataproc job status. - - Attributes: - state (google.cloud.dataproc_v1.types.JobStatus.State): - Output only. A state message specifying the - overall job state. - details (str): - Optional. Output only. Job state details, - such as an error description if the state is - ERROR. - state_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this state was - entered. - substate (google.cloud.dataproc_v1.types.JobStatus.Substate): - Output only. Additional state information, - which includes status reported by the agent. - """ - class State(proto.Enum): - r"""The job state. - - Values: - STATE_UNSPECIFIED (0): - The job state is unknown. - PENDING (1): - The job is pending; it has been submitted, - but is not yet running. - SETUP_DONE (8): - Job has been received by the service and - completed initial setup; it will soon be - submitted to the cluster. - RUNNING (2): - The job is running on the cluster. - CANCEL_PENDING (3): - A CancelJob request has been received, but is - pending. - CANCEL_STARTED (7): - Transient in-flight resources have been - canceled, and the request to cancel the running - job has been issued to the cluster. - CANCELLED (4): - The job cancellation was successful. - DONE (5): - The job has completed successfully. - ERROR (6): - The job has completed, but encountered an - error. - ATTEMPT_FAILURE (9): - Job attempt has failed. The detail field - contains failure details for this attempt. - - Applies to restartable jobs only. - """ - STATE_UNSPECIFIED = 0 - PENDING = 1 - SETUP_DONE = 8 - RUNNING = 2 - CANCEL_PENDING = 3 - CANCEL_STARTED = 7 - CANCELLED = 4 - DONE = 5 - ERROR = 6 - ATTEMPT_FAILURE = 9 - - class Substate(proto.Enum): - r"""The job substate. - - Values: - UNSPECIFIED (0): - The job substate is unknown. - SUBMITTED (1): - The Job is submitted to the agent. - Applies to RUNNING state. - QUEUED (2): - The Job has been received and is awaiting - execution (it may be waiting for a condition to - be met). See the "details" field for the reason - for the delay. - - Applies to RUNNING state. 
- STALE_STATUS (3): - The agent-reported status is out of date, - which may be caused by a loss of communication - between the agent and Dataproc. If the agent - does not send a timely update, the job will - fail. - Applies to RUNNING state. - """ - UNSPECIFIED = 0 - SUBMITTED = 1 - QUEUED = 2 - STALE_STATUS = 3 - - state: State = proto.Field( - proto.ENUM, - number=1, - enum=State, - ) - details: str = proto.Field( - proto.STRING, - number=2, - ) - state_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - substate: Substate = proto.Field( - proto.ENUM, - number=7, - enum=Substate, - ) - - -class JobReference(proto.Message): - r"""Encapsulates the full scoping used to reference a job. - - Attributes: - project_id (str): - Optional. The ID of the Google Cloud Platform - project that the job belongs to. If specified, - must match the request project ID. - job_id (str): - Optional. The job ID, which must be unique within the - project. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), or hyphens (-). The maximum length is 100 - characters. - - If not specified by the caller, the job ID will be provided - by the server. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class YarnApplication(proto.Message): - r"""A YARN application created by a job. Application information is a - subset of - org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. - - **Beta Feature**: This report is available for testing purposes - only. It may be changed before final release. - - Attributes: - name (str): - Required. The application name. - state (google.cloud.dataproc_v1.types.YarnApplication.State): - Required. The application state. - progress (float): - Required. The numerical progress of the - application, from 1 to 100. - tracking_url (str): - Optional. The HTTP URL of the - ApplicationMaster, HistoryServer, or - TimelineServer that provides - application-specific information. The URL uses - the internal hostname, and requires a proxy - server for resolution and, possibly, access. - """ - class State(proto.Enum): - r"""The application state, corresponding to - YarnProtos.YarnApplicationStateProto. - - Values: - STATE_UNSPECIFIED (0): - Status is unspecified. - NEW (1): - Status is NEW. - NEW_SAVING (2): - Status is NEW_SAVING. - SUBMITTED (3): - Status is SUBMITTED. - ACCEPTED (4): - Status is ACCEPTED. - RUNNING (5): - Status is RUNNING. - FINISHED (6): - Status is FINISHED. - FAILED (7): - Status is FAILED. - KILLED (8): - Status is KILLED. - """ - STATE_UNSPECIFIED = 0 - NEW = 1 - NEW_SAVING = 2 - SUBMITTED = 3 - ACCEPTED = 4 - RUNNING = 5 - FINISHED = 6 - FAILED = 7 - KILLED = 8 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - state: State = proto.Field( - proto.ENUM, - number=2, - enum=State, - ) - progress: float = proto.Field( - proto.FLOAT, - number=3, - ) - tracking_url: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Job(proto.Message): - r"""A Dataproc job resource. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - reference (google.cloud.dataproc_v1.types.JobReference): - Optional. 
The fully qualified reference to the job, which - can be used to obtain the equivalent REST path of the job - resource. If this property is not specified when a job is - created, the server generates a job_id. - placement (google.cloud.dataproc_v1.types.JobPlacement): - Required. Job information, including how, - when, and where to run the job. - hadoop_job (google.cloud.dataproc_v1.types.HadoopJob): - Optional. Job is a Hadoop job. - - This field is a member of `oneof`_ ``type_job``. - spark_job (google.cloud.dataproc_v1.types.SparkJob): - Optional. Job is a Spark job. - - This field is a member of `oneof`_ ``type_job``. - pyspark_job (google.cloud.dataproc_v1.types.PySparkJob): - Optional. Job is a PySpark job. - - This field is a member of `oneof`_ ``type_job``. - hive_job (google.cloud.dataproc_v1.types.HiveJob): - Optional. Job is a Hive job. - - This field is a member of `oneof`_ ``type_job``. - pig_job (google.cloud.dataproc_v1.types.PigJob): - Optional. Job is a Pig job. - - This field is a member of `oneof`_ ``type_job``. - spark_r_job (google.cloud.dataproc_v1.types.SparkRJob): - Optional. Job is a SparkR job. - - This field is a member of `oneof`_ ``type_job``. - spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob): - Optional. Job is a SparkSql job. - - This field is a member of `oneof`_ ``type_job``. - presto_job (google.cloud.dataproc_v1.types.PrestoJob): - Optional. Job is a Presto job. - - This field is a member of `oneof`_ ``type_job``. - trino_job (google.cloud.dataproc_v1.types.TrinoJob): - Optional. Job is a Trino job. - - This field is a member of `oneof`_ ``type_job``. - status (google.cloud.dataproc_v1.types.JobStatus): - Output only. The job status. Additional application-specific - status information may be contained in the type_job and - yarn_applications fields. - status_history (MutableSequence[google.cloud.dataproc_v1.types.JobStatus]): - Output only. The previous job status. - yarn_applications (MutableSequence[google.cloud.dataproc_v1.types.YarnApplication]): - Output only. The collection of YARN applications spun up by - this job. - - **Beta** Feature: This report is available for testing - purposes only. It may be changed before final release. - driver_output_resource_uri (str): - Output only. A URI pointing to the location - of the stdout of the job's driver program. - driver_control_files_uri (str): - Output only. If present, the location of miscellaneous - control files which may be used as part of job setup and - handling. If not present, control files may be placed in the - same location as ``driver_output_uri``. - labels (MutableMapping[str, str]): - Optional. The labels to associate with this job. Label - **keys** must contain 1 to 63 characters, and must conform - to `RFC 1035 `__. - Label **values** may be empty, but, if present, must contain - 1 to 63 characters, and must conform to `RFC - 1035 `__. No more than - 32 labels can be associated with a job. - scheduling (google.cloud.dataproc_v1.types.JobScheduling): - Optional. Job scheduling configuration. - job_uuid (str): - Output only. A UUID that uniquely identifies a job within - the project over time. This is in contrast to a - user-settable reference.job_id that may be reused over time. - done (bool): - Output only. Indicates whether the job is completed. If the - value is ``false``, the job is still in progress. If - ``true``, the job is completed, and ``status.state`` field - will indicate if it was successful, failed, or cancelled. 
- driver_scheduling_config (google.cloud.dataproc_v1.types.DriverSchedulingConfig): - Optional. Driver scheduling configuration. - """ - - reference: 'JobReference' = proto.Field( - proto.MESSAGE, - number=1, - message='JobReference', - ) - placement: 'JobPlacement' = proto.Field( - proto.MESSAGE, - number=2, - message='JobPlacement', - ) - hadoop_job: 'HadoopJob' = proto.Field( - proto.MESSAGE, - number=3, - oneof='type_job', - message='HadoopJob', - ) - spark_job: 'SparkJob' = proto.Field( - proto.MESSAGE, - number=4, - oneof='type_job', - message='SparkJob', - ) - pyspark_job: 'PySparkJob' = proto.Field( - proto.MESSAGE, - number=5, - oneof='type_job', - message='PySparkJob', - ) - hive_job: 'HiveJob' = proto.Field( - proto.MESSAGE, - number=6, - oneof='type_job', - message='HiveJob', - ) - pig_job: 'PigJob' = proto.Field( - proto.MESSAGE, - number=7, - oneof='type_job', - message='PigJob', - ) - spark_r_job: 'SparkRJob' = proto.Field( - proto.MESSAGE, - number=21, - oneof='type_job', - message='SparkRJob', - ) - spark_sql_job: 'SparkSqlJob' = proto.Field( - proto.MESSAGE, - number=12, - oneof='type_job', - message='SparkSqlJob', - ) - presto_job: 'PrestoJob' = proto.Field( - proto.MESSAGE, - number=23, - oneof='type_job', - message='PrestoJob', - ) - trino_job: 'TrinoJob' = proto.Field( - proto.MESSAGE, - number=28, - oneof='type_job', - message='TrinoJob', - ) - status: 'JobStatus' = proto.Field( - proto.MESSAGE, - number=8, - message='JobStatus', - ) - status_history: MutableSequence['JobStatus'] = proto.RepeatedField( - proto.MESSAGE, - number=13, - message='JobStatus', - ) - yarn_applications: MutableSequence['YarnApplication'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='YarnApplication', - ) - driver_output_resource_uri: str = proto.Field( - proto.STRING, - number=17, - ) - driver_control_files_uri: str = proto.Field( - proto.STRING, - number=15, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=18, - ) - scheduling: 'JobScheduling' = proto.Field( - proto.MESSAGE, - number=20, - message='JobScheduling', - ) - job_uuid: str = proto.Field( - proto.STRING, - number=22, - ) - done: bool = proto.Field( - proto.BOOL, - number=24, - ) - driver_scheduling_config: 'DriverSchedulingConfig' = proto.Field( - proto.MESSAGE, - number=27, - message='DriverSchedulingConfig', - ) - - -class DriverSchedulingConfig(proto.Message): - r"""Driver scheduling configuration. - - Attributes: - memory_mb (int): - Required. The amount of memory in MB the - driver is requesting. - vcores (int): - Required. The number of vCPUs the driver is - requesting. - """ - - memory_mb: int = proto.Field( - proto.INT32, - number=1, - ) - vcores: int = proto.Field( - proto.INT32, - number=2, - ) - - -class JobScheduling(proto.Message): - r"""Job scheduling options. - - Attributes: - max_failures_per_hour (int): - Optional. Maximum number of times per hour a driver may be - restarted as a result of driver exiting with non-zero code - before job is reported failed. - - A job may be reported as thrashing if the driver exits with - a non-zero code four times within a 10-minute window. - - Maximum value is 10. - - **Note:** This restartable job option is not supported in - Dataproc [workflow templates] - (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template). - max_failures_total (int): - Optional. Maximum total number of times a driver may be - restarted as a result of the driver exiting with a non-zero - code. 
After the maximum number is reached, the job will be - reported as failed. - - Maximum value is 240. - - **Note:** Currently, this restartable job option is not - supported in Dataproc `workflow - templates `__. - """ - - max_failures_per_hour: int = proto.Field( - proto.INT32, - number=1, - ) - max_failures_total: int = proto.Field( - proto.INT32, - number=2, - ) - - -class SubmitJobRequest(proto.Message): - r"""A request to submit a job. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the job belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - job (google.cloud.dataproc_v1.types.Job): - Required. The job resource. - request_id (str): - Optional. A unique id used to identify the request. If the - server receives two - `SubmitJobRequest `__\ s - with the same id, then the second request will be ignored - and the first [Job][google.cloud.dataproc.v1.Job] created - and stored in the backend is returned. - - It is recommended to always set this value to a - `UUID `__. - - The id must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=3, - ) - job: 'Job' = proto.Field( - proto.MESSAGE, - number=2, - message='Job', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class JobMetadata(proto.Message): - r"""Job Operation metadata. - - Attributes: - job_id (str): - Output only. The job id. - status (google.cloud.dataproc_v1.types.JobStatus): - Output only. Most recent job status. - operation_type (str): - Output only. Operation type. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Job submission time. - """ - - job_id: str = proto.Field( - proto.STRING, - number=1, - ) - status: 'JobStatus' = proto.Field( - proto.MESSAGE, - number=2, - message='JobStatus', - ) - operation_type: str = proto.Field( - proto.STRING, - number=3, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class GetJobRequest(proto.Message): - r"""A request to get the resource representation for a job in a - project. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the job belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - job_id (str): - Required. The job ID. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=3, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListJobsRequest(proto.Message): - r"""A request to list jobs in a project. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the job belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - page_size (int): - Optional. The number of results to return in - each response. - page_token (str): - Optional. The page token, returned by a - previous call, to request the next page of - results. - cluster_name (str): - Optional. If set, the returned jobs list - includes only jobs that were submitted to the - named cluster. - job_state_matcher (google.cloud.dataproc_v1.types.ListJobsRequest.JobStateMatcher): - Optional. Specifies enumerated categories of jobs to list. 
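As a sketch of how the request types above fit together; the ``JobControllerClient`` class and regional endpoint pattern come from the published library rather than this file, and the project and cluster names are placeholders::

    import uuid

    from google.cloud import dataproc_v1

    region = "us-central1"  # placeholder
    client = dataproc_v1.JobControllerClient(
        client_options={"api_endpoint": f"{region}-dataproc.googleapis.com:443"}
    )

    job = dataproc_v1.Job(
        placement=dataproc_v1.JobPlacement(cluster_name="example-cluster"),
        pig_job=dataproc_v1.PigJob(
            query_list=dataproc_v1.QueryList(queries=["fs -ls /"]),
        ),
        scheduling=dataproc_v1.JobScheduling(
            max_failures_per_hour=5,  # documented maximum is 10
            max_failures_total=20,    # documented maximum is 240
        ),
    )

    submitted = client.submit_job(
        request=dataproc_v1.SubmitJobRequest(
            project_id="example-project",
            region=region,
            job=job,
            request_id=str(uuid.uuid4()),  # makes retried submissions idempotent
        )
    )

    # Later, fetch the job again to check completion.
    fetched = client.get_job(
        project_id="example-project",
        region=region,
        job_id=submitted.reference.job_id,
    )
    if fetched.done:
        print(fetched.status.state)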
- (default = match ALL jobs). - - If ``filter`` is provided, ``jobStateMatcher`` will be - ignored. - filter (str): - Optional. A filter constraining the jobs to list. Filters - are case-sensitive and have the following syntax: - - [field = value] AND [field [= value]] ... - - where **field** is ``status.state`` or ``labels.[KEY]``, and - ``[KEY]`` is a label key. **value** can be ``*`` to match - all values. ``status.state`` can be either ``ACTIVE`` or - ``NON_ACTIVE``. Only the logical ``AND`` operator is - supported; space-separated items are treated as having an - implicit ``AND`` operator. - - Example filter: - - status.state = ACTIVE AND labels.env = staging AND - labels.starred = \* - """ - class JobStateMatcher(proto.Enum): - r"""A matcher that specifies categories of job states. - - Values: - ALL (0): - Match all jobs, regardless of state. - ACTIVE (1): - Only match jobs in non-terminal states: PENDING, RUNNING, or - CANCEL_PENDING. - NON_ACTIVE (2): - Only match jobs in terminal states: - CANCELLED, DONE, or ERROR. - """ - ALL = 0 - ACTIVE = 1 - NON_ACTIVE = 2 - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=6, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=4, - ) - job_state_matcher: JobStateMatcher = proto.Field( - proto.ENUM, - number=5, - enum=JobStateMatcher, - ) - filter: str = proto.Field( - proto.STRING, - number=7, - ) - - -class UpdateJobRequest(proto.Message): - r"""A request to update a job. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the job belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - job_id (str): - Required. The job ID. - job (google.cloud.dataproc_v1.types.Job): - Required. The changes to the job. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Specifies the path, relative to Job, of the field - to update. For example, to update the labels of a Job the - update_mask parameter would be specified as labels, and the - ``PATCH`` request body would specify the new value. Note: - Currently, labels is the only field that can be updated. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=2, - ) - job_id: str = proto.Field( - proto.STRING, - number=3, - ) - job: 'Job' = proto.Field( - proto.MESSAGE, - number=4, - message='Job', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=5, - message=field_mask_pb2.FieldMask, - ) - - -class ListJobsResponse(proto.Message): - r"""A list of jobs in a project. - - Attributes: - jobs (MutableSequence[google.cloud.dataproc_v1.types.Job]): - Output only. Jobs list. - next_page_token (str): - Optional. This token is included in the response if there - are more results to fetch. To fetch additional results, - provide this value as the ``page_token`` in a subsequent - ListJobsRequest. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence['Job'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Job', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelJobRequest(proto.Message): - r"""A request to cancel a job. - - Attributes: - project_id (str): - Required. 
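Reusing the documented filter example above, with the same hypothetical client as in the earlier sketch::

    request = dataproc_v1.ListJobsRequest(
        project_id="example-project",  # placeholder
        region="us-central1",          # placeholder
        filter="status.state = ACTIVE AND labels.env = staging AND labels.starred = *",
        # ``job_state_matcher`` would be ignored here because ``filter`` is set.
    )

    # The returned pager follows ``next_page_token`` transparently.
    for job in client.list_jobs(request=request):
        print(job.reference.job_id, job.status.state)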
The ID of the Google Cloud Platform - project that the job belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - job_id (str): - Required. The job ID. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=3, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteJobRequest(proto.Message): - r"""A request to delete a job. - - Attributes: - project_id (str): - Required. The ID of the Google Cloud Platform - project that the job belongs to. - region (str): - Required. The Dataproc region in which to - handle the request. - job_id (str): - Required. The job ID. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - region: str = proto.Field( - proto.STRING, - number=3, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/node_groups.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/node_groups.py deleted file mode 100644 index 81e3bd6e..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/node_groups.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.protobuf import duration_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataproc.v1', - manifest={ - 'CreateNodeGroupRequest', - 'ResizeNodeGroupRequest', - 'GetNodeGroupRequest', - }, -) - - -class CreateNodeGroupRequest(proto.Message): - r"""A request to create a node group. - - Attributes: - parent (str): - Required. The parent resource where this node group will be - created. Format: - ``projects/{project}/regions/{region}/clusters/{cluster}`` - node_group (google.cloud.dataproc_v1.types.NodeGroup): - Required. The node group to create. - node_group_id (str): - Optional. An optional node group ID. Generated if not - specified. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). Cannot begin or end with - underscore or hyphen. Must consist of from 3 to 33 - characters. - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two - `CreateNodeGroupRequest `__ - with the same ID, the second request is ignored and the - first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. - - Recommendation: Set this value to a - `UUID `__. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. 
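The node group ID rules above (3 to 33 characters of letters, numbers, underscores, and hyphens, with no leading or trailing underscore or hyphen) reduce to a small client-side check; this regex is an illustration, not part of the API::

    import re

    # First and last characters alphanumeric, 3-33 characters overall.
    NODE_GROUP_ID = re.compile(r"^[a-zA-Z0-9][a-zA-Z0-9_-]{1,31}[a-zA-Z0-9]$")

    assert NODE_GROUP_ID.match("driver-pool-1")
    assert not NODE_GROUP_ID.match("-bad-prefix")  # leading hyphen
    assert not NODE_GROUP_ID.match("ab")           # too short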
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - node_group: clusters.NodeGroup = proto.Field( - proto.MESSAGE, - number=2, - message=clusters.NodeGroup, - ) - node_group_id: str = proto.Field( - proto.STRING, - number=4, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ResizeNodeGroupRequest(proto.Message): - r"""A request to resize a node group. - - Attributes: - name (str): - Required. The name of the node group to resize. Format: - ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` - size (int): - Required. The number of running instances for - the node group to maintain. The group adds or - removes instances to maintain the number of - instances specified by this parameter. - request_id (str): - Optional. A unique ID used to identify the request. If the - server receives two - `ResizeNodeGroupRequest `__ - with the same ID, the second request is ignored and the - first - [google.longrunning.Operation][google.longrunning.Operation] - created and stored in the backend is returned. - - Recommendation: Set this value to a - `UUID `__. - - The ID must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): - Optional. Timeout for graceful YARN decomissioning. - [Graceful decommissioning] - (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/scaling-clusters#graceful_decommissioning) - allows the removal of nodes from the Compute Engine node - group without interrupting jobs in progress. This timeout - specifies how long to wait for jobs in progress to finish - before forcefully removing nodes (and potentially - interrupting jobs). Default timeout is 0 (for forceful - decommission), and the maximum allowed timeout is 1 day. - (see JSON representation of - `Duration `__). - - Only supported on Dataproc image versions 1.2 and higher. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - size: int = proto.Field( - proto.INT32, - number=2, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - graceful_decommission_timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - -class GetNodeGroupRequest(proto.Message): - r"""A request to get a node group . - - Attributes: - name (str): - Required. The name of the node group to retrieve. Format: - ``projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{nodeGroup}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/operations.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/operations.py deleted file mode 100644 index f93dafdc..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/operations.py +++ /dev/null @@ -1,314 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataproc.v1', - manifest={ - 'BatchOperationMetadata', - 'ClusterOperationStatus', - 'ClusterOperationMetadata', - 'NodeGroupOperationMetadata', - }, -) - - -class BatchOperationMetadata(proto.Message): - r"""Metadata describing the Batch operation. - - Attributes: - batch (str): - Name of the batch for the operation. - batch_uuid (str): - Batch UUID for the operation. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the operation was created. - done_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the operation finished. - operation_type (google.cloud.dataproc_v1.types.BatchOperationMetadata.BatchOperationType): - The operation type. - description (str): - Short description of the operation. - labels (MutableMapping[str, str]): - Labels associated with the operation. - warnings (MutableSequence[str]): - Warnings encountered during operation - execution. - """ - class BatchOperationType(proto.Enum): - r"""Operation type for Batch resources - - Values: - BATCH_OPERATION_TYPE_UNSPECIFIED (0): - Batch operation type is unknown. - BATCH (1): - Batch operation type. - """ - BATCH_OPERATION_TYPE_UNSPECIFIED = 0 - BATCH = 1 - - batch: str = proto.Field( - proto.STRING, - number=1, - ) - batch_uuid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - done_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - operation_type: BatchOperationType = proto.Field( - proto.ENUM, - number=6, - enum=BatchOperationType, - ) - description: str = proto.Field( - proto.STRING, - number=7, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - warnings: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - - -class ClusterOperationStatus(proto.Message): - r"""The status of the operation. - - Attributes: - state (google.cloud.dataproc_v1.types.ClusterOperationStatus.State): - Output only. A message containing the - operation state. - inner_state (str): - Output only. A message containing the - detailed operation state. - details (str): - Output only. A message containing any - operation metadata details. - state_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time this state was entered. - """ - class State(proto.Enum): - r"""The operation state. - - Values: - UNKNOWN (0): - Unused. - PENDING (1): - The operation has been created. - RUNNING (2): - The operation is running. - DONE (3): - The operation is done; either cancelled or - completed. - """ - UNKNOWN = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - - state: State = proto.Field( - proto.ENUM, - number=1, - enum=State, - ) - inner_state: str = proto.Field( - proto.STRING, - number=2, - ) - details: str = proto.Field( - proto.STRING, - number=3, - ) - state_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class ClusterOperationMetadata(proto.Message): - r"""Metadata describing the operation. 
- - Attributes: - cluster_name (str): - Output only. Name of the cluster for the - operation. - cluster_uuid (str): - Output only. Cluster UUID for the operation. - status (google.cloud.dataproc_v1.types.ClusterOperationStatus): - Output only. Current operation status. - status_history (MutableSequence[google.cloud.dataproc_v1.types.ClusterOperationStatus]): - Output only. The previous operation status. - operation_type (str): - Output only. The operation type. - description (str): - Output only. Short description of operation. - labels (MutableMapping[str, str]): - Output only. Labels associated with the - operation - warnings (MutableSequence[str]): - Output only. Errors encountered during - operation execution. - child_operation_ids (MutableSequence[str]): - Output only. Child operation ids - """ - - cluster_name: str = proto.Field( - proto.STRING, - number=7, - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=8, - ) - status: 'ClusterOperationStatus' = proto.Field( - proto.MESSAGE, - number=9, - message='ClusterOperationStatus', - ) - status_history: MutableSequence['ClusterOperationStatus'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='ClusterOperationStatus', - ) - operation_type: str = proto.Field( - proto.STRING, - number=11, - ) - description: str = proto.Field( - proto.STRING, - number=12, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=13, - ) - warnings: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=14, - ) - child_operation_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=15, - ) - - -class NodeGroupOperationMetadata(proto.Message): - r"""Metadata describing the node group operation. - - Attributes: - node_group_id (str): - Output only. Node group ID for the operation. - cluster_uuid (str): - Output only. Cluster UUID associated with the - node group operation. - status (google.cloud.dataproc_v1.types.ClusterOperationStatus): - Output only. Current operation status. - status_history (MutableSequence[google.cloud.dataproc_v1.types.ClusterOperationStatus]): - Output only. The previous operation status. - operation_type (google.cloud.dataproc_v1.types.NodeGroupOperationMetadata.NodeGroupOperationType): - The operation type. - description (str): - Output only. Short description of operation. - labels (MutableMapping[str, str]): - Output only. Labels associated with the - operation. - warnings (MutableSequence[str]): - Output only. Errors encountered during - operation execution. - """ - class NodeGroupOperationType(proto.Enum): - r"""Operation type for node group resources. - - Values: - NODE_GROUP_OPERATION_TYPE_UNSPECIFIED (0): - Node group operation type is unknown. - CREATE (1): - Create node group operation type. - UPDATE (2): - Update node group operation type. - DELETE (3): - Delete node group operation type. - RESIZE (4): - Resize node group operation type. 
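A small helper showing how the ``ClusterOperationStatus`` state machine above might be read back from operation metadata; a sketch only, assuming the metadata was obtained from a long-running cluster operation elsewhere::

    from google.cloud import dataproc_v1

    def describe(metadata: dataproc_v1.ClusterOperationMetadata) -> str:
        status = metadata.status
        if status.state == dataproc_v1.ClusterOperationStatus.State.DONE:
            # DONE covers both cancelled and completed operations.
            return f"finished: {status.details or 'no details'}"
        return f"{status.state.name} since {status.state_start_time}"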
- """ - NODE_GROUP_OPERATION_TYPE_UNSPECIFIED = 0 - CREATE = 1 - UPDATE = 2 - DELETE = 3 - RESIZE = 4 - - node_group_id: str = proto.Field( - proto.STRING, - number=1, - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=2, - ) - status: 'ClusterOperationStatus' = proto.Field( - proto.MESSAGE, - number=3, - message='ClusterOperationStatus', - ) - status_history: MutableSequence['ClusterOperationStatus'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ClusterOperationStatus', - ) - operation_type: NodeGroupOperationType = proto.Field( - proto.ENUM, - number=5, - enum=NodeGroupOperationType, - ) - description: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - warnings: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/shared.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/shared.py deleted file mode 100644 index ad611d94..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/shared.py +++ /dev/null @@ -1,788 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataproc.v1', - manifest={ - 'Component', - 'FailureAction', - 'RuntimeConfig', - 'EnvironmentConfig', - 'ExecutionConfig', - 'SparkHistoryServerConfig', - 'PeripheralsConfig', - 'RuntimeInfo', - 'UsageMetrics', - 'UsageSnapshot', - 'GkeClusterConfig', - 'KubernetesClusterConfig', - 'KubernetesSoftwareConfig', - 'GkeNodePoolTarget', - 'GkeNodePoolConfig', - }, -) - - -class Component(proto.Enum): - r"""Cluster components that can be activated. - - Values: - COMPONENT_UNSPECIFIED (0): - Unspecified component. Specifying this will - cause Cluster creation to fail. - ANACONDA (5): - The Anaconda python distribution. The - Anaconda component is not supported in the - Dataproc 2.0 - image. The 2.0 image is pre-installed with - Miniconda. - DOCKER (13): - Docker - DRUID (9): - The Druid query engine. (alpha) - FLINK (14): - Flink - HBASE (11): - HBase. (beta) - HIVE_WEBHCAT (3): - The Hive Web HCatalog (the REST service for - accessing HCatalog). - HUDI (18): - Hudi. - JUPYTER (1): - The Jupyter Notebook. - PRESTO (6): - The Presto query engine. - TRINO (17): - The Trino query engine. - RANGER (12): - The Ranger service. - SOLR (10): - The Solr service. - ZEPPELIN (4): - The Zeppelin notebook. - ZOOKEEPER (8): - The Zookeeper service. 
- """ - COMPONENT_UNSPECIFIED = 0 - ANACONDA = 5 - DOCKER = 13 - DRUID = 9 - FLINK = 14 - HBASE = 11 - HIVE_WEBHCAT = 3 - HUDI = 18 - JUPYTER = 1 - PRESTO = 6 - TRINO = 17 - RANGER = 12 - SOLR = 10 - ZEPPELIN = 4 - ZOOKEEPER = 8 - - -class FailureAction(proto.Enum): - r"""Actions in response to failure of a resource associated with - a cluster. - - Values: - FAILURE_ACTION_UNSPECIFIED (0): - When FailureAction is unspecified, failure action defaults - to NO_ACTION. - NO_ACTION (1): - Take no action on failure to create a cluster resource. - NO_ACTION is the default. - DELETE (2): - Delete the failed cluster resource. - """ - FAILURE_ACTION_UNSPECIFIED = 0 - NO_ACTION = 1 - DELETE = 2 - - -class RuntimeConfig(proto.Message): - r"""Runtime configuration for a workload. - - Attributes: - version (str): - Optional. Version of the batch runtime. - container_image (str): - Optional. Optional custom container image for - the job runtime environment. If not specified, a - default container image will be used. - properties (MutableMapping[str, str]): - Optional. A mapping of property names to - values, which are used to configure workload - execution. - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - container_image: str = proto.Field( - proto.STRING, - number=2, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class EnvironmentConfig(proto.Message): - r"""Environment configuration for a workload. - - Attributes: - execution_config (google.cloud.dataproc_v1.types.ExecutionConfig): - Optional. Execution configuration for a - workload. - peripherals_config (google.cloud.dataproc_v1.types.PeripheralsConfig): - Optional. Peripherals configuration that - workload has access to. - """ - - execution_config: 'ExecutionConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='ExecutionConfig', - ) - peripherals_config: 'PeripheralsConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='PeripheralsConfig', - ) - - -class ExecutionConfig(proto.Message): - r"""Execution configuration for a workload. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - service_account (str): - Optional. Service account that used to - execute workload. - network_uri (str): - Optional. Network URI to connect workload to. - - This field is a member of `oneof`_ ``network``. - subnetwork_uri (str): - Optional. Subnetwork URI to connect workload - to. - - This field is a member of `oneof`_ ``network``. - network_tags (MutableSequence[str]): - Optional. Tags used for network traffic - control. - kms_key (str): - Optional. The Cloud KMS key to use for - encryption. - ttl (google.protobuf.duration_pb2.Duration): - Optional. The duration after which the workload will be - terminated. When the workload passes this ttl, it will be - unconditionally killed without waiting for ongoing work to - finish. Minimum value is 10 minutes; maximum value is 14 - days (see JSON representation of - `Duration `__). - If both ttl and idle_ttl are specified, the conditions are - treated as and OR: the workload will be terminated when it - has been idle for idle_ttl or when the ttl has passed, - whichever comes first. 
If ttl is not specified for a - session, it defaults to 24h. - staging_bucket (str): - Optional. A Cloud Storage bucket used to stage workload - dependencies, config files, and store workload output and - other ephemeral data, such as Spark history files. If you do - not specify a staging bucket, Cloud Dataproc will determine - a Cloud Storage location according to the region where your - workload is running, and then create and manage - project-level, per-location staging and temporary buckets. - **This field requires a Cloud Storage bucket name, not a - ``gs://...`` URI to a Cloud Storage bucket.** - """ - - service_account: str = proto.Field( - proto.STRING, - number=2, - ) - network_uri: str = proto.Field( - proto.STRING, - number=4, - oneof='network', - ) - subnetwork_uri: str = proto.Field( - proto.STRING, - number=5, - oneof='network', - ) - network_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - kms_key: str = proto.Field( - proto.STRING, - number=7, - ) - ttl: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=9, - message=duration_pb2.Duration, - ) - staging_bucket: str = proto.Field( - proto.STRING, - number=10, - ) - - -class SparkHistoryServerConfig(proto.Message): - r"""Spark History Server configuration for the workload. - - Attributes: - dataproc_cluster (str): - Optional. Resource name of an existing Dataproc Cluster to - act as a Spark History Server for the workload. - - Example: - - - ``projects/[project_id]/regions/[region]/clusters/[cluster_name]`` - """ - - dataproc_cluster: str = proto.Field( - proto.STRING, - number=1, - ) - - -class PeripheralsConfig(proto.Message): - r"""Auxiliary services configuration for a workload. - - Attributes: - metastore_service (str): - Optional. Resource name of an existing Dataproc Metastore - service. - - Example: - - - ``projects/[project_id]/locations/[region]/services/[service_id]`` - spark_history_server_config (google.cloud.dataproc_v1.types.SparkHistoryServerConfig): - Optional. The Spark History Server - configuration for the workload. - """ - - metastore_service: str = proto.Field( - proto.STRING, - number=1, - ) - spark_history_server_config: 'SparkHistoryServerConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='SparkHistoryServerConfig', - ) - - -class RuntimeInfo(proto.Message): - r"""Runtime information about workload execution. - - Attributes: - endpoints (MutableMapping[str, str]): - Output only. Map of remote access endpoints - (such as web interfaces and APIs) to their URIs. - output_uri (str): - Output only. A URI pointing to the location - of the stdout and stderr of the workload. - diagnostic_output_uri (str): - Output only. A URI pointing to the location - of the diagnostics tarball. - approximate_usage (google.cloud.dataproc_v1.types.UsageMetrics): - Output only. Approximate workload resource usage calculated - after workload finishes (see [Dataproc Serverless pricing] - (https://cloud.google.com/dataproc-serverless/pricing)). - current_usage (google.cloud.dataproc_v1.types.UsageSnapshot): - Output only. Snapshot of current workload - resource usage. 
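A sketch of the ``ExecutionConfig`` fields described above; all values are placeholders, and note that ``staging_bucket`` takes a bare bucket name rather than a ``gs://`` URI::

    from google.cloud import dataproc_v1
    from google.protobuf import duration_pb2

    execution_config = dataproc_v1.ExecutionConfig(
        subnetwork_uri="default",                     # member of the ``network`` oneof
        ttl=duration_pb2.Duration(seconds=4 * 3600),  # terminate after 4 hours
        staging_bucket="example-staging-bucket",      # bucket name, not a gs:// URI
    )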
- """ - - endpoints: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - output_uri: str = proto.Field( - proto.STRING, - number=2, - ) - diagnostic_output_uri: str = proto.Field( - proto.STRING, - number=3, - ) - approximate_usage: 'UsageMetrics' = proto.Field( - proto.MESSAGE, - number=6, - message='UsageMetrics', - ) - current_usage: 'UsageSnapshot' = proto.Field( - proto.MESSAGE, - number=7, - message='UsageSnapshot', - ) - - -class UsageMetrics(proto.Message): - r"""Usage metrics represent approximate total resources consumed - by a workload. - - Attributes: - milli_dcu_seconds (int): - Optional. DCU (Dataproc Compute Units) usage in - (``milliDCU`` x ``seconds``) (see [Dataproc Serverless - pricing] - (https://cloud.google.com/dataproc-serverless/pricing)). - shuffle_storage_gb_seconds (int): - Optional. Shuffle storage usage in (``GB`` x ``seconds``) - (see [Dataproc Serverless pricing] - (https://cloud.google.com/dataproc-serverless/pricing)). - """ - - milli_dcu_seconds: int = proto.Field( - proto.INT64, - number=1, - ) - shuffle_storage_gb_seconds: int = proto.Field( - proto.INT64, - number=2, - ) - - -class UsageSnapshot(proto.Message): - r"""The usage snaphot represents the resources consumed by a - workload at a specified time. - - Attributes: - milli_dcu (int): - Optional. Milli (one-thousandth) Dataproc Compute Units - (DCUs) (see [Dataproc Serverless pricing] - (https://cloud.google.com/dataproc-serverless/pricing)). - shuffle_storage_gb (int): - Optional. Shuffle Storage in gigabytes (GB). (see [Dataproc - Serverless pricing] - (https://cloud.google.com/dataproc-serverless/pricing)) - snapshot_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The timestamp of the usage - snapshot. - """ - - milli_dcu: int = proto.Field( - proto.INT64, - number=1, - ) - shuffle_storage_gb: int = proto.Field( - proto.INT64, - number=2, - ) - snapshot_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class GkeClusterConfig(proto.Message): - r"""The cluster's GKE config. - - Attributes: - gke_cluster_target (str): - Optional. A target GKE cluster to deploy to. It must be in - the same project and region as the Dataproc cluster (the GKE - cluster can be zonal or regional). Format: - 'projects/{project}/locations/{location}/clusters/{cluster_id}' - node_pool_target (MutableSequence[google.cloud.dataproc_v1.types.GkeNodePoolTarget]): - Optional. GKE node pools where workloads will be scheduled. - At least one node pool must be assigned the ``DEFAULT`` - [GkeNodePoolTarget.Role][google.cloud.dataproc.v1.GkeNodePoolTarget.Role]. - If a ``GkeNodePoolTarget`` is not specified, Dataproc - constructs a ``DEFAULT`` ``GkeNodePoolTarget``. Each role - can be given to only one ``GkeNodePoolTarget``. All node - pools must have the same location settings. - """ - - gke_cluster_target: str = proto.Field( - proto.STRING, - number=2, - ) - node_pool_target: MutableSequence['GkeNodePoolTarget'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='GkeNodePoolTarget', - ) - - -class KubernetesClusterConfig(proto.Message): - r"""The configuration for running the Dataproc cluster on - Kubernetes. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - kubernetes_namespace (str): - Optional. A namespace within the Kubernetes - cluster to deploy into. If this namespace does - not exist, it is created. 
If it exists, Dataproc - verifies that another Dataproc VirtualCluster is - not installed into it. If not specified, the - name of the Dataproc Cluster is used. - gke_cluster_config (google.cloud.dataproc_v1.types.GkeClusterConfig): - Required. The configuration for running the - Dataproc cluster on GKE. - - This field is a member of `oneof`_ ``config``. - kubernetes_software_config (google.cloud.dataproc_v1.types.KubernetesSoftwareConfig): - Optional. The software configuration for this - Dataproc cluster running on Kubernetes. - """ - - kubernetes_namespace: str = proto.Field( - proto.STRING, - number=1, - ) - gke_cluster_config: 'GkeClusterConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='config', - message='GkeClusterConfig', - ) - kubernetes_software_config: 'KubernetesSoftwareConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='KubernetesSoftwareConfig', - ) - - -class KubernetesSoftwareConfig(proto.Message): - r"""The software configuration for this Dataproc cluster running - on Kubernetes. - - Attributes: - component_version (MutableMapping[str, str]): - The components that should be installed in - this Dataproc cluster. The key must be a string - from the KubernetesComponent enumeration. The - value is the version of the software to be - installed. - At least one entry must be specified. - properties (MutableMapping[str, str]): - The properties to set on daemon config files. - - Property keys are specified in ``prefix:property`` format, - for example ``spark:spark.kubernetes.container.image``. The - following are supported prefixes and their mappings: - - - spark: ``spark-defaults.conf`` - - For more information, see `Cluster - properties `__. - """ - - component_version: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - properties: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class GkeNodePoolTarget(proto.Message): - r"""GKE node pools that Dataproc workloads run on. - - Attributes: - node_pool (str): - Required. The target GKE node pool. Format: - 'projects/{project}/locations/{location}/clusters/{cluster}/nodePools/{node_pool}' - roles (MutableSequence[google.cloud.dataproc_v1.types.GkeNodePoolTarget.Role]): - Required. The roles associated with the GKE - node pool. - node_pool_config (google.cloud.dataproc_v1.types.GkeNodePoolConfig): - Input only. The configuration for the GKE - node pool. - If specified, Dataproc attempts to create a node - pool with the specified shape. If one with the - same name already exists, it is verified against - all specified fields. If a field differs, the - virtual cluster creation will fail. - - If omitted, any node pool with the specified - name is used. If a node pool with the specified - name does not exist, Dataproc create a node pool - with default values. - - This is an input only field. It will not be - returned by the API. - """ - class Role(proto.Enum): - r"""``Role`` specifies the tasks that will run on the node pool. Roles - can be specific to workloads. Exactly one - [GkeNodePoolTarget][google.cloud.dataproc.v1.GkeNodePoolTarget] - within the virtual cluster must have the ``DEFAULT`` role, which is - used to run all workloads that are not associated with a node pool. - - Values: - ROLE_UNSPECIFIED (0): - Role is unspecified. - DEFAULT (1): - At least one node pool must have the ``DEFAULT`` role. Work - assigned to a role that is not associated with a node pool - is assigned to the node pool with the ``DEFAULT`` role. 
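A sketch of the ``prefix:property`` convention documented for ``KubernetesSoftwareConfig`` above; the component key, version string, and image name are placeholders::

    kubernetes_software_config = dataproc_v1.KubernetesSoftwareConfig(
        component_version={"SPARK": "3.1-dataproc-7"},  # placeholder version
        properties={
            # The ``spark:`` prefix maps to spark-defaults.conf.
            "spark:spark.kubernetes.container.image": "example-image:latest",
        },
    )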
For - example, work assigned to the ``CONTROLLER`` role will be - assigned to the node pool with the ``DEFAULT`` role if no - node pool has the ``CONTROLLER`` role. - CONTROLLER (2): - Run work associated with the Dataproc control - plane (for example, controllers and webhooks). - Very low resource requirements. - SPARK_DRIVER (3): - Run work associated with a Spark driver of a - job. - SPARK_EXECUTOR (4): - Run work associated with a Spark executor of - a job. - """ - ROLE_UNSPECIFIED = 0 - DEFAULT = 1 - CONTROLLER = 2 - SPARK_DRIVER = 3 - SPARK_EXECUTOR = 4 - - node_pool: str = proto.Field( - proto.STRING, - number=1, - ) - roles: MutableSequence[Role] = proto.RepeatedField( - proto.ENUM, - number=2, - enum=Role, - ) - node_pool_config: 'GkeNodePoolConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='GkeNodePoolConfig', - ) - - -class GkeNodePoolConfig(proto.Message): - r"""The configuration of a GKE node pool used by a `Dataproc-on-GKE - cluster `__. - - Attributes: - config (google.cloud.dataproc_v1.types.GkeNodePoolConfig.GkeNodeConfig): - Optional. The node pool configuration. - locations (MutableSequence[str]): - Optional. The list of Compute Engine - `zones `__ - where node pool nodes associated with a Dataproc on GKE - virtual cluster will be located. - - **Note:** All node pools associated with a virtual cluster - must be located in the same region as the virtual cluster, - and they must be located in the same zone within that - region. - - If a location is not specified during node pool creation, - Dataproc on GKE will choose the zone. - autoscaling (google.cloud.dataproc_v1.types.GkeNodePoolConfig.GkeNodePoolAutoscalingConfig): - Optional. The autoscaler configuration for - this node pool. The autoscaler is enabled only - when a valid configuration is present. - """ - - class GkeNodeConfig(proto.Message): - r"""Parameters that describe cluster nodes. - - Attributes: - machine_type (str): - Optional. The name of a Compute Engine `machine - type `__. - local_ssd_count (int): - Optional. The number of local SSD disks to attach to the - node, which is limited by the maximum number of disks - allowable per zone (see `Adding Local - SSDs `__). - preemptible (bool): - Optional. Whether the nodes are created as legacy - [preemptible VM instances] - (https://cloud.google.com/compute/docs/instances/preemptible). - Also see - [Spot][google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodeConfig.spot] - VMs, preemptible VM instances without a maximum lifetime. - Legacy and Spot preemptible nodes cannot be used in a node - pool with the ``CONTROLLER`` [role] - (/dataproc/docs/reference/rest/v1/projects.regions.clusters#role) - or in the DEFAULT node pool if the CONTROLLER role is not - assigned (the DEFAULT node pool will assume the CONTROLLER - role). - accelerators (MutableSequence[google.cloud.dataproc_v1.types.GkeNodePoolConfig.GkeNodePoolAcceleratorConfig]): - Optional. A list of `hardware - accelerators `__ - to attach to each node. - min_cpu_platform (str): - Optional. `Minimum CPU - platform `__ - to be used by this instance. The instance may be scheduled - on the specified or a newer CPU platform. Specify the - friendly names of CPU platforms, such as "Intel Haswell"\` - or Intel Sandy Bridge". - boot_disk_kms_key (str): - Optional. The [Customer Managed Encryption Key (CMEK)] - (https://cloud.google.com/kubernetes-engine/docs/how-to/using-cmek) - used to encrypt the boot disk attached to each node in the - node pool. 
Specify the key using the following format: - projects/KEY_PROJECT_ID/locations/LOCATION/keyRings/RING_NAME/cryptoKeys/KEY_NAME. - spot (bool): - Optional. Whether the nodes are created as [Spot VM - instances] - (https://cloud.google.com/compute/docs/instances/spot). Spot - VMs are the latest update to legacy [preemptible - VMs][google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodeConfig.preemptible]. - Spot VMs do not have a maximum lifetime. Legacy and Spot - preemptible nodes cannot be used in a node pool with the - ``CONTROLLER`` - `role `__ - or in the DEFAULT node pool if the CONTROLLER role is not - assigned (the DEFAULT node pool will assume the CONTROLLER - role). - """ - - machine_type: str = proto.Field( - proto.STRING, - number=1, - ) - local_ssd_count: int = proto.Field( - proto.INT32, - number=7, - ) - preemptible: bool = proto.Field( - proto.BOOL, - number=10, - ) - accelerators: MutableSequence['GkeNodePoolConfig.GkeNodePoolAcceleratorConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='GkeNodePoolConfig.GkeNodePoolAcceleratorConfig', - ) - min_cpu_platform: str = proto.Field( - proto.STRING, - number=13, - ) - boot_disk_kms_key: str = proto.Field( - proto.STRING, - number=23, - ) - spot: bool = proto.Field( - proto.BOOL, - number=32, - ) - - class GkeNodePoolAcceleratorConfig(proto.Message): - r"""A GkeNodeConfigAcceleratorConfig represents a Hardware - Accelerator request for a node pool. - - Attributes: - accelerator_count (int): - The number of accelerator cards exposed to an - instance. - accelerator_type (str): - The accelerator type resource namename (see - GPUs on Compute Engine). - gpu_partition_size (str): - Size of partitions to create on the GPU. Valid values are - described in the NVIDIA `mig user - guide `__. - """ - - accelerator_count: int = proto.Field( - proto.INT64, - number=1, - ) - accelerator_type: str = proto.Field( - proto.STRING, - number=2, - ) - gpu_partition_size: str = proto.Field( - proto.STRING, - number=3, - ) - - class GkeNodePoolAutoscalingConfig(proto.Message): - r"""GkeNodePoolAutoscaling contains information the cluster - autoscaler needs to adjust the size of the node pool to the - current cluster usage. - - Attributes: - min_node_count (int): - The minimum number of nodes in the node pool. Must be >= 0 - and <= max_node_count. - max_node_count (int): - The maximum number of nodes in the node pool. Must be >= - min_node_count, and must be > 0. **Note:** Quota must be - sufficient to scale up the cluster. - """ - - min_node_count: int = proto.Field( - proto.INT32, - number=2, - ) - max_node_count: int = proto.Field( - proto.INT32, - number=3, - ) - - config: GkeNodeConfig = proto.Field( - proto.MESSAGE, - number=2, - message=GkeNodeConfig, - ) - locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=13, - ) - autoscaling: GkeNodePoolAutoscalingConfig = proto.Field( - proto.MESSAGE, - number=4, - message=GkeNodePoolAutoscalingConfig, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/workflow_templates.py b/owl-bot-staging/v1/google/cloud/dataproc_v1/types/workflow_templates.py deleted file mode 100644 index f3140672..00000000 --- a/owl-bot-staging/v1/google/cloud/dataproc_v1/types/workflow_templates.py +++ /dev/null @@ -1,1145 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import jobs as gcd_jobs -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.dataproc.v1', - manifest={ - 'WorkflowTemplate', - 'WorkflowTemplatePlacement', - 'ManagedCluster', - 'ClusterSelector', - 'OrderedJob', - 'TemplateParameter', - 'ParameterValidation', - 'RegexValidation', - 'ValueValidation', - 'WorkflowMetadata', - 'ClusterOperation', - 'WorkflowGraph', - 'WorkflowNode', - 'CreateWorkflowTemplateRequest', - 'GetWorkflowTemplateRequest', - 'InstantiateWorkflowTemplateRequest', - 'InstantiateInlineWorkflowTemplateRequest', - 'UpdateWorkflowTemplateRequest', - 'ListWorkflowTemplatesRequest', - 'ListWorkflowTemplatesResponse', - 'DeleteWorkflowTemplateRequest', - }, -) - - -class WorkflowTemplate(proto.Message): - r"""A Dataproc workflow template resource. - - Attributes: - id (str): - - name (str): - Output only. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates``, the resource - name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): - Optional. Used to perform a consistent read-modify-write. - - This field should be left blank for a - ``CreateWorkflowTemplate`` request. It is required for an - ``UpdateWorkflowTemplate`` request, and must match the - current server version. A typical update template flow would - fetch the current template with a ``GetWorkflowTemplate`` - request, which will return the current template with the - ``version`` field filled in with the current server version. - The user updates other fields in the template, then returns - it as part of the ``UpdateWorkflowTemplate`` request. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time template was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time template was last - updated. - labels (MutableMapping[str, str]): - Optional. The labels to associate with this template. These - labels will be propagated to all jobs and clusters created - by the workflow instance. - - Label **keys** must contain 1 to 63 characters, and must - conform to `RFC - 1035 `__. - - Label **values** may be empty, but, if present, must contain - 1 to 63 characters, and must conform to `RFC - 1035 `__. - - No more than 32 labels can be associated with a template. - placement (google.cloud.dataproc_v1.types.WorkflowTemplatePlacement): - Required. WorkflowTemplate scheduling - information. 
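The read-modify-write cycle that the ``version`` field enforces looks roughly like this in client code; the template name and label are placeholders:

from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient()
name = "projects/my-project/regions/us-central1/workflowTemplates/my-template"

template = client.get_workflow_template(name=name)  # server fills in `version`
template.labels["owner"] = "data-team"              # change any other fields
client.update_workflow_template(template=template)  # fails if version is stale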
- jobs (MutableSequence[google.cloud.dataproc_v1.types.OrderedJob]): - Required. The Directed Acyclic Graph of Jobs - to submit. - parameters (MutableSequence[google.cloud.dataproc_v1.types.TemplateParameter]): - Optional. Template parameters whose values - are substituted into the template. Values for - parameters must be provided when the template is - instantiated. - dag_timeout (google.protobuf.duration_pb2.Duration): - Optional. Timeout duration for the DAG of jobs, expressed in - seconds (see `JSON representation of - duration `__). - The timeout duration must be from 10 minutes ("600s") to 24 - hours ("86400s"). The timer begins when the first job is - submitted. If the workflow is running at the end of the - timeout period, any remaining jobs are cancelled, the - workflow is ended, and if the workflow was running on a - `managed - cluster `__, - the cluster is deleted. - """ - - id: str = proto.Field( - proto.STRING, - number=2, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: int = proto.Field( - proto.INT32, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - placement: 'WorkflowTemplatePlacement' = proto.Field( - proto.MESSAGE, - number=7, - message='WorkflowTemplatePlacement', - ) - jobs: MutableSequence['OrderedJob'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='OrderedJob', - ) - parameters: MutableSequence['TemplateParameter'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='TemplateParameter', - ) - dag_timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=10, - message=duration_pb2.Duration, - ) - - -class WorkflowTemplatePlacement(proto.Message): - r"""Specifies workflow execution target. - - Either ``managed_cluster`` or ``cluster_selector`` is required. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - managed_cluster (google.cloud.dataproc_v1.types.ManagedCluster): - A cluster that is managed by the workflow. - - This field is a member of `oneof`_ ``placement``. - cluster_selector (google.cloud.dataproc_v1.types.ClusterSelector): - Optional. A selector that chooses target - cluster for jobs based on metadata. - - The selector is evaluated at the time each job - is submitted. - - This field is a member of `oneof`_ ``placement``. - """ - - managed_cluster: 'ManagedCluster' = proto.Field( - proto.MESSAGE, - number=1, - oneof='placement', - message='ManagedCluster', - ) - cluster_selector: 'ClusterSelector' = proto.Field( - proto.MESSAGE, - number=2, - oneof='placement', - message='ClusterSelector', - ) - - -class ManagedCluster(proto.Message): - r"""Cluster that is managed by the workflow. - - Attributes: - cluster_name (str): - Required. The cluster name prefix. A unique - cluster name will be formed by appending a - random suffix. - The name must contain only lower-case letters - (a-z), numbers (0-9), and hyphens (-). Must - begin with a letter. Cannot begin or end with - hyphen. 
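Since ``placement`` is a oneof, assigning one member clears the other. A small sketch (the labels and name are illustrative):

from google.cloud import dataproc_v1

placement = dataproc_v1.WorkflowTemplatePlacement()
placement.cluster_selector = dataproc_v1.ClusterSelector(
    cluster_labels={"env": "prod"},
)
# This assignment silently clears cluster_selector, the other oneof member.
placement.managed_cluster = dataproc_v1.ManagedCluster(cluster_name="wf-cluster")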
Must consist of between 2 and 35 - characters. - config (google.cloud.dataproc_v1.types.ClusterConfig): - Required. The cluster configuration. - labels (MutableMapping[str, str]): - Optional. The labels to associate with this cluster. - - Label keys must be between 1 and 63 characters long, and - must conform to the following PCRE regular expression: - [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} - - Label values must be between 1 and 63 characters long, and - must conform to the following PCRE regular expression: - [\p{Ll}\p{Lo}\p{N}_-]{0,63} - - No more than 32 labels can be associated with a given - cluster. - """ - - cluster_name: str = proto.Field( - proto.STRING, - number=2, - ) - config: clusters.ClusterConfig = proto.Field( - proto.MESSAGE, - number=3, - message=clusters.ClusterConfig, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - -class ClusterSelector(proto.Message): - r"""A selector that chooses target cluster for jobs based on - metadata. - - Attributes: - zone (str): - Optional. The zone where workflow process - executes. This parameter does not affect the - selection of the cluster. - If unspecified, the zone of the first cluster - matching the selector is used. - cluster_labels (MutableMapping[str, str]): - Required. The cluster labels. Cluster must - have all labels to match. - """ - - zone: str = proto.Field( - proto.STRING, - number=1, - ) - cluster_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class OrderedJob(proto.Message): - r"""A job executed by the workflow. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - step_id (str): - Required. The step id. The id must be unique among all jobs - within the template. - - The step id is used as prefix for job id, as job - ``goog-dataproc-workflow-step-id`` label, and in - [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] - field from other steps. - - The id must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). Cannot begin or end with - underscore or hyphen. Must consist of between 3 and 50 - characters. - hadoop_job (google.cloud.dataproc_v1.types.HadoopJob): - Optional. Job is a Hadoop job. - - This field is a member of `oneof`_ ``job_type``. - spark_job (google.cloud.dataproc_v1.types.SparkJob): - Optional. Job is a Spark job. - - This field is a member of `oneof`_ ``job_type``. - pyspark_job (google.cloud.dataproc_v1.types.PySparkJob): - Optional. Job is a PySpark job. - - This field is a member of `oneof`_ ``job_type``. - hive_job (google.cloud.dataproc_v1.types.HiveJob): - Optional. Job is a Hive job. - - This field is a member of `oneof`_ ``job_type``. - pig_job (google.cloud.dataproc_v1.types.PigJob): - Optional. Job is a Pig job. - - This field is a member of `oneof`_ ``job_type``. - spark_r_job (google.cloud.dataproc_v1.types.SparkRJob): - Optional. Job is a SparkR job. - - This field is a member of `oneof`_ ``job_type``. - spark_sql_job (google.cloud.dataproc_v1.types.SparkSqlJob): - Optional. Job is a SparkSql job. - - This field is a member of `oneof`_ ``job_type``. - presto_job (google.cloud.dataproc_v1.types.PrestoJob): - Optional. 
Job is a Presto job. - - This field is a member of `oneof`_ ``job_type``. - labels (MutableMapping[str, str]): - Optional. The labels to associate with this job. - - Label keys must be between 1 and 63 characters long, and - must conform to the following regular expression: - [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} - - Label values must be between 1 and 63 characters long, and - must conform to the following regular expression: - [\p{Ll}\p{Lo}\p{N}_-]{0,63} - - No more than 32 labels can be associated with a given job. - scheduling (google.cloud.dataproc_v1.types.JobScheduling): - Optional. Job scheduling configuration. - prerequisite_step_ids (MutableSequence[str]): - Optional. The optional list of prerequisite job step_ids. If - not specified, the job will start at the beginning of - workflow. - """ - - step_id: str = proto.Field( - proto.STRING, - number=1, - ) - hadoop_job: gcd_jobs.HadoopJob = proto.Field( - proto.MESSAGE, - number=2, - oneof='job_type', - message=gcd_jobs.HadoopJob, - ) - spark_job: gcd_jobs.SparkJob = proto.Field( - proto.MESSAGE, - number=3, - oneof='job_type', - message=gcd_jobs.SparkJob, - ) - pyspark_job: gcd_jobs.PySparkJob = proto.Field( - proto.MESSAGE, - number=4, - oneof='job_type', - message=gcd_jobs.PySparkJob, - ) - hive_job: gcd_jobs.HiveJob = proto.Field( - proto.MESSAGE, - number=5, - oneof='job_type', - message=gcd_jobs.HiveJob, - ) - pig_job: gcd_jobs.PigJob = proto.Field( - proto.MESSAGE, - number=6, - oneof='job_type', - message=gcd_jobs.PigJob, - ) - spark_r_job: gcd_jobs.SparkRJob = proto.Field( - proto.MESSAGE, - number=11, - oneof='job_type', - message=gcd_jobs.SparkRJob, - ) - spark_sql_job: gcd_jobs.SparkSqlJob = proto.Field( - proto.MESSAGE, - number=7, - oneof='job_type', - message=gcd_jobs.SparkSqlJob, - ) - presto_job: gcd_jobs.PrestoJob = proto.Field( - proto.MESSAGE, - number=12, - oneof='job_type', - message=gcd_jobs.PrestoJob, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - scheduling: gcd_jobs.JobScheduling = proto.Field( - proto.MESSAGE, - number=9, - message=gcd_jobs.JobScheduling, - ) - prerequisite_step_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=10, - ) - - -class TemplateParameter(proto.Message): - r"""A configurable parameter that replaces one or more fields in - the template. Parameterizable fields: - - - Labels - - File uris - - Job properties - - Job arguments - - Script variables - - Main class (in HadoopJob and SparkJob) - - Zone (in ClusterSelector) - - Attributes: - name (str): - Required. Parameter name. The parameter name is used as the - key, and paired with the parameter value, which are passed - to the template when the template is instantiated. The name - must contain only capital letters (A-Z), numbers (0-9), and - underscores (_), and must not start with a number. The - maximum length is 40 characters. - fields (MutableSequence[str]): - Required. Paths to all fields that the parameter replaces. A - field is allowed to appear in at most one parameter's list - of field paths. - - A field path is similar in syntax to a - [google.protobuf.FieldMask][google.protobuf.FieldMask]. For - example, a field path that references the zone field of a - workflow template's cluster selector would be specified as - ``placement.clusterSelector.zone``. 
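A sketch of two DAG steps chained with ``prerequisite_step_ids``; the URIs and step ids are placeholders:

from google.cloud import dataproc_v1

prep = dataproc_v1.OrderedJob(
    step_id="prep",
    pyspark_job=dataproc_v1.PySparkJob(
        main_python_file_uri="gs://my-bucket/prep.py",
    ),
)
report = dataproc_v1.OrderedJob(
    step_id="report",
    hive_job=dataproc_v1.HiveJob(query_file_uri="gs://my-bucket/report.hql"),
    prerequisite_step_ids=["prep"],  # starts only after the "prep" step
)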
- - Also, field paths can reference fields using the following - syntax: - - - Values in maps can be referenced by key: - - - labels['key'] - placement.clusterSelector.clusterLabels['key'] - placement.managedCluster.labels['key'] - jobs['step-id'].labels['key'] - - - Jobs in the jobs list can be referenced by step-id: - - - jobs['step-id'].hadoopJob.mainJarFileUri - jobs['step-id'].hiveJob.queryFileUri - jobs['step-id'].pySparkJob.mainPythonFileUri - jobs['step-id'].hadoopJob.jarFileUris[0] - jobs['step-id'].hadoopJob.archiveUris[0] - jobs['step-id'].hadoopJob.fileUris[0] - jobs['step-id'].pySparkJob.pythonFileUris[0] - - - Items in repeated fields can be referenced by a - zero-based index: - - - jobs['step-id'].sparkJob.args[0] - - - Other examples: - - - jobs['step-id'].hadoopJob.properties['key'] - jobs['step-id'].hadoopJob.args[0] - jobs['step-id'].hiveJob.scriptVariables['key'] - jobs['step-id'].hadoopJob.mainJarFileUri - placement.clusterSelector.zone - - It may not be possible to parameterize maps and repeated - fields in their entirety since only individual map values - and individual items in repeated fields can be referenced. - For example, the following field paths are invalid: - - - placement.clusterSelector.clusterLabels - - jobs['step-id'].sparkJob.args - description (str): - Optional. Brief description of the parameter. - Must not exceed 1024 characters. - validation (google.cloud.dataproc_v1.types.ParameterValidation): - Optional. Validation rules to be applied to - this parameter's value. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - fields: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - validation: 'ParameterValidation' = proto.Field( - proto.MESSAGE, - number=4, - message='ParameterValidation', - ) - - -class ParameterValidation(proto.Message): - r"""Configuration for parameter validation. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - regex (google.cloud.dataproc_v1.types.RegexValidation): - Validation based on regular expressions. - - This field is a member of `oneof`_ ``validation_type``. - values (google.cloud.dataproc_v1.types.ValueValidation): - Validation based on a list of allowed values. - - This field is a member of `oneof`_ ``validation_type``. - """ - - regex: 'RegexValidation' = proto.Field( - proto.MESSAGE, - number=1, - oneof='validation_type', - message='RegexValidation', - ) - values: 'ValueValidation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='validation_type', - message='ValueValidation', - ) - - -class RegexValidation(proto.Message): - r"""Validation based on regular expressions. - - Attributes: - regexes (MutableSequence[str]): - Required. RE2 regular expressions used to - validate the parameter's value. The value must - match the regex in its entirety (substring - matches are not sufficient). - """ - - regexes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class ValueValidation(proto.Message): - r"""Validation based on a list of allowed values. - - Attributes: - values (MutableSequence[str]): - Required.
List of allowed values for the - parameter. - """ - - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class WorkflowMetadata(proto.Message): - r"""A Dataproc workflow template resource. - - Attributes: - template (str): - Output only. The resource name of the workflow template as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates``, the resource - name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): - Output only. The version of template at the - time of workflow instantiation. - create_cluster (google.cloud.dataproc_v1.types.ClusterOperation): - Output only. The create cluster operation - metadata. - graph (google.cloud.dataproc_v1.types.WorkflowGraph): - Output only. The workflow graph. - delete_cluster (google.cloud.dataproc_v1.types.ClusterOperation): - Output only. The delete cluster operation - metadata. - state (google.cloud.dataproc_v1.types.WorkflowMetadata.State): - Output only. The workflow state. - cluster_name (str): - Output only. The name of the target cluster. - parameters (MutableMapping[str, str]): - Map from parameter names to values that were - used for those parameters. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Workflow start time. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Workflow end time. - cluster_uuid (str): - Output only. The UUID of target cluster. - dag_timeout (google.protobuf.duration_pb2.Duration): - Output only. The timeout duration for the DAG of jobs, - expressed in seconds (see `JSON representation of - duration `__). - dag_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. DAG start time, only set for workflows with - [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] - when DAG begins. - dag_end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. DAG end time, only set for workflows with - [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] - when DAG ends. - """ - class State(proto.Enum): - r"""The operation state. - - Values: - UNKNOWN (0): - Unused. - PENDING (1): - The operation has been created. - RUNNING (2): - The operation is running. - DONE (3): - The operation is done; either cancelled or - completed. 
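This metadata is surfaced on the long-running operation returned by ``instantiate_workflow_template``. A sketch, assuming a default endpoint and a placeholder resource name:

from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient()  # may need a regional endpoint via client_options
operation = client.instantiate_workflow_template(
    name="projects/my-project/regions/us-central1/workflowTemplates/my-template",
)
metadata = operation.metadata   # a WorkflowMetadata message, once the server reports it
if metadata is not None:
    print(metadata.state)       # PENDING, RUNNING, or DONE
operation.result()              # block until the workflow finishes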
- """ - UNKNOWN = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - - template: str = proto.Field( - proto.STRING, - number=1, - ) - version: int = proto.Field( - proto.INT32, - number=2, - ) - create_cluster: 'ClusterOperation' = proto.Field( - proto.MESSAGE, - number=3, - message='ClusterOperation', - ) - graph: 'WorkflowGraph' = proto.Field( - proto.MESSAGE, - number=4, - message='WorkflowGraph', - ) - delete_cluster: 'ClusterOperation' = proto.Field( - proto.MESSAGE, - number=5, - message='ClusterOperation', - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - cluster_name: str = proto.Field( - proto.STRING, - number=7, - ) - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - cluster_uuid: str = proto.Field( - proto.STRING, - number=11, - ) - dag_timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=12, - message=duration_pb2.Duration, - ) - dag_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp_pb2.Timestamp, - ) - dag_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - - -class ClusterOperation(proto.Message): - r"""The cluster operation triggered by a workflow. - - Attributes: - operation_id (str): - Output only. The id of the cluster operation. - error (str): - Output only. Error, if operation failed. - done (bool): - Output only. Indicates the operation is done. - """ - - operation_id: str = proto.Field( - proto.STRING, - number=1, - ) - error: str = proto.Field( - proto.STRING, - number=2, - ) - done: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class WorkflowGraph(proto.Message): - r"""The workflow graph. - - Attributes: - nodes (MutableSequence[google.cloud.dataproc_v1.types.WorkflowNode]): - Output only. The workflow nodes. - """ - - nodes: MutableSequence['WorkflowNode'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='WorkflowNode', - ) - - -class WorkflowNode(proto.Message): - r"""The workflow node. - - Attributes: - step_id (str): - Output only. The name of the node. - prerequisite_step_ids (MutableSequence[str]): - Output only. Node's prerequisite nodes. - job_id (str): - Output only. The job id; populated after the - node enters RUNNING state. - state (google.cloud.dataproc_v1.types.WorkflowNode.NodeState): - Output only. The node state. - error (str): - Output only. The error detail. - """ - class NodeState(proto.Enum): - r"""The workflow node state. - - Values: - NODE_STATE_UNSPECIFIED (0): - State is unspecified. - BLOCKED (1): - The node is awaiting prerequisite node to - finish. - RUNNABLE (2): - The node is runnable but not running. - RUNNING (3): - The node is running. - COMPLETED (4): - The node completed successfully. - FAILED (5): - The node failed. A node can be marked FAILED - because its ancestor or peer failed. 
- """ - NODE_STATE_UNSPECIFIED = 0 - BLOCKED = 1 - RUNNABLE = 2 - RUNNING = 3 - COMPLETED = 4 - FAILED = 5 - - step_id: str = proto.Field( - proto.STRING, - number=1, - ) - prerequisite_step_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - job_id: str = proto.Field( - proto.STRING, - number=3, - ) - state: NodeState = proto.Field( - proto.ENUM, - number=5, - enum=NodeState, - ) - error: str = proto.Field( - proto.STRING, - number=6, - ) - - -class CreateWorkflowTemplateRequest(proto.Message): - r"""A request to create a workflow template. - - Attributes: - parent (str): - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.create``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.create``, the - resource name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - template (google.cloud.dataproc_v1.types.WorkflowTemplate): - Required. The Dataproc workflow template to - create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - template: 'WorkflowTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='WorkflowTemplate', - ) - - -class GetWorkflowTemplateRequest(proto.Message): - r"""A request to fetch a workflow template. - - Attributes: - name (str): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.get``, the - resource name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.get``, the - resource name of the template has the following format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): - Optional. The version of workflow template to - retrieve. Only previously instantiated versions - can be retrieved. - If unspecified, retrieves the current version. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: int = proto.Field( - proto.INT32, - number=2, - ) - - -class InstantiateWorkflowTemplateRequest(proto.Message): - r"""A request to instantiate a workflow template. - - Attributes: - name (str): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.instantiate``, - the resource name of the template has the following - format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.instantiate``, - the resource name of the template has the following - format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): - Optional. The version of workflow template to - instantiate. If specified, the workflow will be - instantiated only if the current version of the - workflow template has the supplied version. - This option cannot be used to instantiate a - previous version of workflow template. - request_id (str): - Optional. A tag that prevents multiple concurrent workflow - instances with the same tag from running. This mitigates - risk of concurrent instances started due to retries. 
- - It is recommended to always set this value to a - `UUID `__. - - The tag must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - parameters (MutableMapping[str, str]): - Optional. Map from parameter names to values - that should be used for those parameters. Values - may not exceed 1000 characters. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: int = proto.Field( - proto.INT32, - number=2, - ) - request_id: str = proto.Field( - proto.STRING, - number=5, - ) - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - - -class InstantiateInlineWorkflowTemplateRequest(proto.Message): - r"""A request to instantiate an inline workflow template. - - Attributes: - parent (str): - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For - ``projects.regions.workflowTemplates.instantiateinline``, - the resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For - ``projects.locations.workflowTemplates.instantiateinline``, - the resource name of the location has the following - format: ``projects/{project_id}/locations/{location}`` - template (google.cloud.dataproc_v1.types.WorkflowTemplate): - Required. The workflow template to - instantiate. - request_id (str): - Optional. A tag that prevents multiple concurrent workflow - instances with the same tag from running. This mitigates - risk of concurrent instances started due to retries. - - It is recommended to always set this value to a - `UUID `__. - - The tag must contain only letters (a-z, A-Z), numbers (0-9), - underscores (_), and hyphens (-). The maximum length is 40 - characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - template: 'WorkflowTemplate' = proto.Field( - proto.MESSAGE, - number=2, - message='WorkflowTemplate', - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class UpdateWorkflowTemplateRequest(proto.Message): - r"""A request to update a workflow template. - - Attributes: - template (google.cloud.dataproc_v1.types.WorkflowTemplate): - Required. The updated workflow template. - - The ``template.version`` field must match the current - version. - """ - - template: 'WorkflowTemplate' = proto.Field( - proto.MESSAGE, - number=1, - message='WorkflowTemplate', - ) - - -class ListWorkflowTemplatesRequest(proto.Message): - r"""A request to list workflow templates in a project. - - Attributes: - parent (str): - Required. The resource name of the region or location, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.list``, the - resource name of the region has the following format: - ``projects/{project_id}/regions/{region}`` - - - For ``projects.locations.workflowTemplates.list``, the - resource name of the location has the following format: - ``projects/{project_id}/locations/{location}`` - page_size (int): - Optional. The maximum number of results to - return in each response. - page_token (str): - Optional. The page token, returned by a - previous call, to request the next page of - results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListWorkflowTemplatesResponse(proto.Message): - r"""A response to a request to list workflow templates in a - project. - - Attributes: - templates (MutableSequence[google.cloud.dataproc_v1.types.WorkflowTemplate]): - Output only. WorkflowTemplates list. - next_page_token (str): - Output only. This token is included in the response if there - are more results to fetch. To fetch additional results, - provide this value as the page_token in a subsequent - ListWorkflowTemplatesRequest. - """ - - @property - def raw_page(self): - return self - - templates: MutableSequence['WorkflowTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='WorkflowTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - )
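On the client side these paging fields are wrapped in a pager that follows ``next_page_token`` automatically. A sketch with a placeholder parent:

from google.cloud import dataproc_v1

client = dataproc_v1.WorkflowTemplateServiceClient()
pager = client.list_workflow_templates(
    parent="projects/my-project/regions/us-central1",
)
for template in pager:  # subsequent pages are fetched transparently
    print(template.name)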
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListWorkflowTemplatesResponse(proto.Message): - r"""A response to a request to list workflow templates in a - project. - - Attributes: - templates (MutableSequence[google.cloud.dataproc_v1.types.WorkflowTemplate]): - Output only. WorkflowTemplates list. - next_page_token (str): - Output only. This token is included in the response if there - are more results to fetch. To fetch additional results, - provide this value as the page_token in a subsequent - ListWorkflowTemplatesRequest. - """ - - @property - def raw_page(self): - return self - - templates: MutableSequence['WorkflowTemplate'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='WorkflowTemplate', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteWorkflowTemplateRequest(proto.Message): - r"""A request to delete a workflow template. - Currently started workflows will remain running. - - Attributes: - name (str): - Required. The resource name of the workflow template, as - described in - https://cloud.google.com/apis/design/resource_names. - - - For ``projects.regions.workflowTemplates.delete``, the - resource name of the template has the following format: - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` - - - For ``projects.locations.workflowTemplates.instantiate``, - the resource name of the template has the following - format: - ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` - version (int): - Optional. The version of workflow template to - delete. If specified, will only delete the - template if the current server version matches - specified version. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - version: int = proto.Field( - proto.INT32, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index eae7bcca..00000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5aed..00000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index eae7bcca..00000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/dataproc_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black.
Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py deleted file mode 100644 index d71538aa..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_create_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - policy = dataproc_v1.AutoscalingPolicy() - policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 - policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 - policy.worker_config.max_instances = 1389 - - request = dataproc_v1.CreateAutoscalingPolicyRequest( - parent="parent_value", - policy=policy, - ) - - # Make the request - response = await client.create_autoscaling_policy(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py deleted file mode 100644 index a960ad69..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_create_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - policy = dataproc_v1.AutoscalingPolicy() - policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 - policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 - policy.worker_config.max_instances = 1389 - - request = dataproc_v1.CreateAutoscalingPolicyRequest( - parent="parent_value", - policy=policy, - ) - - # Make the request - response = client.create_autoscaling_policy(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py deleted file mode 100644 index f386da4f..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_delete_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteAutoscalingPolicyRequest( - name="name_value", - ) - - # Make the request - await client.delete_autoscaling_policy(request=request) - - -# [END dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py deleted file mode 100644 index e9878928..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_delete_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteAutoscalingPolicyRequest( - name="name_value", - ) - - # Make the request - client.delete_autoscaling_policy(request=request) - - -# [END dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py deleted file mode 100644 index 512483c5..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_get_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetAutoscalingPolicyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_autoscaling_policy(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py deleted file mode 100644 index d3fa46eb..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_get_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.GetAutoscalingPolicyRequest( - name="name_value", - ) - - # Make the request - response = client.get_autoscaling_policy(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py deleted file mode 100644 index f7f9e058..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAutoscalingPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_list_autoscaling_policies(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ListAutoscalingPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_autoscaling_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py deleted file mode 100644 index 1d834547..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAutoscalingPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_list_autoscaling_policies(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - request = dataproc_v1.ListAutoscalingPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_autoscaling_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py deleted file mode 100644 index 55091bd7..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_update_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceAsyncClient() - - # Initialize request argument(s) - policy = dataproc_v1.AutoscalingPolicy() - policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 - policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 - policy.worker_config.max_instances = 1389 - - request = dataproc_v1.UpdateAutoscalingPolicyRequest( - policy=policy, - ) - - # Make the request - response = await client.update_autoscaling_policy(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py deleted file mode 100644 index 1446f9bd..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAutoscalingPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_update_autoscaling_policy(): - # Create a client - client = dataproc_v1.AutoscalingPolicyServiceClient() - - # Initialize request argument(s) - policy = dataproc_v1.AutoscalingPolicy() - policy.basic_algorithm.yarn_config.scale_up_factor = 0.1578 - policy.basic_algorithm.yarn_config.scale_down_factor = 0.1789 - policy.worker_config.max_instances = 1389 - - request = dataproc_v1.UpdateAutoscalingPolicyRequest( - policy=policy, - ) - - # Make the request - response = client.update_autoscaling_policy(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_create_batch_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_create_batch_async.py deleted file mode 100644 index c0c15b46..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_create_batch_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBatch -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_CreateBatch_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_create_batch(): - # Create a client - client = dataproc_v1.BatchControllerAsyncClient() - - # Initialize request argument(s) - batch = dataproc_v1.Batch() - batch.pyspark_batch.main_python_file_uri = "main_python_file_uri_value" - - request = dataproc_v1.CreateBatchRequest( - parent="parent_value", - batch=batch, - ) - - # Make the request - operation = client.create_batch(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_BatchController_CreateBatch_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_create_batch_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_create_batch_sync.py deleted file mode 100644 index 504ab671..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_create_batch_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBatch -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_CreateBatch_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_create_batch(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - batch = dataproc_v1.Batch() - batch.pyspark_batch.main_python_file_uri = "main_python_file_uri_value" - - request = dataproc_v1.CreateBatchRequest( - parent="parent_value", - batch=batch, - ) - - # Make the request - operation = client.create_batch(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_BatchController_CreateBatch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_delete_batch_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_delete_batch_async.py deleted file mode 100644 index 16a69d70..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_delete_batch_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBatch -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_DeleteBatch_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_delete_batch(): - # Create a client - client = dataproc_v1.BatchControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteBatchRequest( - name="name_value", - ) - - # Make the request - await client.delete_batch(request=request) - - -# [END dataproc_v1_generated_BatchController_DeleteBatch_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_delete_batch_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_delete_batch_sync.py deleted file mode 100644 index 2cbbe3e2..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_delete_batch_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBatch -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_DeleteBatch_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_delete_batch(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteBatchRequest( - name="name_value", - ) - - # Make the request - client.delete_batch(request=request) - - -# [END dataproc_v1_generated_BatchController_DeleteBatch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_get_batch_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_get_batch_async.py deleted file mode 100644 index 4356b33b..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_get_batch_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBatch -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_GetBatch_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_get_batch(): - # Create a client - client = dataproc_v1.BatchControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetBatchRequest( - name="name_value", - ) - - # Make the request - response = await client.get_batch(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_BatchController_GetBatch_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_get_batch_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_get_batch_sync.py deleted file mode 100644 index a31e7bd5..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_get_batch_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBatch -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_GetBatch_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_get_batch(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.GetBatchRequest( - name="name_value", - ) - - # Make the request - response = client.get_batch(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_BatchController_GetBatch_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_list_batches_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_list_batches_async.py deleted file mode 100644 index 9d8fb10c..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_list_batches_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBatches -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_ListBatches_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_list_batches(): - # Create a client - client = dataproc_v1.BatchControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ListBatchesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_batches(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataproc_v1_generated_BatchController_ListBatches_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_list_batches_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_list_batches_sync.py deleted file mode 100644 index 2fbe7e49..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_batch_controller_list_batches_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBatches -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_BatchController_ListBatches_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_list_batches(): - # Create a client - client = dataproc_v1.BatchControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.ListBatchesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_batches(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataproc_v1_generated_BatchController_ListBatches_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_create_cluster_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_create_cluster_async.py deleted file mode 100644 index 7e92b68e..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_create_cluster_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_CreateCluster_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_create_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - cluster = dataproc_v1.Cluster() - cluster.project_id = "project_id_value" - cluster.cluster_name = "cluster_name_value" - - request = dataproc_v1.CreateClusterRequest( - project_id="project_id_value", - region="region_value", - cluster=cluster, - ) - - # Make the request - operation = client.create_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_CreateCluster_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_create_cluster_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_create_cluster_sync.py deleted file mode 100644 index d0045cb5..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_create_cluster_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_CreateCluster_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_create_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - cluster = dataproc_v1.Cluster() - cluster.project_id = "project_id_value" - cluster.cluster_name = "cluster_name_value" - - request = dataproc_v1.CreateClusterRequest( - project_id="project_id_value", - region="region_value", - cluster=cluster, - ) - - # Make the request - operation = client.create_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_CreateCluster_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_delete_cluster_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_delete_cluster_async.py deleted file mode 100644 index 741c33f7..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_delete_cluster_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_DeleteCluster_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_delete_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.delete_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_DeleteCluster_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_delete_cluster_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_delete_cluster_sync.py deleted file mode 100644 index 56c1f6e2..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_delete_cluster_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_DeleteCluster_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_delete_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.DeleteClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.delete_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_DeleteCluster_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py deleted file mode 100644 index 477511c8..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DiagnoseCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_DiagnoseCluster_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_diagnose_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.DiagnoseClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.diagnose_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_DiagnoseCluster_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py deleted file mode 100644 index 659793c0..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DiagnoseCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_DiagnoseCluster_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_diagnose_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.DiagnoseClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.diagnose_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_DiagnoseCluster_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_get_cluster_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_get_cluster_async.py deleted file mode 100644 index f99060cf..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_get_cluster_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_GetCluster_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_get_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.GetClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - response = await client.get_cluster(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_GetCluster_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_get_cluster_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_get_cluster_sync.py deleted file mode 100644 index b5beefbd..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_get_cluster_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_GetCluster_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_get_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.GetClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - response = client.get_cluster(request=request) - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_GetCluster_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_list_clusters_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_list_clusters_async.py deleted file mode 100644 index 1fc6871d..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_list_clusters_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListClusters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_ListClusters_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_list_clusters(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.ListClustersRequest( - project_id="project_id_value", - region="region_value", - ) - - # Make the request - page_result = client.list_clusters(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataproc_v1_generated_ClusterController_ListClusters_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_list_clusters_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_list_clusters_sync.py deleted file mode 100644 index ffcc2de4..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_list_clusters_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListClusters -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_ListClusters_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_list_clusters(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.ListClustersRequest( - project_id="project_id_value", - region="region_value", - ) - - # Make the request - page_result = client.list_clusters(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataproc_v1_generated_ClusterController_ListClusters_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_start_cluster_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_start_cluster_async.py deleted file mode 100644 index fe547f27..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_start_cluster_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_StartCluster_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_start_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.StartClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.start_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_StartCluster_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_start_cluster_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_start_cluster_sync.py deleted file mode 100644 index 191b90cc..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_start_cluster_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_StartCluster_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_start_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.StartClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.start_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_StartCluster_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_stop_cluster_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_stop_cluster_async.py deleted file mode 100644 index 3ade21a1..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_stop_cluster_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StopCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_StopCluster_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -async def sample_stop_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerAsyncClient() - - # Initialize request argument(s) - request = dataproc_v1.StopClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.stop_cluster(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_StopCluster_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_stop_cluster_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_stop_cluster_sync.py deleted file mode 100644 index 619a787f..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_stop_cluster_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StopCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_StopCluster_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataproc_v1 - - -def sample_stop_cluster(): - # Create a client - client = dataproc_v1.ClusterControllerClient() - - # Initialize request argument(s) - request = dataproc_v1.StopClusterRequest( - project_id="project_id_value", - region="region_value", - cluster_name="cluster_name_value", - ) - - # Make the request - operation = client.stop_cluster(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END dataproc_v1_generated_ClusterController_StopCluster_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_update_cluster_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_update_cluster_async.py deleted file mode 100644 index ac5afd6d..00000000 --- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_update_cluster_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateCluster -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataproc - - -# [START dataproc_v1_generated_ClusterController_UpdateCluster_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_update_cluster():
-    # Create a client
-    client = dataproc_v1.ClusterControllerAsyncClient()
-
-    # Initialize request argument(s)
-    cluster = dataproc_v1.Cluster()
-    cluster.project_id = "project_id_value"
-    cluster.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.UpdateClusterRequest(
-        project_id="project_id_value",
-        region="region_value",
-        cluster_name="cluster_name_value",
-        cluster=cluster,
-    )
-
-    # Make the request
-    operation = client.update_cluster(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_ClusterController_UpdateCluster_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_update_cluster_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_update_cluster_sync.py
deleted file mode 100644
index 0e755729..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_cluster_controller_update_cluster_sync.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateCluster
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_ClusterController_UpdateCluster_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_update_cluster():
-    # Create a client
-    client = dataproc_v1.ClusterControllerClient()
-
-    # Initialize request argument(s)
-    cluster = dataproc_v1.Cluster()
-    cluster.project_id = "project_id_value"
-    cluster.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.UpdateClusterRequest(
-        project_id="project_id_value",
-        region="region_value",
-        cluster_name="cluster_name_value",
-        cluster=cluster,
-    )
-
-    # Make the request
-    operation = client.update_cluster(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_ClusterController_UpdateCluster_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_cancel_job_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_cancel_job_async.py
deleted file mode 100644
index 3b93e936..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_cancel_job_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CancelJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_CancelJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_cancel_job():
-    # Create a client
-    client = dataproc_v1.JobControllerAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.CancelJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-    )
-
-    # Make the request
-    response = await client.cancel_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_CancelJob_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_cancel_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_cancel_job_sync.py
deleted file mode 100644
index 43dfe446..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_cancel_job_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CancelJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_CancelJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_cancel_job():
-    # Create a client
-    client = dataproc_v1.JobControllerClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.CancelJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-    )
-
-    # Make the request
-    response = client.cancel_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_CancelJob_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_delete_job_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_delete_job_async.py
deleted file mode 100644
index 0878f959..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_delete_job_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_DeleteJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_delete_job():
-    # Create a client
-    client = dataproc_v1.JobControllerAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.DeleteJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-    )
-
-    # Make the request
-    await client.delete_job(request=request)
-
-
-# [END dataproc_v1_generated_JobController_DeleteJob_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_delete_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_delete_job_sync.py
deleted file mode 100644
index 7b31faec..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_delete_job_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_DeleteJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_delete_job():
-    # Create a client
-    client = dataproc_v1.JobControllerClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.DeleteJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-    )
-
-    # Make the request
-    client.delete_job(request=request)
-
-
-# [END dataproc_v1_generated_JobController_DeleteJob_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_get_job_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_get_job_async.py
deleted file mode 100644
index 1f54e0b2..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_get_job_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_GetJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_get_job():
-    # Create a client
-    client = dataproc_v1.JobControllerAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.GetJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-    )
-
-    # Make the request
-    response = await client.get_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_GetJob_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_get_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_get_job_sync.py
deleted file mode 100644
index 16cb7e12..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_get_job_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_GetJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_get_job():
-    # Create a client
-    client = dataproc_v1.JobControllerClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.GetJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-    )
-
-    # Make the request
-    response = client.get_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_GetJob_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_list_jobs_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_list_jobs_async.py
deleted file mode 100644
index 4c4aa580..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_list_jobs_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_ListJobs_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_list_jobs():
-    # Create a client
-    client = dataproc_v1.JobControllerAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.ListJobsRequest(
-        project_id="project_id_value",
-        region="region_value",
-    )
-
-    # Make the request
-    page_result = client.list_jobs(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataproc_v1_generated_JobController_ListJobs_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_list_jobs_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_list_jobs_sync.py
deleted file mode 100644
index 4e5dc322..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_list_jobs_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobs
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_ListJobs_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_list_jobs():
-    # Create a client
-    client = dataproc_v1.JobControllerClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.ListJobsRequest(
-        project_id="project_id_value",
-        region="region_value",
-    )
-
-    # Make the request
-    page_result = client.list_jobs(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataproc_v1_generated_JobController_ListJobs_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_as_operation_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_as_operation_async.py
deleted file mode 100644
index 5ae1918c..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_as_operation_async.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SubmitJobAsOperation
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_SubmitJobAsOperation_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_submit_job_as_operation():
-    # Create a client
-    client = dataproc_v1.JobControllerAsyncClient()
-
-    # Initialize request argument(s)
-    job = dataproc_v1.Job()
-    job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    job.placement.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.SubmitJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job=job,
-    )
-
-    # Make the request
-    operation = client.submit_job_as_operation(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_SubmitJobAsOperation_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py
deleted file mode 100644
index 252bf2c0..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SubmitJobAsOperation
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_SubmitJobAsOperation_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_submit_job_as_operation():
-    # Create a client
-    client = dataproc_v1.JobControllerClient()
-
-    # Initialize request argument(s)
-    job = dataproc_v1.Job()
-    job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    job.placement.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.SubmitJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job=job,
-    )
-
-    # Make the request
-    operation = client.submit_job_as_operation(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_SubmitJobAsOperation_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_async.py
deleted file mode 100644
index 5443fa60..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_async.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SubmitJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_SubmitJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_submit_job():
-    # Create a client
-    client = dataproc_v1.JobControllerAsyncClient()
-
-    # Initialize request argument(s)
-    job = dataproc_v1.Job()
-    job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    job.placement.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.SubmitJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job=job,
-    )
-
-    # Make the request
-    response = await client.submit_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_SubmitJob_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_sync.py
deleted file mode 100644
index 6e00ec94..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_submit_job_sync.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for SubmitJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_SubmitJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_submit_job():
-    # Create a client
-    client = dataproc_v1.JobControllerClient()
-
-    # Initialize request argument(s)
-    job = dataproc_v1.Job()
-    job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    job.placement.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.SubmitJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job=job,
-    )
-
-    # Make the request
-    response = client.submit_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_SubmitJob_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_update_job_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_update_job_async.py
deleted file mode 100644
index 735d36c3..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_update_job_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_UpdateJob_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_update_job():
-    # Create a client
-    client = dataproc_v1.JobControllerAsyncClient()
-
-    # Initialize request argument(s)
-    job = dataproc_v1.Job()
-    job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    job.placement.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.UpdateJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-        job=job,
-    )
-
-    # Make the request
-    response = await client.update_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_UpdateJob_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_update_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_update_job_sync.py
deleted file mode 100644
index b14b5a6c..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_job_controller_update_job_sync.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateJob
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_JobController_UpdateJob_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_update_job():
-    # Create a client
-    client = dataproc_v1.JobControllerClient()
-
-    # Initialize request argument(s)
-    job = dataproc_v1.Job()
-    job.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    job.placement.cluster_name = "cluster_name_value"
-
-    request = dataproc_v1.UpdateJobRequest(
-        project_id="project_id_value",
-        region="region_value",
-        job_id="job_id_value",
-        job=job,
-    )
-
-    # Make the request
-    response = client.update_job(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_JobController_UpdateJob_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_create_node_group_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_create_node_group_async.py
deleted file mode 100644
index ac1398ab..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_create_node_group_async.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateNodeGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_NodeGroupController_CreateNodeGroup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_create_node_group():
-    # Create a client
-    client = dataproc_v1.NodeGroupControllerAsyncClient()
-
-    # Initialize request argument(s)
-    node_group = dataproc_v1.NodeGroup()
-    node_group.roles = ['DRIVER']
-
-    request = dataproc_v1.CreateNodeGroupRequest(
-        parent="parent_value",
-        node_group=node_group,
-    )
-
-    # Make the request
-    operation = client.create_node_group(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_NodeGroupController_CreateNodeGroup_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_create_node_group_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_create_node_group_sync.py
deleted file mode 100644
index 767a7856..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_create_node_group_sync.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateNodeGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_NodeGroupController_CreateNodeGroup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_create_node_group():
-    # Create a client
-    client = dataproc_v1.NodeGroupControllerClient()
-
-    # Initialize request argument(s)
-    node_group = dataproc_v1.NodeGroup()
-    node_group.roles = ['DRIVER']
-
-    request = dataproc_v1.CreateNodeGroupRequest(
-        parent="parent_value",
-        node_group=node_group,
-    )
-
-    # Make the request
-    operation = client.create_node_group(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_NodeGroupController_CreateNodeGroup_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_get_node_group_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_get_node_group_async.py
deleted file mode 100644
index abd64756..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_get_node_group_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetNodeGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_NodeGroupController_GetNodeGroup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_get_node_group():
-    # Create a client
-    client = dataproc_v1.NodeGroupControllerAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.GetNodeGroupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_node_group(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_NodeGroupController_GetNodeGroup_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_get_node_group_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_get_node_group_sync.py
deleted file mode 100644
index 282626e0..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_get_node_group_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetNodeGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_NodeGroupController_GetNodeGroup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_get_node_group():
-    # Create a client
-    client = dataproc_v1.NodeGroupControllerClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.GetNodeGroupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_node_group(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_NodeGroupController_GetNodeGroup_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_resize_node_group_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_resize_node_group_async.py
deleted file mode 100644
index 6c8c62f0..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_resize_node_group_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ResizeNodeGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_NodeGroupController_ResizeNodeGroup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_resize_node_group():
-    # Create a client
-    client = dataproc_v1.NodeGroupControllerAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.ResizeNodeGroupRequest(
-        name="name_value",
-        size=443,
-    )
-
-    # Make the request
-    operation = client.resize_node_group(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_NodeGroupController_ResizeNodeGroup_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_resize_node_group_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_resize_node_group_sync.py
deleted file mode 100644
index 7cfc4719..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_node_group_controller_resize_node_group_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ResizeNodeGroup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_NodeGroupController_ResizeNodeGroup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_resize_node_group():
-    # Create a client
-    client = dataproc_v1.NodeGroupControllerClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.ResizeNodeGroupRequest(
-        name="name_value",
-        size=443,
-    )
-
-    # Make the request
-    operation = client.resize_node_group(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_NodeGroupController_ResizeNodeGroup_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py
deleted file mode 100644
index 4f31e4c2..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_create_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
-
-    # Initialize request argument(s)
-    template = dataproc_v1.WorkflowTemplate()
-    template.id = "id_value"
-    template.placement.managed_cluster.cluster_name = "cluster_name_value"
-    template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    template.jobs.step_id = "step_id_value"
-
-    request = dataproc_v1.CreateWorkflowTemplateRequest(
-        parent="parent_value",
-        template=template,
-    )
-
-    # Make the request
-    response = await client.create_workflow_template(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py
deleted file mode 100644
index 30f3fd60..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_create_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceClient()
-
-    # Initialize request argument(s)
-    template = dataproc_v1.WorkflowTemplate()
-    template.id = "id_value"
-    template.placement.managed_cluster.cluster_name = "cluster_name_value"
-    template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    template.jobs.step_id = "step_id_value"
-
-    request = dataproc_v1.CreateWorkflowTemplateRequest(
-        parent="parent_value",
-        template=template,
-    )
-
-    # Make the request
-    response = client.create_workflow_template(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py
deleted file mode 100644
index 07a012f6..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py
deleted file mode 100644
index 07a012f6..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_delete_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.DeleteWorkflowTemplateRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_workflow_template(request=request)
-
-
-# [END dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py
deleted file mode 100644
index 8ebe7258..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_delete_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.DeleteWorkflowTemplateRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_workflow_template(request=request)
-
-
-# [END dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_sync]
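
The delete samples return nothing on success, so a caller cannot tell "deleted" from "was never there" by the return value; a missing template surfaces as an exception instead. A minimal sketch, assuming the standard GAPIC mapping of a missing resource to google.api_core.exceptions.NotFound:

    from google.api_core import exceptions
    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()
    request = dataproc_v1.DeleteWorkflowTemplateRequest(name="name_value")

    try:
        client.delete_workflow_template(request=request)
    except exceptions.NotFound:
        # Template already absent; treat the delete as idempotent.
        pass
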
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py
deleted file mode 100644
index eb2d4fa6..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_get_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.GetWorkflowTemplateRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_workflow_template(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py
deleted file mode 100644
index 54b5da17..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_get_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.GetWorkflowTemplateRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_workflow_template(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py
deleted file mode 100644
index ab2577b5..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for InstantiateInlineWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_instantiate_inline_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
-
-    # Initialize request argument(s)
-    template = dataproc_v1.WorkflowTemplate()
-    template.id = "id_value"
-    template.placement.managed_cluster.cluster_name = "cluster_name_value"
-    template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    template.jobs.step_id = "step_id_value"
-
-    request = dataproc_v1.InstantiateInlineWorkflowTemplateRequest(
-        parent="parent_value",
-        template=template,
-    )
-
-    # Make the request
-    operation = client.instantiate_inline_workflow_template(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py
deleted file mode 100644
index 3da97116..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for InstantiateInlineWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_instantiate_inline_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceClient()
-
-    # Initialize request argument(s)
-    template = dataproc_v1.WorkflowTemplate()
-    template.id = "id_value"
-    template.placement.managed_cluster.cluster_name = "cluster_name_value"
-    template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    template.jobs.step_id = "step_id_value"
-
-    request = dataproc_v1.InstantiateInlineWorkflowTemplateRequest(
-        parent="parent_value",
-        template=template,
-    )
-
-    # Make the request
-    operation = client.instantiate_inline_workflow_template(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py
deleted file mode 100644
index 4a65fead..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for InstantiateWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_instantiate_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.InstantiateWorkflowTemplateRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.instantiate_workflow_template(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py
deleted file mode 100644
index 976a24cc..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for InstantiateWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_instantiate_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.InstantiateWorkflowTemplateRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.instantiate_workflow_template(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_sync]
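
Both instantiate samples block on operation.result() with no bound. result() accepts a timeout in seconds and raises concurrent.futures.TimeoutError when it expires, so the wait can be bounded while keeping the operation handle for later polling; the 300-second figure below is an arbitrary illustration, not a recommendation:

    import concurrent.futures

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()
    request = dataproc_v1.InstantiateWorkflowTemplateRequest(name="name_value")

    operation = client.instantiate_workflow_template(request=request)
    try:
        # Wait at most 300 seconds (an example bound) for the workflow to finish.
        response = operation.result(timeout=300)
        print(response)
    except concurrent.futures.TimeoutError:
        # Still running; the handle can be checked again later via operation.done().
        print("Workflow not finished yet")
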
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py
deleted file mode 100644
index 897995d8..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListWorkflowTemplates
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_list_workflow_templates():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.ListWorkflowTemplatesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_workflow_templates(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py
deleted file mode 100644
index 12ae0fc7..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListWorkflowTemplates
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_list_workflow_templates():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceClient()
-
-    # Initialize request argument(s)
-    request = dataproc_v1.ListWorkflowTemplatesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_workflow_templates(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_sync]
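
The list samples flatten the pager into individual templates. The pager returned by list_workflow_templates also supports page-level iteration, which is the pattern to reach for when per-RPC page boundaries matter; a minimal sketch:

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient()
    request = dataproc_v1.ListWorkflowTemplatesRequest(parent="parent_value")

    pager = client.list_workflow_templates(request=request)
    # Each page corresponds to one ListWorkflowTemplates response.
    for page in pager.pages:
        for template in page.templates:
            print(template.id)
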
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py
deleted file mode 100644
index de8ad3c2..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-async def sample_update_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceAsyncClient()
-
-    # Initialize request argument(s)
-    template = dataproc_v1.WorkflowTemplate()
-    template.id = "id_value"
-    template.placement.managed_cluster.cluster_name = "cluster_name_value"
-    template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    template.jobs.step_id = "step_id_value"
-
-    request = dataproc_v1.UpdateWorkflowTemplateRequest(
-        template=template,
-    )
-
-    # Make the request
-    response = await client.update_workflow_template(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async]
diff --git a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py
deleted file mode 100644
index e29393bc..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateWorkflowTemplate
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-dataproc
-
-
-# [START dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataproc_v1
-
-
-def sample_update_workflow_template():
-    # Create a client
-    client = dataproc_v1.WorkflowTemplateServiceClient()
-
-    # Initialize request argument(s)
-    template = dataproc_v1.WorkflowTemplate()
-    template.id = "id_value"
-    template.placement.managed_cluster.cluster_name = "cluster_name_value"
-    template.jobs.hadoop_job.main_jar_file_uri = "main_jar_file_uri_value"
-    template.jobs.step_id = "step_id_value"
-
-    request = dataproc_v1.UpdateWorkflowTemplateRequest(
-        template=template,
-    )
-
-    # Make the request
-    response = client.update_workflow_template(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_sync]
diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json
deleted file mode 100644
index f1a48076..00000000
--- a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json
+++ /dev/null
@@ -1,5721 +0,0 @@
-{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.dataproc.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-dataproc", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", - "shortName": "AutoscalingPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.create_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "CreateAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "policy", - "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", - "shortName": "create_autoscaling_policy" - }, - "description": "Sample for CreateAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName":
"google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", - "shortName": "AutoscalingPolicyServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.create_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "CreateAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateAutoscalingPolicyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "policy", - "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", - "shortName": "create_autoscaling_policy" - }, - "description": "Sample for CreateAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_create_autoscaling_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", - "shortName": "AutoscalingPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.delete_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "DeleteAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_autoscaling_policy" - }, - "description": "Sample for DeleteAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", - "shortName": "AutoscalingPolicyServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.delete_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "DeleteAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteAutoscalingPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_autoscaling_policy" - }, - "description": "Sample for DeleteAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_delete_autoscaling_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", - "shortName": "AutoscalingPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.get_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "GetAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", - "shortName": "get_autoscaling_policy" - }, - "description": "Sample for GetAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", - "shortName": "AutoscalingPolicyServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.get_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "GetAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetAutoscalingPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", - "shortName": "get_autoscaling_policy" - }, - "description": "Sample for GetAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_get_autoscaling_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", - "shortName": "AutoscalingPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.list_autoscaling_policies", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "ListAutoscalingPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesAsyncPager", - "shortName": "list_autoscaling_policies" - }, - "description": "Sample for ListAutoscalingPolicies", - "file": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - 
}, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", - "shortName": "AutoscalingPolicyServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.list_autoscaling_policies", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "ListAutoscalingPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListAutoscalingPoliciesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers.ListAutoscalingPoliciesPager", - "shortName": "list_autoscaling_policies" - }, - "description": "Sample for ListAutoscalingPolicies", - "file": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_list_autoscaling_policies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient", - "shortName": "AutoscalingPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceAsyncClient.update_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "UpdateAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest" - }, - { - "name": "policy", - "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", - "shortName": "update_autoscaling_policy" - }, - "description": "Sample for UpdateAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient", - "shortName": "AutoscalingPolicyServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.AutoscalingPolicyServiceClient.update_autoscaling_policy", - "method": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", - "service": { - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService", - "shortName": "AutoscalingPolicyService" - }, - "shortName": "UpdateAutoscalingPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateAutoscalingPolicyRequest" - }, - { - "name": "policy", - "type": "google.cloud.dataproc_v1.types.AutoscalingPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.AutoscalingPolicy", - "shortName": "update_autoscaling_policy" - }, - "description": "Sample for UpdateAutoscalingPolicy", - "file": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_autoscaling_policy_service_update_autoscaling_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", - "shortName": "BatchControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.create_batch", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "CreateBatch" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateBatchRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "batch", - "type": "google.cloud.dataproc_v1.types.Batch" - }, - { - "name": "batch_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_batch" - }, - 
"description": "Sample for CreateBatch", - "file": "dataproc_v1_generated_batch_controller_create_batch_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_create_batch_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.BatchControllerClient", - "shortName": "BatchControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerClient.create_batch", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "CreateBatch" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateBatchRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "batch", - "type": "google.cloud.dataproc_v1.types.Batch" - }, - { - "name": "batch_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_batch" - }, - "description": "Sample for CreateBatch", - "file": "dataproc_v1_generated_batch_controller_create_batch_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_create_batch_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", - "shortName": "BatchControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.delete_batch", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "DeleteBatch" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteBatchRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_batch" - }, - "description": "Sample for DeleteBatch", - "file": "dataproc_v1_generated_batch_controller_delete_batch_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataproc_v1_generated_BatchController_DeleteBatch_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_delete_batch_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.BatchControllerClient", - "shortName": "BatchControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerClient.delete_batch", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "DeleteBatch" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteBatchRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_batch" - }, - "description": "Sample for DeleteBatch", - "file": "dataproc_v1_generated_batch_controller_delete_batch_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_BatchController_DeleteBatch_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_delete_batch_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", - "shortName": "BatchControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.get_batch", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "GetBatch" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetBatchRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Batch", - "shortName": "get_batch" - }, - "description": "Sample for GetBatch", - "file": "dataproc_v1_generated_batch_controller_get_batch_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_BatchController_GetBatch_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - 
}, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_get_batch_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.BatchControllerClient", - "shortName": "BatchControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerClient.get_batch", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "GetBatch" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetBatchRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Batch", - "shortName": "get_batch" - }, - "description": "Sample for GetBatch", - "file": "dataproc_v1_generated_batch_controller_get_batch_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_BatchController_GetBatch_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_get_batch_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient", - "shortName": "BatchControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerAsyncClient.list_batches", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "ListBatches" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListBatchesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesAsyncPager", - "shortName": "list_batches" - }, - "description": "Sample for ListBatches", - "file": "dataproc_v1_generated_batch_controller_list_batches_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_BatchController_ListBatches_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_list_batches_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dataproc_v1.BatchControllerClient", - "shortName": "BatchControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.BatchControllerClient.list_batches", - "method": { - "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", - "service": { - "fullName": "google.cloud.dataproc.v1.BatchController", - "shortName": "BatchController" - }, - "shortName": "ListBatches" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListBatchesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.batch_controller.pagers.ListBatchesPager", - "shortName": "list_batches" - }, - "description": "Sample for ListBatches", - "file": "dataproc_v1_generated_batch_controller_list_batches_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_BatchController_ListBatches_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_batch_controller_list_batches_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.create_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "CreateCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster", - "type": "google.cloud.dataproc_v1.types.Cluster" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_cluster" - }, - "description": "Sample for CreateCluster", - "file": "dataproc_v1_generated_cluster_controller_create_cluster_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_CreateCluster_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_create_cluster_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": "ClusterControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.create_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "CreateCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster", - "type": "google.cloud.dataproc_v1.types.Cluster" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_cluster" - }, - "description": "Sample for CreateCluster", - "file": "dataproc_v1_generated_cluster_controller_create_cluster_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_CreateCluster_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_create_cluster_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.delete_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "DeleteCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_cluster" - }, - "description": "Sample for DeleteCluster", - "file": "dataproc_v1_generated_cluster_controller_delete_cluster_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_delete_cluster_async.py" - }, - { - 
"canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": "ClusterControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.delete_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "DeleteCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_cluster" - }, - "description": "Sample for DeleteCluster", - "file": "dataproc_v1_generated_cluster_controller_delete_cluster_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_delete_cluster_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.diagnose_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "DiagnoseCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DiagnoseClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "diagnose_cluster" - }, - "description": "Sample for DiagnoseCluster", - "file": "dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataproc_v1_generated_cluster_controller_diagnose_cluster_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": "ClusterControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.diagnose_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "DiagnoseCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DiagnoseClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "diagnose_cluster" - }, - "description": "Sample for DiagnoseCluster", - "file": "dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_diagnose_cluster_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.get_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "GetCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Cluster", - "shortName": "get_cluster" - }, - "description": "Sample for GetCluster", - "file": "dataproc_v1_generated_cluster_controller_get_cluster_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - 
} - ], - "title": "dataproc_v1_generated_cluster_controller_get_cluster_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": "ClusterControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.get_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "GetCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Cluster", - "shortName": "get_cluster" - }, - "description": "Sample for GetCluster", - "file": "dataproc_v1_generated_cluster_controller_get_cluster_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_get_cluster_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.list_clusters", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "ListClusters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListClustersRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersAsyncPager", - "shortName": "list_clusters" - }, - "description": "Sample for ListClusters", - "file": "dataproc_v1_generated_cluster_controller_list_clusters_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_list_clusters_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": "ClusterControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.list_clusters", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "ListClusters" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListClustersRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.cluster_controller.pagers.ListClustersPager", - "shortName": "list_clusters" - }, - "description": "Sample for ListClusters", - "file": "dataproc_v1_generated_cluster_controller_list_clusters_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_list_clusters_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.start_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "StartCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.StartClusterRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "start_cluster" - }, - "description": "Sample for StartCluster", - "file": "dataproc_v1_generated_cluster_controller_start_cluster_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataproc_v1_generated_cluster_controller_start_cluster_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": "ClusterControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.start_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "StartCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.StartClusterRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "start_cluster" - }, - "description": "Sample for StartCluster", - "file": "dataproc_v1_generated_cluster_controller_start_cluster_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_start_cluster_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.stop_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "StopCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.StopClusterRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "stop_cluster" - }, - "description": "Sample for StopCluster", - "file": "dataproc_v1_generated_cluster_controller_stop_cluster_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_stop_cluster_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": "ClusterControllerClient" - }, - "fullName": 
"google.cloud.dataproc_v1.ClusterControllerClient.stop_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "StopCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.StopClusterRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "stop_cluster" - }, - "description": "Sample for StopCluster", - "file": "dataproc_v1_generated_cluster_controller_stop_cluster_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_stop_cluster_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient", - "shortName": "ClusterControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerAsyncClient.update_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "UpdateCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "cluster", - "type": "google.cloud.dataproc_v1.types.Cluster" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_cluster" - }, - "description": "Sample for UpdateCluster", - "file": "dataproc_v1_generated_cluster_controller_update_cluster_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_update_cluster_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient", - "shortName": 
"ClusterControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.ClusterControllerClient.update_cluster", - "method": { - "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", - "service": { - "fullName": "google.cloud.dataproc.v1.ClusterController", - "shortName": "ClusterController" - }, - "shortName": "UpdateCluster" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateClusterRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "cluster_name", - "type": "str" - }, - { - "name": "cluster", - "type": "google.cloud.dataproc_v1.types.Cluster" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_cluster" - }, - "description": "Sample for UpdateCluster", - "file": "dataproc_v1_generated_cluster_controller_update_cluster_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_cluster_controller_update_cluster_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", - "shortName": "JobControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.cancel_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CancelJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "dataproc_v1_generated_job_controller_cancel_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_CancelJob_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_cancel_job_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerClient", - "shortName": "JobControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerClient.cancel_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CancelJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "dataproc_v1_generated_job_controller_cancel_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_CancelJob_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_cancel_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", - "shortName": "JobControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.delete_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "dataproc_v1_generated_job_controller_delete_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_DeleteJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_delete_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerClient", - "shortName": "JobControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerClient.delete_job", - "method": 
{ - "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "dataproc_v1_generated_job_controller_delete_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_DeleteJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_delete_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", - "shortName": "JobControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.get_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.GetJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "dataproc_v1_generated_job_controller_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_GetJob_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerClient", - "shortName": "JobControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerClient.get_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.GetJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - 
"type": "google.cloud.dataproc_v1.types.GetJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "dataproc_v1_generated_job_controller_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_GetJob_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", - "shortName": "JobControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.list_jobs", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListJobsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "dataproc_v1_generated_job_controller_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_ListJobs_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerClient", - "shortName": "JobControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerClient.list_jobs", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListJobsRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": 
"region", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.job_controller.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "dataproc_v1_generated_job_controller_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_ListJobs_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_list_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", - "shortName": "JobControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.submit_job_as_operation", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "SubmitJobAsOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.dataproc_v1.types.Job" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "submit_job_as_operation" - }, - "description": "Sample for SubmitJobAsOperation", - "file": "dataproc_v1_generated_job_controller_submit_job_as_operation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_submit_job_as_operation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerClient", - "shortName": "JobControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerClient.submit_job_as_operation", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "SubmitJobAsOperation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" - }, - 
{ - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.dataproc_v1.types.Job" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "submit_job_as_operation" - }, - "description": "Sample for SubmitJobAsOperation", - "file": "dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_submit_job_as_operation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", - "shortName": "JobControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.submit_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "SubmitJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" - }, - { - "name": "project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.dataproc_v1.types.Job" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": "submit_job" - }, - "description": "Sample for SubmitJob", - "file": "dataproc_v1_generated_job_controller_submit_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_SubmitJob_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_submit_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerClient", - "shortName": "JobControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerClient.submit_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "SubmitJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.SubmitJobRequest" - }, - { - "name": 
"project_id", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.dataproc_v1.types.Job" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": "submit_job" - }, - "description": "Sample for SubmitJob", - "file": "dataproc_v1_generated_job_controller_submit_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_SubmitJob_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_submit_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient", - "shortName": "JobControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerAsyncClient.update_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "UpdateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": "update_job" - }, - "description": "Sample for UpdateJob", - "file": "dataproc_v1_generated_job_controller_update_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_UpdateJob_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_update_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.JobControllerClient", - "shortName": "JobControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.JobControllerClient.update_job", - "method": { - "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", - "service": { - "fullName": "google.cloud.dataproc.v1.JobController", - "shortName": "JobController" - }, - "shortName": "UpdateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.Job", - "shortName": 
"update_job" - }, - "description": "Sample for UpdateJob", - "file": "dataproc_v1_generated_job_controller_update_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_JobController_UpdateJob_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_job_controller_update_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerAsyncClient", - "shortName": "NodeGroupControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerAsyncClient.create_node_group", - "method": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController.CreateNodeGroup", - "service": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController", - "shortName": "NodeGroupController" - }, - "shortName": "CreateNodeGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateNodeGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "node_group", - "type": "google.cloud.dataproc_v1.types.NodeGroup" - }, - { - "name": "node_group_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_node_group" - }, - "description": "Sample for CreateNodeGroup", - "file": "dataproc_v1_generated_node_group_controller_create_node_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_NodeGroupController_CreateNodeGroup_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_node_group_controller_create_node_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerClient", - "shortName": "NodeGroupControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerClient.create_node_group", - "method": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController.CreateNodeGroup", - "service": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController", - "shortName": "NodeGroupController" - }, - "shortName": "CreateNodeGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateNodeGroupRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "node_group", - "type": "google.cloud.dataproc_v1.types.NodeGroup" - }, - { - "name": "node_group_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_node_group" - }, - "description": "Sample for CreateNodeGroup", - "file": "dataproc_v1_generated_node_group_controller_create_node_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_NodeGroupController_CreateNodeGroup_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_node_group_controller_create_node_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerAsyncClient", - "shortName": "NodeGroupControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerAsyncClient.get_node_group", - "method": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController.GetNodeGroup", - "service": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController", - "shortName": "NodeGroupController" - }, - "shortName": "GetNodeGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetNodeGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.NodeGroup", - "shortName": "get_node_group" - }, - "description": "Sample for GetNodeGroup", - "file": "dataproc_v1_generated_node_group_controller_get_node_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_NodeGroupController_GetNodeGroup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_node_group_controller_get_node_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerClient", - "shortName": "NodeGroupControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerClient.get_node_group", - "method": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController.GetNodeGroup", - "service": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController", - "shortName": "NodeGroupController" - }, - "shortName": "GetNodeGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetNodeGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.NodeGroup", - 
"shortName": "get_node_group" - }, - "description": "Sample for GetNodeGroup", - "file": "dataproc_v1_generated_node_group_controller_get_node_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_NodeGroupController_GetNodeGroup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_node_group_controller_get_node_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerAsyncClient", - "shortName": "NodeGroupControllerAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerAsyncClient.resize_node_group", - "method": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController.ResizeNodeGroup", - "service": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController", - "shortName": "NodeGroupController" - }, - "shortName": "ResizeNodeGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ResizeNodeGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "size", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "resize_node_group" - }, - "description": "Sample for ResizeNodeGroup", - "file": "dataproc_v1_generated_node_group_controller_resize_node_group_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_NodeGroupController_ResizeNodeGroup_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_node_group_controller_resize_node_group_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerClient", - "shortName": "NodeGroupControllerClient" - }, - "fullName": "google.cloud.dataproc_v1.NodeGroupControllerClient.resize_node_group", - "method": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController.ResizeNodeGroup", - "service": { - "fullName": "google.cloud.dataproc.v1.NodeGroupController", - "shortName": "NodeGroupController" - }, - "shortName": "ResizeNodeGroup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ResizeNodeGroupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "size", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": 
"resize_node_group" - }, - "description": "Sample for ResizeNodeGroup", - "file": "dataproc_v1_generated_node_group_controller_resize_node_group_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_NodeGroupController_ResizeNodeGroup_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_node_group_controller_resize_node_group_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", - "shortName": "WorkflowTemplateServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.create_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "CreateWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "template", - "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", - "shortName": "create_workflow_template" - }, - "description": "Sample for CreateWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_create_workflow_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", - "shortName": "WorkflowTemplateServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.create_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "CreateWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.CreateWorkflowTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "template", - "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", - "shortName": "create_workflow_template" - }, - "description": "Sample for CreateWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_create_workflow_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", - "shortName": "WorkflowTemplateServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.delete_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "DeleteWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_workflow_template" - }, - "description": "Sample for DeleteWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", - "shortName": "WorkflowTemplateServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.delete_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "DeleteWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.DeleteWorkflowTemplateRequest" - }, - { - "name": "name", - 
"type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_workflow_template" - }, - "description": "Sample for DeleteWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_delete_workflow_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", - "shortName": "WorkflowTemplateServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.get_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "GetWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", - "shortName": "get_workflow_template" - }, - "description": "Sample for GetWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_get_workflow_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", - "shortName": "WorkflowTemplateServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.get_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "GetWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.GetWorkflowTemplateRequest" - }, - { - "name": "name", - "type": "str" 
- }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", - "shortName": "get_workflow_template" - }, - "description": "Sample for GetWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_get_workflow_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", - "shortName": "WorkflowTemplateServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.instantiate_inline_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "InstantiateInlineWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "template", - "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "instantiate_inline_workflow_template" - }, - "description": "Sample for InstantiateInlineWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", - "shortName": "WorkflowTemplateServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.instantiate_inline_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", - 
"service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "InstantiateInlineWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.InstantiateInlineWorkflowTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "template", - "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "instantiate_inline_workflow_template" - }, - "description": "Sample for InstantiateInlineWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_instantiate_inline_workflow_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", - "shortName": "WorkflowTemplateServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.instantiate_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "InstantiateWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "parameters", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "instantiate_workflow_template" - }, - "description": "Sample for InstantiateWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", - "shortName": "WorkflowTemplateServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.instantiate_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "InstantiateWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "parameters", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "instantiate_workflow_template" - }, - "description": "Sample for InstantiateWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_instantiate_workflow_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", - "shortName": "WorkflowTemplateServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.list_workflow_templates", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "ListWorkflowTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesAsyncPager", - "shortName": "list_workflow_templates" - }, - "description": "Sample for ListWorkflowTemplates", - "file": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", - "shortName": "WorkflowTemplateServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.list_workflow_templates", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "ListWorkflowTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.ListWorkflowTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.services.workflow_template_service.pagers.ListWorkflowTemplatesPager", - "shortName": "list_workflow_templates" - }, - "description": "Sample for ListWorkflowTemplates", - "file": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_list_workflow_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient", - "shortName": "WorkflowTemplateServiceAsyncClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceAsyncClient.update_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "UpdateWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest" - }, - { - "name": "template", - "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", - "shortName": "update_workflow_template" - }, - "description": "Sample for UpdateWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_update_workflow_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient", - "shortName": "WorkflowTemplateServiceClient" - }, - "fullName": "google.cloud.dataproc_v1.WorkflowTemplateServiceClient.update_workflow_template", - "method": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", - "service": { - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService", - "shortName": "WorkflowTemplateService" - }, - "shortName": "UpdateWorkflowTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataproc_v1.types.UpdateWorkflowTemplateRequest" - }, - { - "name": "template", - "type": "google.cloud.dataproc_v1.types.WorkflowTemplate" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataproc_v1.types.WorkflowTemplate", - "shortName": "update_workflow_template" - }, - "description": "Sample for UpdateWorkflowTemplate", - "file": "dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataproc_v1_generated_workflow_template_service_update_workflow_template_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1/scripts/fixup_dataproc_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_dataproc_v1_keywords.py deleted file mode 100644 index 593c9de3..00000000 --- a/owl-bot-staging/v1/scripts/fixup_dataproc_v1_keywords.py +++ /dev/null @@ -1,209 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import argparse
-import os
-import libcst as cst
-import pathlib
-import sys
-from typing import (Any, Callable, Dict, List, Sequence, Tuple)
-
-
-def partition(
-    predicate: Callable[[Any], bool],
-    iterator: Sequence[Any]
-) -> Tuple[List[Any], List[Any]]:
-    """A stable, out-of-place partition."""
-    results = ([], [])
-
-    for i in iterator:
-        results[int(predicate(i))].append(i)
-
-    # Returns trueList, falseList
-    return results[1], results[0]
-
-
-class dataprocCallTransformer(cst.CSTTransformer):
-    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
-    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
-        'cancel_job': ('project_id', 'region', 'job_id', ),
-        'create_autoscaling_policy': ('parent', 'policy', ),
-        'create_batch': ('parent', 'batch', 'batch_id', 'request_id', ),
-        'create_cluster': ('project_id', 'region', 'cluster', 'request_id', 'action_on_failed_primary_workers', ),
-        'create_node_group': ('parent', 'node_group', 'node_group_id', 'request_id', ),
-        'create_workflow_template': ('parent', 'template', ),
-        'delete_autoscaling_policy': ('name', ),
-        'delete_batch': ('name', ),
-        'delete_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ),
-        'delete_job': ('project_id', 'region', 'job_id', ),
-        'delete_workflow_template': ('name', 'version', ),
-        'diagnose_cluster': ('project_id', 'region', 'cluster_name', ),
-        'get_autoscaling_policy': ('name', ),
-        'get_batch': ('name', ),
-        'get_cluster': ('project_id', 'region', 'cluster_name', ),
-        'get_job': ('project_id', 'region', 'job_id', ),
-        'get_node_group': ('name', ),
-        'get_workflow_template': ('name', 'version', ),
-        'instantiate_inline_workflow_template': ('parent', 'template', 'request_id', ),
-        'instantiate_workflow_template': ('name', 'version', 'request_id', 'parameters', ),
-        'list_autoscaling_policies': ('parent', 'page_size', 'page_token', ),
-        'list_batches': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ),
-        'list_clusters': ('project_id', 'region', 'filter', 'page_size', 'page_token', ),
-        'list_jobs': ('project_id', 'region', 'page_size', 'page_token', 'cluster_name', 'job_state_matcher', 'filter', ),
-        'list_workflow_templates': ('parent', 'page_size', 'page_token', ),
-        'resize_node_group': ('name', 'size', 'request_id', 'graceful_decommission_timeout', ),
-        'start_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ),
-        'stop_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ),
-        'submit_job': ('project_id', 'region', 'job', 'request_id', ),
-        'submit_job_as_operation': ('project_id', 'region', 'job', 'request_id', ),
-        'update_autoscaling_policy': ('policy', ),
-        'update_cluster': ('project_id', 'region', 'cluster_name', 'cluster', 'update_mask', 'graceful_decommission_timeout', 'request_id', ),
-        'update_job': ('project_id', 'region', 'job_id', 'job', 'update_mask', ),
-        'update_workflow_template': ('template', ),
-    }
-
-    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
-        try:
-            key = original.func.attr.value
-            kword_params = self.METHOD_TO_PARAMS[key]
-        except (AttributeError, KeyError):
-            # Either not a method from the API or too convoluted to be sure.
-            return updated
-
-        # If the existing code is valid, keyword args come after positional args.
-        # Therefore, all positional args must map to the first parameters.
-        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
-        if any(k.keyword.value == "request" for k in kwargs):
-            # We've already fixed this file, don't fix it again.
-            return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=dataprocCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the dataproc client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also detect false
-      positives when an API method shares a name with another method.
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index bb92c15a..00000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dataproc' - - -description = "Google Cloud Dataproc API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/dataproc/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", -] -url = "https://github.com/googleapis/python-dataproc" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google", "google.cloud"] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - 
"Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt deleted file mode 100644 index 2beecf99..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 -grpc-google-iam-v1==0.12.4 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa5..00000000 --- a/owl-bot-staging/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/__init__.py deleted file mode 100644 index 1b4db446..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
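The last deleted file, below, unit-tests client construction and endpoint selection for the autoscaling policy service. For reference, the construction pattern those tests exercise looks roughly like this; the key-file path is a hypothetical placeholder:

    from google.cloud import dataproc_v1

    # from_service_account_file (and its from_service_account_json alias) take a
    # service account key path plus an optional transport name
    # ("grpc", "grpc_asyncio", or "rest").
    client = dataproc_v1.AutoscalingPolicyServiceClient.from_service_account_file(
        "path/to/service_account.json",
        transport="grpc",
    )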
-# diff --git a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py deleted file mode 100644 index e2c89e3e..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ /dev/null @@ -1,5146 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataproc_v1.services.autoscaling_policy_service import AutoscalingPolicyServiceAsyncClient -from google.cloud.dataproc_v1.services.autoscaling_policy_service import AutoscalingPolicyServiceClient -from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers -from google.cloud.dataproc_v1.services.autoscaling_policy_service import transports -from google.cloud.dataproc_v1.types import autoscaling_policies -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AutoscalingPolicyServiceClient._get_default_mtls_endpoint(None) is None - assert AutoscalingPolicyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AutoscalingPolicyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AutoscalingPolicyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AutoscalingPolicyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AutoscalingPolicyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AutoscalingPolicyServiceClient, "grpc"), - (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), - (AutoscalingPolicyServiceClient, "rest"), -]) -def test_autoscaling_policy_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AutoscalingPolicyServiceGrpcTransport, "grpc"), - (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AutoscalingPolicyServiceRestTransport, "rest"), -]) -def test_autoscaling_policy_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AutoscalingPolicyServiceClient, "grpc"), - (AutoscalingPolicyServiceAsyncClient, "grpc_asyncio"), - (AutoscalingPolicyServiceClient, "rest"), -]) -def test_autoscaling_policy_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -def test_autoscaling_policy_service_client_get_transport_class(): - transport = AutoscalingPolicyServiceClient.get_transport_class() - available_transports = [ - transports.AutoscalingPolicyServiceGrpcTransport, - transports.AutoscalingPolicyServiceRestTransport, - ] - assert transport in available_transports - - transport = AutoscalingPolicyServiceClient.get_transport_class("grpc") - assert transport == transports.AutoscalingPolicyServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceGrpcTransport, "grpc"), - (AutoscalingPolicyServiceAsyncClient, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceRestTransport, "rest"), -]) -@mock.patch.object(AutoscalingPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalingPolicyServiceClient)) -@mock.patch.object(AutoscalingPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalingPolicyServiceAsyncClient)) -def test_autoscaling_policy_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AutoscalingPolicyServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AutoscalingPolicyServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceGrpcTransport, "grpc", "true"), - (AutoscalingPolicyServiceAsyncClient, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceGrpcTransport, "grpc", "false"), - (AutoscalingPolicyServiceAsyncClient, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceRestTransport, "rest", "true"), - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AutoscalingPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalingPolicyServiceClient)) -@mock.patch.object(AutoscalingPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalingPolicyServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_autoscaling_policy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
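-    # With no certificate available from either source, the client should fall
-    # back to the plain endpoint and leave client_cert_source_for_mtls unset.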
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient -]) -@mock.patch.object(AutoscalingPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalingPolicyServiceClient)) -@mock.patch.object(AutoscalingPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoscalingPolicyServiceAsyncClient)) -def test_autoscaling_policy_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
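-    # "auto" combined with a discoverable default cert should resolve to the
-    # mTLS endpoint and pick up that cert source.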
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceGrpcTransport, "grpc"), - (AutoscalingPolicyServiceAsyncClient, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceRestTransport, "rest"), -]) -def test_autoscaling_policy_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceGrpcTransport, "grpc", grpc_helpers), - (AutoscalingPolicyServiceAsyncClient, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceRestTransport, "rest", None), -]) -def test_autoscaling_policy_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
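-    # The file path should be forwarded to the transport as-is; no credentials
-    # object is constructed at client-creation time.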
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_autoscaling_policy_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AutoscalingPolicyServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceGrpcTransport, "grpc", grpc_helpers), - (AutoscalingPolicyServiceAsyncClient, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_autoscaling_policy_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
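-    # google.auth.load_credentials_from_file is patched below, so the
-    # "credentials.json" path never has to exist on disk.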
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.CreateAutoscalingPolicyRequest, - dict, -]) -def test_create_autoscaling_policy(request_type, transport: str = 'grpc'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - ) - response = client.create_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -def test_create_autoscaling_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_autoscaling_policy), - '__call__') as call: - client.create_autoscaling_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() - -@pytest.mark.asyncio -async def test_create_autoscaling_policy_async(transport: str = 'grpc_asyncio', request_type=autoscaling_policies.CreateAutoscalingPolicyRequest): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
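-    # The async client awaits the stub, so the canned response is wrapped in
-    # FakeUnaryUnaryCall to make it awaitable.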
- with mock.patch.object( - type(client.transport.create_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - )) - response = await client.create_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_autoscaling_policy_async_from_dict(): - await test_create_autoscaling_policy_async(request_type=dict) - - -def test_create_autoscaling_policy_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.CreateAutoscalingPolicyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_autoscaling_policy), - '__call__') as call: - call.return_value = autoscaling_policies.AutoscalingPolicy() - client.create_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_autoscaling_policy_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.CreateAutoscalingPolicyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_autoscaling_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy()) - await client.create_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_autoscaling_policy_flattened(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
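-        # The flattened arguments should be copied into the request message that
-        # reaches the transport, which the assertions below verify field by field.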
- client.create_autoscaling_policy( - parent='parent_value', - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].policy - mock_val = autoscaling_policies.AutoscalingPolicy(id='id_value') - assert arg == mock_val - - -def test_create_autoscaling_policy_flattened_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_autoscaling_policy( - autoscaling_policies.CreateAutoscalingPolicyRequest(), - parent='parent_value', - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - -@pytest.mark.asyncio -async def test_create_autoscaling_policy_flattened_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_autoscaling_policy( - parent='parent_value', - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].policy - mock_val = autoscaling_policies.AutoscalingPolicy(id='id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_autoscaling_policy_flattened_error_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_autoscaling_policy( - autoscaling_policies.CreateAutoscalingPolicyRequest(), - parent='parent_value', - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.UpdateAutoscalingPolicyRequest, - dict, -]) -def test_update_autoscaling_policy(request_type, transport: str = 'grpc'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
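-        # Populate id and name so the test can check they propagate to the response.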
- call.return_value = autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - ) - response = client.update_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -def test_update_autoscaling_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_autoscaling_policy), - '__call__') as call: - client.update_autoscaling_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() - -@pytest.mark.asyncio -async def test_update_autoscaling_policy_async(transport: str = 'grpc_asyncio', request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - )) - response = await client.update_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_update_autoscaling_policy_async_from_dict(): - await test_update_autoscaling_policy_async(request_type=dict) - - -def test_update_autoscaling_policy_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.UpdateAutoscalingPolicyRequest() - - request.policy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_autoscaling_policy), - '__call__') as call: - call.return_value = autoscaling_policies.AutoscalingPolicy() - client.update_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'policy.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_autoscaling_policy_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.UpdateAutoscalingPolicyRequest() - - request.policy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_autoscaling_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy()) - await client.update_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'policy.name=name_value', - ) in kw['metadata'] - - -def test_update_autoscaling_policy_flattened(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_autoscaling_policy( - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].policy - mock_val = autoscaling_policies.AutoscalingPolicy(id='id_value') - assert arg == mock_val - - -def test_update_autoscaling_policy_flattened_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_autoscaling_policy( - autoscaling_policies.UpdateAutoscalingPolicyRequest(), - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - -@pytest.mark.asyncio -async def test_update_autoscaling_policy_flattened_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.update_autoscaling_policy( - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].policy - mock_val = autoscaling_policies.AutoscalingPolicy(id='id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_autoscaling_policy_flattened_error_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_autoscaling_policy( - autoscaling_policies.UpdateAutoscalingPolicyRequest(), - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.GetAutoscalingPolicyRequest, - dict, -]) -def test_get_autoscaling_policy(request_type, transport: str = 'grpc'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - ) - response = client.get_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -def test_get_autoscaling_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_autoscaling_policy), - '__call__') as call: - client.get_autoscaling_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() - -@pytest.mark.asyncio -async def test_get_autoscaling_policy_async(transport: str = 'grpc_asyncio', request_type=autoscaling_policies.GetAutoscalingPolicyRequest): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - )) - response = await client.get_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_autoscaling_policy_async_from_dict(): - await test_get_autoscaling_policy_async(request_type=dict) - - -def test_get_autoscaling_policy_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.GetAutoscalingPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_autoscaling_policy), - '__call__') as call: - call.return_value = autoscaling_policies.AutoscalingPolicy() - client.get_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_autoscaling_policy_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.GetAutoscalingPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_autoscaling_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy()) - await client.get_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_autoscaling_policy_flattened(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_autoscaling_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_autoscaling_policy_flattened_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_autoscaling_policy( - autoscaling_policies.GetAutoscalingPolicyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_autoscaling_policy_flattened_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.AutoscalingPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.AutoscalingPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_autoscaling_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_autoscaling_policy_flattened_error_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_autoscaling_policy( - autoscaling_policies.GetAutoscalingPolicyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.ListAutoscalingPoliciesRequest, - dict, -]) -def test_list_autoscaling_policies(request_type, transport: str = 'grpc'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_autoscaling_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutoscalingPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_autoscaling_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
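-    # Calling with no arguments should send a default-constructed request.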
- client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - client.list_autoscaling_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() - -@pytest.mark.asyncio -async def test_list_autoscaling_policies_async(transport: str = 'grpc_asyncio', request_type=autoscaling_policies.ListAutoscalingPoliciesRequest): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.ListAutoscalingPoliciesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_autoscaling_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutoscalingPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_autoscaling_policies_async_from_dict(): - await test_list_autoscaling_policies_async(request_type=dict) - - -def test_list_autoscaling_policies_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.ListAutoscalingPoliciesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - client.list_autoscaling_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_autoscaling_policies_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.ListAutoscalingPoliciesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.ListAutoscalingPoliciesResponse()) - await client.list_autoscaling_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_autoscaling_policies_flattened(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_autoscaling_policies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_autoscaling_policies_flattened_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_autoscaling_policies( - autoscaling_policies.ListAutoscalingPoliciesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_autoscaling_policies_flattened_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(autoscaling_policies.ListAutoscalingPoliciesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_autoscaling_policies( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_autoscaling_policies_flattened_error_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
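-    # Mixing the two request styles is ambiguous, so the client rejects it outright.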
- with pytest.raises(ValueError): - await client.list_autoscaling_policies( - autoscaling_policies.ListAutoscalingPoliciesRequest(), - parent='parent_value', - ) - - -def test_list_autoscaling_policies_pager(transport_name: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='abc', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[], - next_page_token='def', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='ghi', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_autoscaling_policies(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, autoscaling_policies.AutoscalingPolicy) - for i in results) -def test_list_autoscaling_policies_pages(transport_name: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='abc', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[], - next_page_token='def', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='ghi', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_autoscaling_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_autoscaling_policies_async_pager(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
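-        # Each stub invocation consumes the next canned page; the trailing
-        # RuntimeError would only surface if the pager requested a page past the end.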
- call.side_effect = ( - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='abc', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[], - next_page_token='def', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='ghi', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_autoscaling_policies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, autoscaling_policies.AutoscalingPolicy) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_autoscaling_policies_async_pages(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_autoscaling_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='abc', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[], - next_page_token='def', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='ghi', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_autoscaling_policies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.DeleteAutoscalingPolicyRequest, - dict, -]) -def test_delete_autoscaling_policy(request_type, transport: str = 'grpc'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_autoscaling_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_autoscaling_policy), - '__call__') as call: - client.delete_autoscaling_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() - -@pytest.mark.asyncio -async def test_delete_autoscaling_policy_async(transport: str = 'grpc_asyncio', request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_autoscaling_policy_async_from_dict(): - await test_delete_autoscaling_policy_async(request_type=dict) - - -def test_delete_autoscaling_policy_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = autoscaling_policies.DeleteAutoscalingPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_autoscaling_policy), - '__call__') as call: - call.return_value = None - client.delete_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_autoscaling_policy_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
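-    # The value is mirrored into x-goog-request-params metadata so the server
-    # can route the request without inspecting the body.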
- request = autoscaling_policies.DeleteAutoscalingPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_autoscaling_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_autoscaling_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_autoscaling_policy_flattened(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_autoscaling_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_autoscaling_policy_flattened_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_autoscaling_policy( - autoscaling_policies.DeleteAutoscalingPolicyRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_autoscaling_policy_flattened_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_autoscaling_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_autoscaling_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_autoscaling_policy_flattened_error_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_autoscaling_policy( - autoscaling_policies.DeleteAutoscalingPolicyRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.CreateAutoscalingPolicyRequest, - dict, -]) -def test_create_autoscaling_policy_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["policy"] = {'id': 'id_value', 'name': 'name_value', 'basic_algorithm': {'yarn_config': {'graceful_decommission_timeout': {'seconds': 751, 'nanos': 543}, 'scale_up_factor': 0.1578, 'scale_down_factor': 0.1789, 'scale_up_min_worker_fraction': 0.2973, 'scale_down_min_worker_fraction': 0.3184}, 'cooldown_period': {}}, 'worker_config': {'min_instances': 1387, 'max_instances': 1389, 'weight': 648}, 'secondary_worker_config': {}, 'labels': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_autoscaling_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -def test_create_autoscaling_policy_rest_required_fields(request_type=autoscaling_policies.CreateAutoscalingPolicyRequest): - transport_class = transports.AutoscalingPolicyServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.AutoscalingPolicy() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.create_autoscaling_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_create_autoscaling_policy_rest_unset_required_fields():
- transport = transports.AutoscalingPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.create_autoscaling_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("parent", "policy", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_autoscaling_policy_rest_interceptors(null_interceptor):
- transport = transports.AutoscalingPolicyServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AutoscalingPolicyServiceRestInterceptor(),
- )
- client = AutoscalingPolicyServiceClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "post_create_autoscaling_policy") as post, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "pre_create_autoscaling_policy") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = autoscaling_policies.CreateAutoscalingPolicyRequest.pb(autoscaling_policies.CreateAutoscalingPolicyRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = autoscaling_policies.AutoscalingPolicy.to_json(autoscaling_policies.AutoscalingPolicy())
-
- request = autoscaling_policies.CreateAutoscalingPolicyRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = autoscaling_policies.AutoscalingPolicy()
-
- client.create_autoscaling_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_create_autoscaling_policy_rest_bad_request(transport: str = 'rest', request_type=autoscaling_policies.CreateAutoscalingPolicyRequest):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request_init["policy"] = {'id': 'id_value', 'name': 'name_value', 'basic_algorithm': {'yarn_config': {'graceful_decommission_timeout': {'seconds': 751, 'nanos': 543}, 'scale_up_factor': 0.1578, 'scale_down_factor': 0.1789, 'scale_up_min_worker_fraction': 0.2973, 'scale_down_min_worker_fraction': 0.3184}, 'cooldown_period': {}}, 'worker_config': {'min_instances': 1387, 'max_instances': 1389, 'weight': 648}, 'secondary_worker_config': {}, 'labels': {}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.create_autoscaling_policy(request)
-
-
-def test_create_autoscaling_policy_rest_flattened():
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = autoscaling_policies.AutoscalingPolicy()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- policy=autoscaling_policies.AutoscalingPolicy(id='id_value'),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- client.create_autoscaling_policy(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autoscalingPolicies" % client.transport._host, args[1])
-
-
-def test_create_autoscaling_policy_rest_flattened_error(transport: str = 'rest'):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.create_autoscaling_policy( - autoscaling_policies.CreateAutoscalingPolicyRequest(), - parent='parent_value', - policy=autoscaling_policies.AutoscalingPolicy(id='id_value'), - ) - - -def test_create_autoscaling_policy_rest_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.UpdateAutoscalingPolicyRequest, - dict, -]) -def test_update_autoscaling_policy_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'policy': {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'}} - request_init["policy"] = {'id': 'id_value', 'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3', 'basic_algorithm': {'yarn_config': {'graceful_decommission_timeout': {'seconds': 751, 'nanos': 543}, 'scale_up_factor': 0.1578, 'scale_down_factor': 0.1789, 'scale_up_min_worker_fraction': 0.2973, 'scale_down_min_worker_fraction': 0.3184}, 'cooldown_period': {}}, 'worker_config': {'min_instances': 1387, 'max_instances': 1389, 'weight': 648}, 'secondary_worker_config': {}, 'labels': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_autoscaling_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -def test_update_autoscaling_policy_rest_required_fields(request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest): - transport_class = transports.AutoscalingPolicyServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.AutoscalingPolicy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "put",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.update_autoscaling_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_autoscaling_policy_rest_unset_required_fields():
- transport = transports.AutoscalingPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.update_autoscaling_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("policy", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_autoscaling_policy_rest_interceptors(null_interceptor):
- transport = transports.AutoscalingPolicyServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AutoscalingPolicyServiceRestInterceptor(),
- )
- client = AutoscalingPolicyServiceClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "post_update_autoscaling_policy") as post, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "pre_update_autoscaling_policy") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = autoscaling_policies.UpdateAutoscalingPolicyRequest.pb(autoscaling_policies.UpdateAutoscalingPolicyRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = autoscaling_policies.AutoscalingPolicy.to_json(autoscaling_policies.AutoscalingPolicy())
-
- request = autoscaling_policies.UpdateAutoscalingPolicyRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = autoscaling_policies.AutoscalingPolicy()
-
- client.update_autoscaling_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_update_autoscaling_policy_rest_bad_request(transport: str = 'rest', request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'policy': {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'}}
- request_init["policy"] = {'id': 'id_value', 'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3', 'basic_algorithm': {'yarn_config': {'graceful_decommission_timeout': {'seconds': 751, 'nanos': 543}, 'scale_up_factor': 0.1578, 'scale_down_factor': 0.1789, 'scale_up_min_worker_fraction': 0.2973, 'scale_down_min_worker_fraction': 0.3184}, 'cooldown_period': {}}, 'worker_config': {'min_instances': 1387, 'max_instances': 1389, 'weight': 648}, 'secondary_worker_config': {}, 'labels': {}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.update_autoscaling_policy(request)
-
-
-def test_update_autoscaling_policy_rest_flattened():
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = autoscaling_policies.AutoscalingPolicy()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'policy': {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'}}
-
- # get truthy value for each flattened field
- mock_args = dict(
- policy=autoscaling_policies.AutoscalingPolicy(id='id_value'),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- client.update_autoscaling_policy(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" % client.transport._host, args[1])
-
-
-def test_update_autoscaling_policy_rest_flattened_error(transport: str = 'rest'):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_autoscaling_policy(
- autoscaling_policies.UpdateAutoscalingPolicyRequest(),
- policy=autoscaling_policies.AutoscalingPolicy(id='id_value'),
- )
-
-
-def test_update_autoscaling_policy_rest_error():
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest'
- )
-
-
-@pytest.mark.parametrize("request_type", [
- autoscaling_policies.GetAutoscalingPolicyRequest,
- dict,
-])
-def test_get_autoscaling_policy_rest(request_type):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = autoscaling_policies.AutoscalingPolicy( - id='id_value', - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_autoscaling_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == 'id_value' - assert response.name == 'name_value' - - -def test_get_autoscaling_policy_rest_required_fields(request_type=autoscaling_policies.GetAutoscalingPolicyRequest): - transport_class = transports.AutoscalingPolicyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.AutoscalingPolicy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.get_autoscaling_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_autoscaling_policy_rest_unset_required_fields():
- transport = transports.AutoscalingPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_autoscaling_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_autoscaling_policy_rest_interceptors(null_interceptor):
- transport = transports.AutoscalingPolicyServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AutoscalingPolicyServiceRestInterceptor(),
- )
- client = AutoscalingPolicyServiceClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "post_get_autoscaling_policy") as post, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "pre_get_autoscaling_policy") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = autoscaling_policies.GetAutoscalingPolicyRequest.pb(autoscaling_policies.GetAutoscalingPolicyRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = autoscaling_policies.AutoscalingPolicy.to_json(autoscaling_policies.AutoscalingPolicy())
-
- request = autoscaling_policies.GetAutoscalingPolicyRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = autoscaling_policies.AutoscalingPolicy()
-
- client.get_autoscaling_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_get_autoscaling_policy_rest_bad_request(transport: str = 'rest', request_type=autoscaling_policies.GetAutoscalingPolicyRequest):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_autoscaling_policy(request) - - -def test_get_autoscaling_policy_rest_flattened(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.AutoscalingPolicy() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = autoscaling_policies.AutoscalingPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_autoscaling_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" % client.transport._host, args[1]) - - -def test_get_autoscaling_policy_rest_flattened_error(transport: str = 'rest'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_autoscaling_policy( - autoscaling_policies.GetAutoscalingPolicyRequest(), - name='name_value', - ) - - -def test_get_autoscaling_policy_rest_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.ListAutoscalingPoliciesRequest, - dict, -]) -def test_list_autoscaling_policies_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = autoscaling_policies.ListAutoscalingPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = autoscaling_policies.ListAutoscalingPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_autoscaling_policies(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutoscalingPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_autoscaling_policies_rest_required_fields(request_type=autoscaling_policies.ListAutoscalingPoliciesRequest): - transport_class = transports.AutoscalingPolicyServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autoscaling_policies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_autoscaling_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = autoscaling_policies.ListAutoscalingPoliciesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.list_autoscaling_policies(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_autoscaling_policies_rest_unset_required_fields():
- transport = transports.AutoscalingPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.list_autoscaling_policies._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_autoscaling_policies_rest_interceptors(null_interceptor):
- transport = transports.AutoscalingPolicyServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AutoscalingPolicyServiceRestInterceptor(),
- )
- client = AutoscalingPolicyServiceClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "post_list_autoscaling_policies") as post, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "pre_list_autoscaling_policies") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = autoscaling_policies.ListAutoscalingPoliciesRequest.pb(autoscaling_policies.ListAutoscalingPoliciesRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = autoscaling_policies.ListAutoscalingPoliciesResponse.to_json(autoscaling_policies.ListAutoscalingPoliciesResponse())
-
- request = autoscaling_policies.ListAutoscalingPoliciesRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse()
-
- client.list_autoscaling_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_list_autoscaling_policies_rest_bad_request(transport: str = 'rest', request_type=autoscaling_policies.ListAutoscalingPoliciesRequest):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_autoscaling_policies(request) - - -def test_list_autoscaling_policies_rest_flattened(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = autoscaling_policies.ListAutoscalingPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_autoscaling_policies(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/autoscalingPolicies" % client.transport._host, args[1]) - - -def test_list_autoscaling_policies_rest_flattened_error(transport: str = 'rest'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_autoscaling_policies( - autoscaling_policies.ListAutoscalingPoliciesRequest(), - parent='parent_value', - ) - - -def test_list_autoscaling_policies_rest_pager(transport: str = 'rest'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='abc', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[], - next_page_token='def', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - ], - next_page_token='ghi', - ), - autoscaling_policies.ListAutoscalingPoliciesResponse( - policies=[ - autoscaling_policies.AutoscalingPolicy(), - autoscaling_policies.AutoscalingPolicy(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(autoscaling_policies.ListAutoscalingPoliciesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_autoscaling_policies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, autoscaling_policies.AutoscalingPolicy) - for i in results) - - pages = list(client.list_autoscaling_policies(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - autoscaling_policies.DeleteAutoscalingPolicyRequest, - dict, -]) -def test_delete_autoscaling_policy_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_autoscaling_policy(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_autoscaling_policy_rest_required_fields(request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest): - transport_class = transports.AutoscalingPolicyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_autoscaling_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ''
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.delete_autoscaling_policy(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_delete_autoscaling_policy_rest_unset_required_fields():
- transport = transports.AutoscalingPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.delete_autoscaling_policy._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_autoscaling_policy_rest_interceptors(null_interceptor):
- transport = transports.AutoscalingPolicyServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AutoscalingPolicyServiceRestInterceptor(),
- )
- client = AutoscalingPolicyServiceClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.AutoscalingPolicyServiceRestInterceptor, "pre_delete_autoscaling_policy") as pre:
- pre.assert_not_called()
- pb_message = autoscaling_policies.DeleteAutoscalingPolicyRequest.pb(autoscaling_policies.DeleteAutoscalingPolicyRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
-
- request = autoscaling_policies.DeleteAutoscalingPolicyRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
-
- client.delete_autoscaling_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
-
-
-def test_delete_autoscaling_policy_rest_bad_request(transport: str = 'rest', request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest):
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.delete_autoscaling_policy(request)
-
-
-def test_delete_autoscaling_policy_rest_flattened():
- client = AutoscalingPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/autoscalingPolicies/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_autoscaling_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" % client.transport._host, args[1]) - - -def test_delete_autoscaling_policy_rest_flattened_error(transport: str = 'rest'): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_autoscaling_policy( - autoscaling_policies.DeleteAutoscalingPolicyRequest(), - name='name_value', - ) - - -def test_delete_autoscaling_policy_rest_error(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AutoscalingPolicyServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AutoscalingPolicyServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AutoscalingPolicyServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AutoscalingPolicyServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AutoscalingPolicyServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AutoscalingPolicyServiceGrpcTransport, - transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, - transports.AutoscalingPolicyServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = AutoscalingPolicyServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AutoscalingPolicyServiceGrpcTransport, - ) - -def test_autoscaling_policy_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AutoscalingPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_autoscaling_policy_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AutoscalingPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_autoscaling_policy', - 'update_autoscaling_policy', - 'get_autoscaling_policy', - 'list_autoscaling_policies', - 'delete_autoscaling_policy', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_autoscaling_policy_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AutoscalingPolicyServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_autoscaling_policy_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AutoscalingPolicyServiceTransport() - adc.assert_called_once() - - -def test_autoscaling_policy_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AutoscalingPolicyServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AutoscalingPolicyServiceGrpcTransport, - transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, - ], -) -def test_autoscaling_policy_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.AutoscalingPolicyServiceGrpcTransport,
-        transports.AutoscalingPolicyServiceGrpcAsyncIOTransport,
-        transports.AutoscalingPolicyServiceRestTransport,
-    ],
-)
-def test_autoscaling_policy_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
-    for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-            gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
-            adc.return_value = (gdch_mock, None)
-            transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(e)
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers),
-        (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_autoscaling_policy_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
-        grpc_helpers, "create_channel", autospec=True
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        adc.return_value = (creds, None)
-        transport_class(
-            quota_project_id="octopus",
-            scopes=["1", "2"]
-        )
-
-        create_channel.assert_called_with(
-            "dataproc.googleapis.com:443",
-            credentials=creds,
-            credentials_file=None,
-            quota_project_id="octopus",
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            scopes=["1", "2"],
-            default_host="dataproc.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("transport_class", [transports.AutoscalingPolicyServiceGrpcTransport, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport])
-def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls(
-    transport_class
-):
-    cred = ga_credentials.AnonymousCredentials()
-
-    # Check ssl_channel_credentials is used if provided.
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
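-    # (client_cert_source_for_mtls is assumed to be a zero-argument callable
-    # returning a (cert_bytes, key_bytes) pair -- here the module-level
-    # client_cert_source_callback fixture -- which the transport forwards to
-    # grpc.ssl_channel_credentials as certificate_chain/private_key.)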
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_autoscaling_policy_service_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.AutoscalingPolicyServiceRestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_autoscaling_policy_service_host_no_port(transport_name):
-    client = AutoscalingPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataproc.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataproc.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_autoscaling_policy_service_host_with_port(transport_name):
-    client = AutoscalingPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataproc.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataproc.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_autoscaling_policy_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = AutoscalingPolicyServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = AutoscalingPolicyServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.create_autoscaling_policy._session
-    session2 = client2.transport.create_autoscaling_policy._session
-    assert session1 != session2
-    session1 = client1.transport.update_autoscaling_policy._session
-    session2 = client2.transport.update_autoscaling_policy._session
-    assert session1 != session2
-    session1 = client1.transport.get_autoscaling_policy._session
-    session2 = client2.transport.get_autoscaling_policy._session
-    assert session1 != session2
-    session1 = client1.transport.list_autoscaling_policies._session
-    session2 = client2.transport.list_autoscaling_policies._session
-    assert session1 != session2
-    session1 = client1.transport.delete_autoscaling_policy._session
-    session2 = client2.transport.delete_autoscaling_policy._session
-    assert session1 != session2
-
-def test_autoscaling_policy_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
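-    # (When a pre-built channel is passed in, the transport is expected to
-    # adopt it as-is rather than create its own, so no credentials are needed
-    # and _ssl_channel_credentials stays None, as asserted below.)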
-    transport = transports.AutoscalingPolicyServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_autoscaling_policy_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.AutoscalingPolicyServiceGrpcTransport, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport])
-def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
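-# (api_mtls_endpoint and client_cert_source predate the client_options-based
-# mTLS configuration; the constructor still honors them but emits a
-# DeprecationWarning, which the test asserts via pytest.warns below.)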
-@pytest.mark.parametrize("transport_class", [transports.AutoscalingPolicyServiceGrpcTransport, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport]) -def test_autoscaling_policy_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_autoscaling_policy_path(): - project = "squid" - location = "clam" - autoscaling_policy = "whelk" - expected = "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format(project=project, location=location, autoscaling_policy=autoscaling_policy, ) - actual = AutoscalingPolicyServiceClient.autoscaling_policy_path(project, location, autoscaling_policy) - assert expected == actual - - -def test_parse_autoscaling_policy_path(): - expected = { - "project": "octopus", - "location": "oyster", - "autoscaling_policy": "nudibranch", - } - path = AutoscalingPolicyServiceClient.autoscaling_policy_path(**expected) - - # Check that the path construction is reversible. - actual = AutoscalingPolicyServiceClient.parse_autoscaling_policy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AutoscalingPolicyServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = AutoscalingPolicyServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AutoscalingPolicyServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = AutoscalingPolicyServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = AutoscalingPolicyServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = AutoscalingPolicyServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AutoscalingPolicyServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = AutoscalingPolicyServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AutoscalingPolicyServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = AutoscalingPolicyServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = AutoscalingPolicyServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AutoscalingPolicyServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AutoscalingPolicyServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = AutoscalingPolicyServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AutoscalingPolicyServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AutoscalingPolicyServiceTransport, '_prep_wrapped_messages') as prep: - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AutoscalingPolicyServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AutoscalingPolicyServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
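-    # (A mocked 400 status on the underlying requests.Session is expected to be
-    # translated by the REST transport into
-    # google.api_core.exceptions.BadRequest, which pytest.raises catches below.)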
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
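-        # (CancelOperation has an Empty response; over REST it serializes to
-        # '{}' and the client surfaces it as None, hence the empty JSON payload
-        # and the `assert response is None` check below.)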
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
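-        # (Routing information travels in the x-goog-request-params metadata
-        # entry; its value is built from the request's name field, so
-        # "name=locations" is expected here.)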
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
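-        # (grpc_helpers_async.FakeUnaryUnaryCall wraps the value in an
-        # awaitable stand-in for a real gRPC call object, so the async client
-        # can be exercised without opening a channel.)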
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        )
-        response = await client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-
-def test_set_iam_policy_field_headers():
-    client = AutoscalingPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        call.return_value = policy_pb2.Policy()
-
-        client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_set_iam_policy_field_headers_async():
-    client = AutoscalingPolicyServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-
-        await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-
-def test_set_iam_policy_from_dict():
-    client = AutoscalingPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-
-        response = client.set_iam_policy(
-            request={
-                "resource": "resource_value",
-                "policy": policy_pb2.Policy(version=774),
-            }
-        )
-        call.assert_called()
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_from_dict_async():
-    client = AutoscalingPolicyServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = AutoscalingPolicyServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AutoscalingPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
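-        # [Editor's note] Illustrative aside, not generated code: both client
-        # classes are context managers, so typical usage is
-        #
-        #     with AutoscalingPolicyServiceClient(credentials=creds) as c:
-        #         ...                      # make calls with c
-        #     # c.transport.close() has run by this point
-        #
-        # (creds is a hypothetical credentials object); the patch below pins
-        # down exactly that close-on-exit behaviour.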
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AutoscalingPolicyServiceClient, transports.AutoscalingPolicyServiceGrpcTransport), - (AutoscalingPolicyServiceAsyncClient, transports.AutoscalingPolicyServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_batch_controller.py deleted file mode 100644 index 8d28f1d9..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ /dev/null @@ -1,4781 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import os
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async  # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.dataproc_v1.services.batch_controller import BatchControllerAsyncClient
-from google.cloud.dataproc_v1.services.batch_controller import BatchControllerClient
-from google.cloud.dataproc_v1.services.batch_controller import pagers
-from google.cloud.dataproc_v1.services.batch_controller import transports
-from google.cloud.dataproc_v1.types import batches
-from google.cloud.dataproc_v1.types import operations
-from google.cloud.dataproc_v1.types import shared
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import options_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account
-from google.protobuf import duration_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-import google.auth
-
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
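-# [Editor's note] Illustrative aside, not part of the generated file: the
-# default mTLS endpoint exercised by test__get_default_mtls_endpoint below is
-# derived from the regular endpoint by inserting an "mtls" label after the
-# service name, e.g.
-#
-#     "example.googleapis.com"          -> "example.mtls.googleapis.com"
-#     "example.sandbox.googleapis.com"  -> "example.mtls.sandbox.googleapis.com"
-#
-# while non-Google hosts such as "api.example.com" are left unchanged.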
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert BatchControllerClient._get_default_mtls_endpoint(None) is None - assert BatchControllerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BatchControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BatchControllerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BatchControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BatchControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BatchControllerClient, "grpc"), - (BatchControllerAsyncClient, "grpc_asyncio"), - (BatchControllerClient, "rest"), -]) -def test_batch_controller_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BatchControllerGrpcTransport, "grpc"), - (transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.BatchControllerRestTransport, "rest"), -]) -def test_batch_controller_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BatchControllerClient, "grpc"), - (BatchControllerAsyncClient, "grpc_asyncio"), - (BatchControllerClient, "rest"), -]) -def test_batch_controller_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert 
client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -def test_batch_controller_client_get_transport_class(): - transport = BatchControllerClient.get_transport_class() - available_transports = [ - transports.BatchControllerGrpcTransport, - transports.BatchControllerRestTransport, - ] - assert transport in available_transports - - transport = BatchControllerClient.get_transport_class("grpc") - assert transport == transports.BatchControllerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc"), - (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (BatchControllerClient, transports.BatchControllerRestTransport, "rest"), -]) -@mock.patch.object(BatchControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchControllerClient)) -@mock.patch.object(BatchControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchControllerAsyncClient)) -def test_batch_controller_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(BatchControllerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BatchControllerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
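-    # [Editor's note] Editorial aside, not generated code: the remaining
-    # endpoint cases reduce to roughly
-    #
-    #     use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-    #     host = (client.DEFAULT_MTLS_ENDPOINT if use_mtls == "always"
-    #             else client.DEFAULT_ENDPOINT)
-    #
-    # with "auto" upgrading to mTLS only when a client certificate is
-    # configured, and any unsupported value raising MutualTLSChannelError.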
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc", "true"), - (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc", "false"), - (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (BatchControllerClient, transports.BatchControllerRestTransport, "rest", "true"), - (BatchControllerClient, transports.BatchControllerRestTransport, "rest", "false"), -]) -@mock.patch.object(BatchControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchControllerClient)) -@mock.patch.object(BatchControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchControllerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_batch_controller_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
- - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - BatchControllerClient, BatchControllerAsyncClient -]) -@mock.patch.object(BatchControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchControllerClient)) -@mock.patch.object(BatchControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchControllerAsyncClient)) -def test_batch_controller_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
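-    # [Editor's note] Editorial aside, not generated code: the method under
-    # test returns an (api_endpoint, client_cert_source) pair, e.g. (sketch;
-    # my_cert_source is a hypothetical () -> (cert_bytes, key_bytes) callable)
-    #
-    #     endpoint, source = BatchControllerClient.get_mtls_endpoint_and_cert_source(
-    #         client_options.ClientOptions(client_cert_source=my_cert_source)
-    #     )
-    #
-    # and the cases below pin down how the two environment variables shape
-    # that pair.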
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc"), - (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (BatchControllerClient, transports.BatchControllerRestTransport, "rest"), -]) -def test_batch_controller_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc", grpc_helpers), - (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (BatchControllerClient, transports.BatchControllerRestTransport, "rest", None), -]) -def test_batch_controller_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_batch_controller_client_client_options_from_dict(): - with mock.patch('google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = BatchControllerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BatchControllerClient, transports.BatchControllerGrpcTransport, "grpc", grpc_helpers), - (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_batch_controller_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - batches.CreateBatchRequest, - dict, -]) -def test_create_batch(request_type, transport: str = 'grpc'): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_batch(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == batches.CreateBatchRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_batch_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_batch), - '__call__') as call: - client.create_batch() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batches.CreateBatchRequest() - -@pytest.mark.asyncio -async def test_create_batch_async(transport: str = 'grpc_asyncio', request_type=batches.CreateBatchRequest): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_batch(request) - - # Establish that the underlying gRPC stub method was called. 
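-        # [Editor's note] Editorial aside, not generated code: create_batch is
-        # a long-running operation, so the stub yields an
-        # operations_pb2.Operation and the client wraps it in a future;
-        # callers would typically do (sketch)
-        #
-        #     operation = await client.create_batch(request)
-        #     batch = await operation.result()   # the finished batches.Batch
-        #
-        # which is why the assertions below only check the request routing and
-        # the future type, not a Batch payload.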
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == batches.CreateBatchRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_batch_async_from_dict(): - await test_create_batch_async(request_type=dict) - - -def test_create_batch_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batches.CreateBatchRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_batch), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_batch(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_batch_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batches.CreateBatchRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_batch), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_batch(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_batch_flattened(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_batch( - parent='parent_value', - batch=batches.Batch(name='name_value'), - batch_id='batch_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].batch - mock_val = batches.Batch(name='name_value') - assert arg == mock_val - arg = args[0].batch_id - mock_val = 'batch_id_value' - assert arg == mock_val - - -def test_create_batch_flattened_error(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_batch( - batches.CreateBatchRequest(), - parent='parent_value', - batch=batches.Batch(name='name_value'), - batch_id='batch_id_value', - ) - -@pytest.mark.asyncio -async def test_create_batch_flattened_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_batch( - parent='parent_value', - batch=batches.Batch(name='name_value'), - batch_id='batch_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].batch - mock_val = batches.Batch(name='name_value') - assert arg == mock_val - arg = args[0].batch_id - mock_val = 'batch_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_batch_flattened_error_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_batch( - batches.CreateBatchRequest(), - parent='parent_value', - batch=batches.Batch(name='name_value'), - batch_id='batch_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - batches.GetBatchRequest, - dict, -]) -def test_get_batch(request_type, transport: str = 'grpc'): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batches.Batch( - name='name_value', - uuid='uuid_value', - state=batches.Batch.State.PENDING, - state_message='state_message_value', - creator='creator_value', - operation='operation_value', - ) - response = client.get_batch(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batches.GetBatchRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, batches.Batch)
-    assert response.name == 'name_value'
-    assert response.uuid == 'uuid_value'
-    assert response.state == batches.Batch.State.PENDING
-    assert response.state_message == 'state_message_value'
-    assert response.creator == 'creator_value'
-    assert response.operation == 'operation_value'
-
-
-def test_get_batch_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_batch),
-            '__call__') as call:
-        client.get_batch()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batches.GetBatchRequest()
-
-@pytest.mark.asyncio
-async def test_get_batch_async(transport: str = 'grpc_asyncio', request_type=batches.GetBatchRequest):
-    client = BatchControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_batch),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.Batch(
-            name='name_value',
-            uuid='uuid_value',
-            state=batches.Batch.State.PENDING,
-            state_message='state_message_value',
-            creator='creator_value',
-            operation='operation_value',
-        ))
-        response = await client.get_batch(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batches.GetBatchRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, batches.Batch)
-    assert response.name == 'name_value'
-    assert response.uuid == 'uuid_value'
-    assert response.state == batches.Batch.State.PENDING
-    assert response.state_message == 'state_message_value'
-    assert response.creator == 'creator_value'
-    assert response.operation == 'operation_value'
-
-
-@pytest.mark.asyncio
-async def test_get_batch_async_from_dict():
-    await test_get_batch_async(request_type=dict)
-
-
-def test_get_batch_field_headers():
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = batches.GetBatchRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_batch),
-            '__call__') as call:
-        call.return_value = batches.Batch()
-        client.get_batch(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_batch_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batches.GetBatchRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_batch), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.Batch()) - await client.get_batch(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_batch_flattened(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batches.Batch() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_batch( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_batch_flattened_error(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_batch( - batches.GetBatchRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_batch_flattened_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batches.Batch() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.Batch()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_batch( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_batch_flattened_error_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
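-    # [Editor's note] Editorial aside, not generated code: the flattened
-    # keyword form is shorthand for building the request object, i.e.
-    #
-    #     client.get_batch(name="projects/p/locations/l/batches/b")
-    #
-    # builds a batches.GetBatchRequest(name=...) internally (resource path
-    # hypothetical), so mixing both forms is ambiguous and rejected, as
-    # verified next.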
-    with pytest.raises(ValueError):
-        await client.get_batch(
-            batches.GetBatchRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  batches.ListBatchesRequest,
-  dict,
-])
-def test_list_batches(request_type, transport: str = 'grpc'):
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_batches),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = batches.ListBatchesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_batches(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batches.ListBatchesRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListBatchesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_batches_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_batches),
-            '__call__') as call:
-        client.list_batches()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batches.ListBatchesRequest()
-
-@pytest.mark.asyncio
-async def test_list_batches_async(transport: str = 'grpc_asyncio', request_type=batches.ListBatchesRequest):
-    client = BatchControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_batches),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.ListBatchesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_batches(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batches.ListBatchesRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListBatchesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_batches_async_from_dict():
-    await test_list_batches_async(request_type=dict)
-
-
-def test_list_batches_field_headers():
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
- request = batches.ListBatchesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_batches), - '__call__') as call: - call.return_value = batches.ListBatchesResponse() - client.list_batches(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_batches_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batches.ListBatchesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_batches), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.ListBatchesResponse()) - await client.list_batches(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_batches_flattened(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_batches), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batches.ListBatchesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_batches( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_batches_flattened_error(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_batches( - batches.ListBatchesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_batches_flattened_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_batches), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batches.ListBatchesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batches.ListBatchesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
-    response = await client.list_batches(
-        parent='parent_value',
-    )
-
-    # Establish that the underlying call was made with the expected
-    # request object values.
-    assert len(call.mock_calls)
-    _, args, _ = call.mock_calls[0]
-    arg = args[0].parent
-    mock_val = 'parent_value'
-    assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_batches_flattened_error_async():
-    client = BatchControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_batches(
-            batches.ListBatchesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_batches_pager(transport_name: str = "grpc"):
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_batches),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-                next_page_token='abc',
-            ),
-            batches.ListBatchesResponse(
-                batches=[],
-                next_page_token='def',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                ],
-                next_page_token='ghi',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_batches(request={})
-
-        assert pager._metadata == metadata
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, batches.Batch)
-                   for i in results)
-def test_list_batches_pages(transport_name: str = "grpc"):
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_batches),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-                next_page_token='abc',
-            ),
-            batches.ListBatchesResponse(
-                batches=[],
-                next_page_token='def',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                ],
-                next_page_token='ghi',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_batches(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_batches_async_pager():
-    client = BatchControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_batches),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
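-        # [Editor's note] Editorial aside, not generated code: the pager keeps
-        # following next_page_token until it comes back empty, roughly
-        #
-        #     token = ''
-        #     while True:
-        #         page = fetch(page_token=token)   # fetch is hypothetical
-        #         yield from page.batches
-        #         token = page.next_page_token
-        #         if not token:
-        #             break
-        #
-        # hence the fake pages below carry tokens 'abc', 'def', 'ghi', ''
-        # and 3 + 0 + 1 + 2 == 6 Batch items in total.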
-        call.side_effect = (
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-                next_page_token='abc',
-            ),
-            batches.ListBatchesResponse(
-                batches=[],
-                next_page_token='def',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                ],
-                next_page_token='ghi',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_batches(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, batches.Batch)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_batches_async_pages():
-    client = BatchControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_batches),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-                next_page_token='abc',
-            ),
-            batches.ListBatchesResponse(
-                batches=[],
-                next_page_token='def',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                ],
-                next_page_token='ghi',
-            ),
-            batches.ListBatchesResponse(
-                batches=[
-                    batches.Batch(),
-                    batches.Batch(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_batches(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-  batches.DeleteBatchRequest,
-  dict,
-])
-def test_delete_batch(request_type, transport: str = 'grpc'):
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_batch),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_batch(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batches.DeleteBatchRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_batch_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = BatchControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.delete_batch), - '__call__') as call: - client.delete_batch() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batches.DeleteBatchRequest() - -@pytest.mark.asyncio -async def test_delete_batch_async(transport: str = 'grpc_asyncio', request_type=batches.DeleteBatchRequest): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_batch), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_batch(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == batches.DeleteBatchRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_batch_async_from_dict(): - await test_delete_batch_async(request_type=dict) - - -def test_delete_batch_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batches.DeleteBatchRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_batch), - '__call__') as call: - call.return_value = None - client.delete_batch(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_batch_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batches.DeleteBatchRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_batch), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_batch(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_batch_flattened(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_batch), - '__call__') as call: - # Designate an appropriate return value for the call. 
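The field-header tests above pin down routing metadata: the request's name is copied into an x-goog-request-params header so the backend can route the call. From the caller's side, any user-supplied metadata travels alongside that generated entry. A hedged sketch (the resource name and custom header are placeholders, and the call assumes real credentials):

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient()
    client.delete_batch(
        name="projects/my-project/locations/us-central1/batches/my-batch",
        # Merged with the generated routing header, i.e.
        # ('x-goog-request-params', 'name=projects/...').
        metadata=(("x-custom-audit-tag", "cleanup"),),
    )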
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_batch(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_batch_flattened_error():
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_batch(
- batches.DeleteBatchRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_batch_flattened_async():
- client = BatchControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_batch),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_batch(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_batch_flattened_error_async():
- client = BatchControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.delete_batch( - batches.DeleteBatchRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - batches.CreateBatchRequest, - dict, -]) -def test_create_batch_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["batch"] = {'name': 'name_value', 'uuid': 'uuid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'pyspark_batch': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}, 'spark_batch': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}, 'spark_r_batch': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}, 'spark_sql_batch': {'query_file_uri': 'query_file_uri_value', 'query_variables': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'runtime_info': {'endpoints': {}, 'output_uri': 'output_uri_value', 'diagnostic_output_uri': 'diagnostic_output_uri_value', 'approximate_usage': {'milli_dcu_seconds': 1792, 'shuffle_storage_gb_seconds': 2743}, 'current_usage': {'milli_dcu': 946, 'shuffle_storage_gb': 1897, 'snapshot_time': {}}}, 'state': 1, 'state_message': 'state_message_value', 'state_time': {}, 'creator': 'creator_value', 'labels': {}, 'runtime_config': {'version': 'version_value', 'container_image': 'container_image_value', 'properties': {}}, 'environment_config': {'execution_config': {'service_account': 'service_account_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'network_tags': ['network_tags_value1', 'network_tags_value2'], 'kms_key': 'kms_key_value', 'ttl': {'seconds': 751, 'nanos': 543}, 'staging_bucket': 'staging_bucket_value'}, 'peripherals_config': {'metastore_service': 'metastore_service_value', 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}}, 'operation': 'operation_value', 'state_history': [{'state': 1, 'state_message': 'state_message_value', 'state_start_time': {}}]} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_batch(request) - - # Establish that the response is the type that we expect. 
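test_create_batch_rest above fakes the long-running Operation that create_batch returns; against a live service the caller typically blocks on that operation. A minimal sketch, assuming a configured project and an existing Cloud Storage script (all names below are placeholders):

    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient(transport="rest")
    operation = client.create_batch(
        parent="projects/my-project/locations/us-central1",
        batch=dataproc_v1.Batch(
            pyspark_batch=dataproc_v1.PySparkBatch(
                main_python_file_uri="gs://my-bucket/job.py",
            ),
        ),
        batch_id="my-batch",
    )
    batch = operation.result()  # resolves to the finished Batch resource
    print(batch.state)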
- assert response.operation.name == "operations/spam" - - -def test_create_batch_rest_required_fields(request_type=batches.CreateBatchRequest): - transport_class = transports.BatchControllerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_batch._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_batch._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("batch_id", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_batch(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_batch_rest_unset_required_fields(): - transport = transports.BatchControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_batch._get_unset_required_fields({}) - assert set(unset_fields) == (set(("batchId", "requestId", )) & set(("parent", "batch", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_batch_rest_interceptors(null_interceptor): - transport = transports.BatchControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BatchControllerRestInterceptor(), - ) - client = BatchControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BatchControllerRestInterceptor, "post_create_batch") as post, \ - mock.patch.object(transports.BatchControllerRestInterceptor, "pre_create_batch") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = batches.CreateBatchRequest.pb(batches.CreateBatchRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = batches.CreateBatchRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_batch(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_batch_rest_bad_request(transport: str = 'rest', request_type=batches.CreateBatchRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["batch"] = {'name': 'name_value', 'uuid': 'uuid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'pyspark_batch': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}, 'spark_batch': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': 
['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}, 'spark_r_batch': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2']}, 'spark_sql_batch': {'query_file_uri': 'query_file_uri_value', 'query_variables': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'runtime_info': {'endpoints': {}, 'output_uri': 'output_uri_value', 'diagnostic_output_uri': 'diagnostic_output_uri_value', 'approximate_usage': {'milli_dcu_seconds': 1792, 'shuffle_storage_gb_seconds': 2743}, 'current_usage': {'milli_dcu': 946, 'shuffle_storage_gb': 1897, 'snapshot_time': {}}}, 'state': 1, 'state_message': 'state_message_value', 'state_time': {}, 'creator': 'creator_value', 'labels': {}, 'runtime_config': {'version': 'version_value', 'container_image': 'container_image_value', 'properties': {}}, 'environment_config': {'execution_config': {'service_account': 'service_account_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'network_tags': ['network_tags_value1', 'network_tags_value2'], 'kms_key': 'kms_key_value', 'ttl': {'seconds': 751, 'nanos': 543}, 'staging_bucket': 'staging_bucket_value'}, 'peripherals_config': {'metastore_service': 'metastore_service_value', 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}}, 'operation': 'operation_value', 'state_history': [{'state': 1, 'state_message': 'state_message_value', 'state_start_time': {}}]} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_batch(request) - - -def test_create_batch_rest_flattened(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - batch=batches.Batch(name='name_value'), - batch_id='batch_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_batch(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
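The flattened tests in this region pin down a purely client-side contract: flattened keyword arguments are sugar for building the request object, and supplying both at once raises ValueError before any HTTP traffic happens. That makes the negative case runnable offline, as in this sketch using anonymous credentials the way the tests do:

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud import dataproc_v1

    client = dataproc_v1.BatchControllerClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Rejected locally: an explicit request object plus flattened fields.
    with pytest.raises(ValueError):
        client.create_batch(
            dataproc_v1.CreateBatchRequest(),
            parent="projects/my-project/locations/us-central1",
        )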
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/batches" % client.transport._host, args[1]) - - -def test_create_batch_rest_flattened_error(transport: str = 'rest'): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_batch( - batches.CreateBatchRequest(), - parent='parent_value', - batch=batches.Batch(name='name_value'), - batch_id='batch_id_value', - ) - - -def test_create_batch_rest_error(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - batches.GetBatchRequest, - dict, -]) -def test_get_batch_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/batches/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = batches.Batch( - name='name_value', - uuid='uuid_value', - state=batches.Batch.State.PENDING, - state_message='state_message_value', - creator='creator_value', - operation='operation_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = batches.Batch.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_batch(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, batches.Batch) - assert response.name == 'name_value' - assert response.uuid == 'uuid_value' - assert response.state == batches.Batch.State.PENDING - assert response.state_message == 'state_message_value' - assert response.creator == 'creator_value' - assert response.operation == 'operation_value' - - -def test_get_batch_rest_required_fields(request_type=batches.GetBatchRequest): - transport_class = transports.BatchControllerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_batch._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_batch._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = batches.Batch() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
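The REST tests fabricate responses by converting a proto-plus message to its raw protobuf form and serializing that to JSON, which is exactly the payload shape the transport's HTTP layer parses. The same conversion in isolation (the message contents are arbitrary):

    from google.protobuf import json_format
    from google.cloud.dataproc_v1.types import batches

    msg = batches.Batch(name="projects/p/locations/l/batches/b")
    pb = batches.Batch.pb(msg)               # proto-plus wrapper -> raw protobuf
    payload = json_format.MessageToJson(pb)  # the JSON body the mocks inject
    # batches.Batch.to_json(msg) is the one-step equivalent used elsewhere here.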
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = batches.Batch.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.get_batch(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_batch_rest_unset_required_fields():
- transport = transports.BatchControllerRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_batch._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_batch_rest_interceptors(null_interceptor):
- transport = transports.BatchControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BatchControllerRestInterceptor(),
- )
- client = BatchControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.BatchControllerRestInterceptor, "post_get_batch") as post, \
- mock.patch.object(transports.BatchControllerRestInterceptor, "pre_get_batch") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = batches.GetBatchRequest.pb(batches.GetBatchRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = batches.Batch.to_json(batches.Batch())
-
- request = batches.GetBatchRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = batches.Batch()
-
- client.get_batch(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_get_batch_rest_bad_request(transport: str = 'rest', request_type=batches.GetBatchRequest):
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/batches/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.get_batch(request)
-
-
-def test_get_batch_rest_flattened():
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = batches.Batch() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/batches/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = batches.Batch.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_batch(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/batches/*}" % client.transport._host, args[1]) - - -def test_get_batch_rest_flattened_error(transport: str = 'rest'): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_batch( - batches.GetBatchRequest(), - name='name_value', - ) - - -def test_get_batch_rest_error(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - batches.ListBatchesRequest, - dict, -]) -def test_list_batches_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = batches.ListBatchesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = batches.ListBatchesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_batches(request) - - # Establish that the response is the type that we expect. 
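The flattened REST tests assert the outgoing URL with path_template.validate, which checks a concrete URL against the * wildcards of a URI template (each * matches a single path segment). In isolation:

    from google.api_core import path_template

    assert path_template.validate(
        "https://dataproc.googleapis.com/v1/{name=projects/*/locations/*/batches/*}",
        "https://dataproc.googleapis.com/v1/projects/p/locations/l/batches/b",
    )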
- assert isinstance(response, pagers.ListBatchesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_batches_rest_required_fields(request_type=batches.ListBatchesRequest): - transport_class = transports.BatchControllerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_batches._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_batches._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = batches.ListBatchesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = batches.ListBatchesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.list_batches(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_batches_rest_unset_required_fields():
- transport = transports.BatchControllerRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.list_batches._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_batches_rest_interceptors(null_interceptor):
- transport = transports.BatchControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BatchControllerRestInterceptor(),
- )
- client = BatchControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.BatchControllerRestInterceptor, "post_list_batches") as post, \
- mock.patch.object(transports.BatchControllerRestInterceptor, "pre_list_batches") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = batches.ListBatchesRequest.pb(batches.ListBatchesRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = batches.ListBatchesResponse.to_json(batches.ListBatchesResponse())
-
- request = batches.ListBatchesRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = batches.ListBatchesResponse()
-
- client.list_batches(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_list_batches_rest_bad_request(transport: str = 'rest', request_type=batches.ListBatchesRequest):
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.list_batches(request)
-
-
-def test_list_batches_rest_flattened():
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
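The interceptor tests above drive the pre_list_batches/post_list_batches hook points on BatchControllerRestInterceptor. A hedged sketch of the subclass a user would write; the class and method names come from the generated transport, while the print statements are purely illustrative:

    from google.auth import credentials as ga_credentials
    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.services.batch_controller import transports

    class LoggingInterceptor(transports.BatchControllerRestInterceptor):
        def pre_list_batches(self, request, metadata):
            print("listing batches under", request.parent)
            return request, metadata  # may be modified before sending

        def post_list_batches(self, response):
            print("received", len(response.batches), "batches")
            return response  # may be modified before the caller sees it

    transport = transports.BatchControllerRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
    client = dataproc_v1.BatchControllerClient(transport=transport)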
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = batches.ListBatchesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = batches.ListBatchesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_batches(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/batches" % client.transport._host, args[1]) - - -def test_list_batches_rest_flattened_error(transport: str = 'rest'): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_batches( - batches.ListBatchesRequest(), - parent='parent_value', - ) - - -def test_list_batches_rest_pager(transport: str = 'rest'): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - batches.ListBatchesResponse( - batches=[ - batches.Batch(), - batches.Batch(), - batches.Batch(), - ], - next_page_token='abc', - ), - batches.ListBatchesResponse( - batches=[], - next_page_token='def', - ), - batches.ListBatchesResponse( - batches=[ - batches.Batch(), - ], - next_page_token='ghi', - ), - batches.ListBatchesResponse( - batches=[ - batches.Batch(), - batches.Batch(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(batches.ListBatchesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_batches(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, batches.Batch) - for i in results) - - pages = list(client.list_batches(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - batches.DeleteBatchRequest, - dict, -]) -def test_delete_batch_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/batches/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_batch(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_batch_rest_required_fields(request_type=batches.DeleteBatchRequest): - transport_class = transports.BatchControllerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_batch._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_batch._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ''
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.delete_batch(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_delete_batch_rest_unset_required_fields():
- transport = transports.BatchControllerRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.delete_batch._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_batch_rest_interceptors(null_interceptor):
- transport = transports.BatchControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BatchControllerRestInterceptor(),
- )
- client = BatchControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.BatchControllerRestInterceptor, "pre_delete_batch") as pre:
- pre.assert_not_called()
- pb_message = batches.DeleteBatchRequest.pb(batches.DeleteBatchRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
-
- request = batches.DeleteBatchRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
-
- client.delete_batch(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
-
-
-def test_delete_batch_rest_bad_request(transport: str = 'rest', request_type=batches.DeleteBatchRequest):
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/batches/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.delete_batch(request)
-
-
-def test_delete_batch_rest_flattened():
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/batches/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_batch(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/batches/*}" % client.transport._host, args[1]) - - -def test_delete_batch_rest_flattened_error(transport: str = 'rest'): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_batch( - batches.DeleteBatchRequest(), - name='name_value', - ) - - -def test_delete_batch_rest_error(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.BatchControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.BatchControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchControllerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.BatchControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BatchControllerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BatchControllerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.BatchControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchControllerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.BatchControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = BatchControllerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
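As test_transport_instance and test_transport_get_channel show, a transport may be constructed explicitly and handed to the client, in which case credentials belong to the transport rather than the client. Minimal form, mirroring the tests:

    from google.auth import credentials as ga_credentials
    from google.cloud import dataproc_v1
    from google.cloud.dataproc_v1.services.batch_controller import transports

    transport = transports.BatchControllerGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = dataproc_v1.BatchControllerClient(transport=transport)
    assert client.transport is transport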
- transport = transports.BatchControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.BatchControllerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.BatchControllerGrpcTransport, - transports.BatchControllerGrpcAsyncIOTransport, - transports.BatchControllerRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = BatchControllerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.BatchControllerGrpcTransport, - ) - -def test_batch_controller_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.BatchControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_batch_controller_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.BatchControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_batch', - 'get_batch', - 'list_batches', - 'delete_batch', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_batch_controller_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BatchControllerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_batch_controller_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataproc_v1.services.batch_controller.transports.BatchControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BatchControllerTransport() - adc.assert_called_once() - - -def test_batch_controller_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BatchControllerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BatchControllerGrpcTransport, - transports.BatchControllerGrpcAsyncIOTransport, - ], -) -def test_batch_controller_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
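The ADC tests above mock out google.auth.default; outside of tests, that call resolves Application Default Credentials from the environment (GOOGLE_APPLICATION_CREDENTIALS, gcloud user credentials, or the GCE metadata server). A sketch, assuming ADC is configured on the machine:

    import google.auth
    from google.cloud import dataproc_v1

    credentials, project_id = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = dataproc_v1.BatchControllerClient(credentials=credentials)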
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BatchControllerGrpcTransport, - transports.BatchControllerGrpcAsyncIOTransport, - transports.BatchControllerRestTransport, - ], -) -def test_batch_controller_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.BatchControllerGrpcTransport, grpc_helpers), - (transports.BatchControllerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_batch_controller_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.BatchControllerGrpcTransport, transports.BatchControllerGrpcAsyncIOTransport]) -def test_batch_controller_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
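The mTLS tests verify that client_cert_source_for_mtls is a zero-argument callback returning (cert_bytes, key_bytes), which the gRPC transport passes to grpc.ssl_channel_credentials. A hedged sketch; the PEM paths and mTLS endpoint are placeholders, and ADC still supplies the auth credentials:

    from google.cloud.dataproc_v1.services.batch_controller import transports

    def client_cert_source():
        with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
            return cert.read(), key.read()

    transport = transports.BatchControllerGrpcTransport(
        host="dataproc.mtls.googleapis.com",
        client_cert_source_for_mtls=client_cert_source,
    )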
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-def test_batch_controller_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
- transports.BatchControllerRestTransport(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-def test_batch_controller_rest_lro_client():
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.AbstractOperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_batch_controller_host_no_port(transport_name):
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'dataproc.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://dataproc.googleapis.com'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_batch_controller_host_with_port(transport_name):
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'dataproc.googleapis.com:8000'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://dataproc.googleapis.com:8000'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "rest",
-])
-def test_batch_controller_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = BatchControllerClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = BatchControllerClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.create_batch._session
- session2 = client2.transport.create_batch._session
- assert session1 != session2
- session1 = client1.transport.get_batch._session
- session2 = client2.transport.get_batch._session
- assert session1 != session2
- session1 = client1.transport.list_batches._session
- session2 = client2.transport.list_batches._session
- assert session1 != session2
- session1 = client1.transport.delete_batch._session
- session2 = client2.transport.delete_batch._session
- assert session1 != session2
-def test_batch_controller_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.BatchControllerGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_batch_controller_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.BatchControllerGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BatchControllerGrpcTransport, transports.BatchControllerGrpcAsyncIOTransport])
-def test_batch_controller_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BatchControllerGrpcTransport, transports.BatchControllerGrpcAsyncIOTransport]) -def test_batch_controller_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_batch_controller_grpc_lro_client(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_batch_controller_grpc_lro_async_client(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_batch_path(): - project = "squid" - location = "clam" - batch = "whelk" - expected = "projects/{project}/locations/{location}/batches/{batch}".format(project=project, location=location, batch=batch, ) - actual = BatchControllerClient.batch_path(project, location, batch) - assert expected == actual - - -def test_parse_batch_path(): - expected = { - "project": "octopus", - "location": "oyster", - "batch": "nudibranch", - } - path = BatchControllerClient.batch_path(**expected) - - # Check that the path construction is reversible. - actual = BatchControllerClient.parse_batch_path(path) - assert expected == actual - -def test_service_path(): - project = "cuttlefish" - location = "mussel" - service = "winkle" - expected = "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, ) - actual = BatchControllerClient.service_path(project, location, service) - assert expected == actual - - -def test_parse_service_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "service": "abalone", - } - path = BatchControllerClient.service_path(**expected) - - # Check that the path construction is reversible. 
- actual = BatchControllerClient.parse_service_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = BatchControllerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = BatchControllerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = BatchControllerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = BatchControllerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = BatchControllerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = BatchControllerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = BatchControllerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = BatchControllerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = BatchControllerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = BatchControllerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = BatchControllerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = BatchControllerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = BatchControllerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = BatchControllerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = BatchControllerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.BatchControllerTransport, '_prep_wrapped_messages') as prep: - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.BatchControllerTransport, '_prep_wrapped_messages') as prep: - transport_class = BatchControllerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
- response = client.set_iam_policy(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
-
- assert response.version == 774
-
- assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
- client = BatchControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- policy_pb2.Policy(version=774, etag=b"etag_blob",)
- )
- response = await client.set_iam_policy(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
-
- assert response.version == 774
-
- assert response.etag == b"etag_blob"
-
-def test_set_iam_policy_field_headers():
- client = BatchControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.SetIamPolicyRequest()
- request.resource = "resource/value"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- call.return_value = policy_pb2.Policy()
-
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_set_iam_policy_field_headers_async():
- client = BatchControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.SetIamPolicyRequest()
- request.resource = "resource/value"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-
- await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = BatchControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = BatchControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (BatchControllerClient, transports.BatchControllerGrpcTransport), - (BatchControllerAsyncClient, transports.BatchControllerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_cluster_controller.py deleted file mode 100644 index f6c33f3a..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ /dev/null @@ -1,6611 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataproc_v1.services.cluster_controller import ClusterControllerAsyncClient -from google.cloud.dataproc_v1.services.cluster_controller import ClusterControllerClient -from google.cloud.dataproc_v1.services.cluster_controller import pagers -from google.cloud.dataproc_v1.services.cluster_controller import transports -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import operations -from google.cloud.dataproc_v1.types import shared -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
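The helper defined next and the test__get_default_mtls_endpoint assertions that follow hinge on how a default endpoint maps to its mTLS counterpart. As a hedged illustration only, here is a self-contained sketch of the string rewrite those assertions imply (get_default_mtls_endpoint below is a hypothetical stand-in, not the client's real classmethod, which may differ in detail):

def get_default_mtls_endpoint(api_endpoint):
    # Mirror the behavior asserted in the test: insert ".mtls" ahead of the
    # googleapis.com (or sandbox.googleapis.com) suffix, leave endpoints that
    # are already mTLS alone, and pass non-Google endpoints through unchanged.
    if not api_endpoint:
        return api_endpoint
    if api_endpoint.endswith(".sandbox.googleapis.com"):
        if ".mtls.sandbox." in api_endpoint:
            return api_endpoint
        return api_endpoint[: -len(".sandbox.googleapis.com")] + ".mtls.sandbox.googleapis.com"
    if api_endpoint.endswith(".googleapis.com"):
        if ".mtls." in api_endpoint:
            return api_endpoint
        return api_endpoint[: -len(".googleapis.com")] + ".mtls.googleapis.com"
    return api_endpoint

assert get_default_mtls_endpoint(None) is None
assert get_default_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert get_default_mtls_endpoint("example.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
assert get_default_mtls_endpoint("api.example.com") == "api.example.com"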
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ClusterControllerClient._get_default_mtls_endpoint(None) is None - assert ClusterControllerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ClusterControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ClusterControllerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ClusterControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ClusterControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (ClusterControllerClient, "grpc"), - (ClusterControllerAsyncClient, "grpc_asyncio"), - (ClusterControllerClient, "rest"), -]) -def test_cluster_controller_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ClusterControllerGrpcTransport, "grpc"), - (transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.ClusterControllerRestTransport, "rest"), -]) -def test_cluster_controller_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (ClusterControllerClient, "grpc"), - (ClusterControllerAsyncClient, "grpc_asyncio"), - (ClusterControllerClient, "rest"), -]) -def test_cluster_controller_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert 
isinstance(client, client_class) - - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -def test_cluster_controller_client_get_transport_class(): - transport = ClusterControllerClient.get_transport_class() - available_transports = [ - transports.ClusterControllerGrpcTransport, - transports.ClusterControllerRestTransport, - ] - assert transport in available_transports - - transport = ClusterControllerClient.get_transport_class("grpc") - assert transport == transports.ClusterControllerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc"), - (ClusterControllerAsyncClient, transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest"), -]) -@mock.patch.object(ClusterControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClusterControllerClient)) -@mock.patch.object(ClusterControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClusterControllerAsyncClient)) -def test_cluster_controller_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(ClusterControllerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ClusterControllerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class(transport=transport_name)
-
- # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
- with pytest.raises(ValueError):
- client = client_class(transport=transport_name)
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id="octopus",
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
- # Check the case api_audience is provided
- options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience="https://language.googleapis.com"
- )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
- (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc", "true"),
- (ClusterControllerAsyncClient, transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio", "true"),
- (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc", "false"),
- (ClusterControllerAsyncClient, transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio", "false"),
- (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest", "true"),
- (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest", "false"),
-])
-@mock.patch.object(ClusterControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClusterControllerClient))
-@mock.patch.object(ClusterControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClusterControllerAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_cluster_controller_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
- # This tests the endpoint autoswitch behavior.
Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - ClusterControllerClient, ClusterControllerAsyncClient -]) -@mock.patch.object(ClusterControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClusterControllerClient)) -@mock.patch.object(ClusterControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClusterControllerAsyncClient)) -def test_cluster_controller_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc"), - (ClusterControllerAsyncClient, transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest"), -]) -def test_cluster_controller_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
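The endpoint and cert-source cases above reduce to a small decision table over two environment variables. Here is a minimal sketch of that table, assuming explicit parameters in place of reading GOOGLE_API_USE_CLIENT_CERTIFICATE and GOOGLE_API_USE_MTLS_ENDPOINT from the environment (choose_endpoint_and_cert is hypothetical; the generated client raises MutualTLSChannelError where this sketch raises ValueError):

def choose_endpoint_and_cert(default_endpoint, mtls_endpoint,
                             client_cert_source=None,
                             use_cert="false", use_mtls="auto"):
    # use_cert models GOOGLE_API_USE_CLIENT_CERTIFICATE; use_mtls models
    # GOOGLE_API_USE_MTLS_ENDPOINT ("never" | "always" | "auto").
    if use_cert not in ("true", "false"):
        raise ValueError("use_cert must be 'true' or 'false'")
    cert = client_cert_source if use_cert == "true" else None
    if use_mtls == "never":
        return default_endpoint, cert
    if use_mtls == "always":
        return mtls_endpoint, cert
    if use_mtls == "auto":
        # Auto-switch to mTLS only when a client certificate is usable.
        return (mtls_endpoint, cert) if cert else (default_endpoint, None)
    raise ValueError("unsupported use_mtls value")

# No cert configured: "auto" stays on the plain endpoint.
assert choose_endpoint_and_cert(
    "dataproc.googleapis.com", "dataproc.mtls.googleapis.com"
) == ("dataproc.googleapis.com", None)
# Cert configured and allowed: "auto" switches to the mTLS endpoint.
ep, cert = choose_endpoint_and_cert("dataproc.googleapis.com", "dataproc.mtls.googleapis.com",
                                    client_cert_source=lambda: (b"cert", b"key"), use_cert="true")
assert ep == "dataproc.mtls.googleapis.com" and cert is not None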
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc", grpc_helpers), - (ClusterControllerAsyncClient, transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ClusterControllerClient, transports.ClusterControllerRestTransport, "rest", None), -]) -def test_cluster_controller_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_cluster_controller_client_client_options_from_dict(): - with mock.patch('google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ClusterControllerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ClusterControllerClient, transports.ClusterControllerGrpcTransport, "grpc", grpc_helpers), - (ClusterControllerAsyncClient, transports.ClusterControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_cluster_controller_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.CreateClusterRequest, - dict, -]) -def test_create_cluster(request_type, transport: str = 'grpc'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_cluster_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_cluster), - '__call__') as call: - client.create_cluster() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() - -@pytest.mark.asyncio -async def test_create_cluster_async(transport: str = 'grpc_asyncio', request_type=clusters.CreateClusterRequest): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_cluster(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.CreateClusterRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_create_cluster_async_from_dict():
- await test_create_cluster_async(request_type=dict)
-
-
-def test_create_cluster_field_headers():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.CreateClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_cluster),
- '__call__') as call:
- call.return_value = operations_pb2.Operation(name='operations/op')
- client.create_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_cluster_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.CreateClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_cluster),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.create_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value',
- ) in kw['metadata']
-
-
-def test_create_cluster_flattened():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_cluster(
- project_id='project_id_value',
- region='region_value',
- cluster=clusters.Cluster(project_id='project_id_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster - mock_val = clusters.Cluster(project_id='project_id_value') - assert arg == mock_val - - -def test_create_cluster_flattened_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_cluster( - clusters.CreateClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster=clusters.Cluster(project_id='project_id_value'), - ) - -@pytest.mark.asyncio -async def test_create_cluster_flattened_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_cluster( - project_id='project_id_value', - region='region_value', - cluster=clusters.Cluster(project_id='project_id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster - mock_val = clusters.Cluster(project_id='project_id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_cluster_flattened_error_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_cluster( - clusters.CreateClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster=clusters.Cluster(project_id='project_id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.UpdateClusterRequest, - dict, -]) -def test_update_cluster(request_type, transport: str = 'grpc'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_cluster(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.UpdateClusterRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_update_cluster_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_cluster),
- '__call__') as call:
- client.update_cluster()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.UpdateClusterRequest()
-
-@pytest.mark.asyncio
-async def test_update_cluster_async(transport: str = 'grpc_asyncio', request_type=clusters.UpdateClusterRequest):
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- response = await client.update_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.UpdateClusterRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_update_cluster_async_from_dict():
- await test_update_cluster_async(request_type=dict)
-
-
-def test_update_cluster_field_headers():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.UpdateClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_cluster),
- '__call__') as call:
- call.return_value = operations_pb2.Operation(name='operations/op')
- client.update_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_cluster_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
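The field-header tests assert the exact "x-goog-request-params" value, which is the request's routing fields joined as key=value pairs with "&". A tiny runnable sketch of that composition, using the same names the tests use (real transports may additionally percent-encode values):

routing_fields = {
    "project_id": "project_id_value",
    "region": "region_value",
    "cluster_name": "cluster_name_value",
}
routing_param = "&".join(f"{k}={v}" for k, v in routing_fields.items())
metadata = [("x-goog-request-params", routing_param)]
assert routing_param == ("project_id=project_id_value&region=region_value"
                         "&cluster_name=cluster_name_value")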
- request = clusters.UpdateClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_cluster),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.update_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-def test_update_cluster_flattened():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_cluster(
- project_id='project_id_value',
- region='region_value',
- cluster_name='cluster_name_value',
- cluster=clusters.Cluster(project_id='project_id_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].region
- mock_val = 'region_value'
- assert arg == mock_val
- arg = args[0].cluster_name
- mock_val = 'cluster_name_value'
- assert arg == mock_val
- arg = args[0].cluster
- mock_val = clusters.Cluster(project_id='project_id_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_cluster_flattened_error():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_cluster(
- clusters.UpdateClusterRequest(),
- project_id='project_id_value',
- region='region_value',
- cluster_name='cluster_name_value',
- cluster=clusters.Cluster(project_id='project_id_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_cluster_flattened_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
-
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_cluster( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - cluster=clusters.Cluster(project_id='project_id_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster_name - mock_val = 'cluster_name_value' - assert arg == mock_val - arg = args[0].cluster - mock_val = clusters.Cluster(project_id='project_id_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_cluster_flattened_error_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_cluster( - clusters.UpdateClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - cluster=clusters.Cluster(project_id='project_id_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.StopClusterRequest, - dict, -]) -def test_stop_cluster(request_type, transport: str = 'grpc'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.stop_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.StopClusterRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_stop_cluster_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_cluster), - '__call__') as call: - client.stop_cluster() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.StopClusterRequest() - -@pytest.mark.asyncio -async def test_stop_cluster_async(transport: str = 'grpc_asyncio', request_type=clusters.StopClusterRequest): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
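The flattened_error tests above pin down a calling convention: a method takes either a prebuilt request object or individual keyword fields, never both. A hedged sketch of that guard follows (update_cluster_sketch is a hypothetical stand-in; only the ValueError-on-both behavior is taken from the tests):

def update_cluster_sketch(request=None, *, project_id=None, region=None,
                          cluster_name=None, cluster=None, update_mask=None):
    # Either `request` or the flattened fields may be set, never both.
    flattened = (project_id, region, cluster_name, cluster, update_mask)
    if request is not None and any(v is not None for v in flattened):
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")
    if request is None:
        request = {"project_id": project_id, "region": region,
                   "cluster_name": cluster_name, "cluster": cluster,
                   "update_mask": update_mask}
    return request

update_cluster_sketch(project_id="p", region="r", cluster_name="c")  # flattened form is fine
try:
    update_cluster_sketch({"project_id": "p"}, project_id="p")  # mixing both forms
except ValueError:
    pass  # the tests assert exactly this rejection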
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.stop_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- response = await client.stop_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.StopClusterRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_stop_cluster_async_from_dict():
- await test_stop_cluster_async(request_type=dict)
-
-
-def test_stop_cluster_field_headers():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.StopClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.stop_cluster),
- '__call__') as call:
- call.return_value = operations_pb2.Operation(name='operations/op')
- client.stop_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_stop_cluster_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.StopClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.stop_cluster),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.stop_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
- clusters.StartClusterRequest,
- dict,
-])
-def test_start_cluster(request_type, transport: str = 'grpc'):
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.start_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.start_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.StartClusterRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_start_cluster_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_cluster), - '__call__') as call: - client.start_cluster() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.StartClusterRequest() - -@pytest.mark.asyncio -async def test_start_cluster_async(transport: str = 'grpc_asyncio', request_type=clusters.StartClusterRequest): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.start_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.StartClusterRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_start_cluster_async_from_dict(): - await test_start_cluster_async(request_type=dict) - - -def test_start_cluster_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clusters.StartClusterRequest() - - request.project_id = 'project_id_value' - request.region = 'region_value' - request.cluster_name = 'cluster_name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_cluster), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.start_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_start_cluster_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.StartClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_cluster),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.start_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
- clusters.DeleteClusterRequest,
- dict,
-])
-def test_delete_cluster(request_type, transport: str = 'grpc'):
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.delete_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.DeleteClusterRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_delete_cluster_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_cluster),
- '__call__') as call:
- client.delete_cluster()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.DeleteClusterRequest()
-
-@pytest.mark.asyncio
-async def test_delete_cluster_async(transport: str = 'grpc_asyncio', request_type=clusters.DeleteClusterRequest):
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- response = await client.delete_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.DeleteClusterRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_delete_cluster_async_from_dict():
- await test_delete_cluster_async(request_type=dict)
-
-
-def test_delete_cluster_field_headers():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.DeleteClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_cluster),
- '__call__') as call:
- call.return_value = operations_pb2.Operation(name='operations/op')
- client.delete_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_cluster_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.DeleteClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_cluster),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.delete_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-def test_delete_cluster_flattened():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_cluster( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster_name - mock_val = 'cluster_name_value' - assert arg == mock_val - - -def test_delete_cluster_flattened_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_cluster( - clusters.DeleteClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - -@pytest.mark.asyncio -async def test_delete_cluster_flattened_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_cluster( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster_name - mock_val = 'cluster_name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_cluster_flattened_error_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_cluster( - clusters.DeleteClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.GetClusterRequest, - dict, -]) -def test_get_cluster(request_type, transport: str = 'grpc'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = clusters.Cluster( - project_id='project_id_value', - cluster_name='cluster_name_value', - cluster_uuid='cluster_uuid_value', - ) - response = client.get_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clusters.Cluster) - assert response.project_id == 'project_id_value' - assert response.cluster_name == 'cluster_name_value' - assert response.cluster_uuid == 'cluster_uuid_value' - - -def test_get_cluster_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cluster), - '__call__') as call: - client.get_cluster() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() - -@pytest.mark.asyncio -async def test_get_cluster_async(transport: str = 'grpc_asyncio', request_type=clusters.GetClusterRequest): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clusters.Cluster( - project_id='project_id_value', - cluster_name='cluster_name_value', - cluster_uuid='cluster_uuid_value', - )) - response = await client.get_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clusters.Cluster) - assert response.project_id == 'project_id_value' - assert response.cluster_name == 'cluster_name_value' - assert response.cluster_uuid == 'cluster_uuid_value' - - -@pytest.mark.asyncio -async def test_get_cluster_async_from_dict(): - await test_get_cluster_async(request_type=dict) - - -def test_get_cluster_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = clusters.GetClusterRequest() - - request.project_id = 'project_id_value' - request.region = 'region_value' - request.cluster_name = 'cluster_name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cluster), - '__call__') as call: - call.return_value = clusters.Cluster() - client.get_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_cluster_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.GetClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_cluster),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.Cluster())
- await client.get_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-def test_get_cluster_flattened():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = clusters.Cluster()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_cluster(
- project_id='project_id_value',
- region='region_value',
- cluster_name='cluster_name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].region
- mock_val = 'region_value'
- assert arg == mock_val
- arg = args[0].cluster_name
- mock_val = 'cluster_name_value'
- assert arg == mock_val
-
-
-def test_get_cluster_flattened_error():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_cluster(
- clusters.GetClusterRequest(),
- project_id='project_id_value',
- region='region_value',
- cluster_name='cluster_name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_cluster_flattened_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = clusters.Cluster()
-
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.Cluster())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_cluster( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster_name - mock_val = 'cluster_name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_cluster_flattened_error_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_cluster( - clusters.GetClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.ListClustersRequest, - dict, -]) -def test_list_clusters(request_type, transport: str = 'grpc'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clusters.ListClustersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_clusters(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClustersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_clusters_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__') as call: - client.list_clusters() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() - -@pytest.mark.asyncio -async def test_list_clusters_async(transport: str = 'grpc_asyncio', request_type=clusters.ListClustersRequest): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clusters.ListClustersResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_clusters(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.ListClustersRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListClustersAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_clusters_async_from_dict():
- await test_list_clusters_async(request_type=dict)
-
-
-def test_list_clusters_field_headers():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.ListClustersRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_clusters),
- '__call__') as call:
- call.return_value = clusters.ListClustersResponse()
- client.list_clusters(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_clusters_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.ListClustersRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_clusters),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.ListClustersResponse())
- await client.list_clusters(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value',
- ) in kw['metadata']
-
-
-def test_list_clusters_flattened():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_clusters),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = clusters.ListClustersResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_clusters(
- project_id='project_id_value',
- region='region_value',
- filter='filter_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - - -def test_list_clusters_flattened_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_clusters( - clusters.ListClustersRequest(), - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - -@pytest.mark.asyncio -async def test_list_clusters_flattened_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clusters.ListClustersResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.ListClustersResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_clusters( - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_clusters_flattened_error_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_clusters( - clusters.ListClustersRequest(), - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - - -def test_list_clusters_pager(transport_name: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - clusters.Cluster(), - ], - next_page_token='abc', - ), - clusters.ListClustersResponse( - clusters=[], - next_page_token='def', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - ], - next_page_token='ghi', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project_id', ''), - ('region', ''), - )), - ) - pager = client.list_clusters(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, clusters.Cluster) - for i in results) -def test_list_clusters_pages(transport_name: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - clusters.Cluster(), - ], - next_page_token='abc', - ), - clusters.ListClustersResponse( - clusters=[], - next_page_token='def', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - ], - next_page_token='ghi', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - ], - ), - RuntimeError, - ) - pages = list(client.list_clusters(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_clusters_async_pager(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - clusters.Cluster(), - ], - next_page_token='abc', - ), - clusters.ListClustersResponse( - clusters=[], - next_page_token='def', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - ], - next_page_token='ghi', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_clusters(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, clusters.Cluster) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_clusters_async_pages(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_clusters), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - clusters.Cluster(), - ], - next_page_token='abc', - ), - clusters.ListClustersResponse( - clusters=[], - next_page_token='def', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - ], - next_page_token='ghi', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_clusters(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - clusters.DiagnoseClusterRequest, - dict, -]) -def test_diagnose_cluster(request_type, transport: str = 'grpc'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.diagnose_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.diagnose_cluster(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_diagnose_cluster_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.diagnose_cluster), - '__call__') as call: - client.diagnose_cluster() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() - -@pytest.mark.asyncio -async def test_diagnose_cluster_async(transport: str = 'grpc_asyncio', request_type=clusters.DiagnoseClusterRequest): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.diagnose_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.diagnose_cluster(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == clusters.DiagnoseClusterRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_diagnose_cluster_async_from_dict():
- await test_diagnose_cluster_async(request_type=dict)
-
-
-def test_diagnose_cluster_field_headers():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.DiagnoseClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.diagnose_cluster),
- '__call__') as call:
- call.return_value = operations_pb2.Operation(name='operations/op')
- client.diagnose_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_diagnose_cluster_field_headers_async():
- client = ClusterControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = clusters.DiagnoseClusterRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.cluster_name = 'cluster_name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.diagnose_cluster),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.diagnose_cluster(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&cluster_name=cluster_name_value',
- ) in kw['metadata']
-
-
-def test_diagnose_cluster_flattened():
- client = ClusterControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.diagnose_cluster),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.diagnose_cluster(
- project_id='project_id_value',
- region='region_value',
- cluster_name='cluster_name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster_name - mock_val = 'cluster_name_value' - assert arg == mock_val - - -def test_diagnose_cluster_flattened_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.diagnose_cluster( - clusters.DiagnoseClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - -@pytest.mark.asyncio -async def test_diagnose_cluster_flattened_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.diagnose_cluster), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.diagnose_cluster( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].cluster_name - mock_val = 'cluster_name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_diagnose_cluster_flattened_error_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.diagnose_cluster( - clusters.DiagnoseClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.CreateClusterRequest, - dict, -]) -def test_create_cluster_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request_init["cluster"] = {'project_id': 'project_id_value', 'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {'seconds': 
751, 'nanos': 543}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'virtual_cluster_config': {'staging_bucket': 'staging_bucket_value', 'kubernetes_cluster_config': {'kubernetes_namespace': 'kubernetes_namespace_value', 'gke_cluster_config': {'gke_cluster_target': 'gke_cluster_target_value', 'node_pool_target': [{'node_pool': 'node_pool_value', 'roles': [1], 'node_pool_config': {'config': {'machine_type': 'machine_type_value', 'local_ssd_count': 1596, 'preemptible': True, 'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value', 'gpu_partition_size': 'gpu_partition_size_value'}], 'min_cpu_platform': 'min_cpu_platform_value', 'boot_disk_kms_key': 'boot_disk_kms_key_value', 'spot': True}, 'locations': ['locations_value1', 'locations_value2'], 'autoscaling': {'min_node_count': 1489, 'max_node_count': 1491}}}]}, 'kubernetes_software_config': {'component_version': {}, 'properties': {}}}, 'auxiliary_services_config': {'metastore_config': {}, 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}}, 'labels': {}, 'status': {'state': 1, 'detail': 'detail_value', 'state_start_time': {}, 'substate': 1}, 'status_history': {}, 'cluster_uuid': 'cluster_uuid_value', 'metrics': {'hdfs_metrics': {}, 'yarn_metrics': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_cluster(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_cluster_rest_required_fields(request_type=clusters.CreateClusterRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("action_on_failed_primary_workers", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_cluster(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_cluster_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("actionOnFailedPrimaryWorkers", "requestId", )) & set(("projectId", "region", "cluster", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_cluster_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_create_cluster") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_create_cluster") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.CreateClusterRequest.pb(clusters.CreateClusterRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = clusters.CreateClusterRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_cluster_rest_bad_request(transport: str = 'rest', request_type=clusters.CreateClusterRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request_init["cluster"] = {'project_id': 'project_id_value', 'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 
'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {'seconds': 751, 'nanos': 543}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'virtual_cluster_config': {'staging_bucket': 'staging_bucket_value', 'kubernetes_cluster_config': {'kubernetes_namespace': 'kubernetes_namespace_value', 'gke_cluster_config': {'gke_cluster_target': 'gke_cluster_target_value', 'node_pool_target': [{'node_pool': 'node_pool_value', 'roles': [1], 'node_pool_config': {'config': {'machine_type': 'machine_type_value', 'local_ssd_count': 1596, 'preemptible': True, 'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value', 'gpu_partition_size': 'gpu_partition_size_value'}], 'min_cpu_platform': 'min_cpu_platform_value', 
'boot_disk_kms_key': 'boot_disk_kms_key_value', 'spot': True}, 'locations': ['locations_value1', 'locations_value2'], 'autoscaling': {'min_node_count': 1489, 'max_node_count': 1491}}}]}, 'kubernetes_software_config': {'component_version': {}, 'properties': {}}}, 'auxiliary_services_config': {'metastore_config': {}, 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}}, 'labels': {}, 'status': {'state': 1, 'detail': 'detail_value', 'state_start_time': {}, 'substate': 1}, 'status_history': {}, 'cluster_uuid': 'cluster_uuid_value', 'metrics': {'hdfs_metrics': {}, 'yarn_metrics': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_cluster(request) - - -def test_create_cluster_rest_flattened(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - cluster=clusters.Cluster(project_id='project_id_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/clusters" % client.transport._host, args[1]) - - -def test_create_cluster_rest_flattened_error(transport: str = 'rest'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_cluster( - clusters.CreateClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster=clusters.Cluster(project_id='project_id_value'), - ) - - -def test_create_cluster_rest_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.UpdateClusterRequest, - dict, -]) -def test_update_cluster_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request_init["cluster"] = {'project_id': 'project_id_value', 'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 
'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {'seconds': 751, 'nanos': 543}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'virtual_cluster_config': {'staging_bucket': 'staging_bucket_value', 'kubernetes_cluster_config': {'kubernetes_namespace': 'kubernetes_namespace_value', 'gke_cluster_config': {'gke_cluster_target': 'gke_cluster_target_value', 'node_pool_target': [{'node_pool': 'node_pool_value', 'roles': [1], 'node_pool_config': {'config': {'machine_type': 'machine_type_value', 'local_ssd_count': 1596, 'preemptible': True, 'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value', 'gpu_partition_size': 'gpu_partition_size_value'}], 'min_cpu_platform': 'min_cpu_platform_value', 'boot_disk_kms_key': 'boot_disk_kms_key_value', 'spot': True}, 'locations': ['locations_value1', 'locations_value2'], 'autoscaling': {'min_node_count': 1489, 'max_node_count': 1491}}}]}, 'kubernetes_software_config': {'component_version': {}, 'properties': {}}}, 'auxiliary_services_config': {'metastore_config': {}, 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}}, 'labels': {}, 'status': {'state': 1, 'detail': 'detail_value', 'state_start_time': {}, 'substate': 1}, 'status_history': {}, 'cluster_uuid': 'cluster_uuid_value', 'metrics': {'hdfs_metrics': {}, 'yarn_metrics': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_cluster(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_update_cluster_rest_required_fields(request_type=clusters.UpdateClusterRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["cluster_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["clusterName"] = 'cluster_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("graceful_decommission_timeout", "request_id", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "clusterName" in jsonified_request - assert jsonified_request["clusterName"] == 'cluster_name_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_cluster(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_cluster_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("gracefulDecommissionTimeout", "requestId", "updateMask", )) & set(("projectId", "region", "clusterName", "cluster", "updateMask", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cluster_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_update_cluster") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_update_cluster") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.UpdateClusterRequest.pb(clusters.UpdateClusterRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = clusters.UpdateClusterRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_cluster_rest_bad_request(transport: str = 'rest', request_type=clusters.UpdateClusterRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request_init["cluster"] = {'project_id': 'project_id_value', 'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 
'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {'seconds': 751, 'nanos': 543}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'virtual_cluster_config': {'staging_bucket': 'staging_bucket_value', 'kubernetes_cluster_config': {'kubernetes_namespace': 'kubernetes_namespace_value', 'gke_cluster_config': {'gke_cluster_target': 'gke_cluster_target_value', 'node_pool_target': [{'node_pool': 'node_pool_value', 'roles': [1], 'node_pool_config': {'config': {'machine_type': 'machine_type_value', 'local_ssd_count': 1596, 'preemptible': True, 'accelerators': [{'accelerator_count': 1805, 'accelerator_type': 'accelerator_type_value', 'gpu_partition_size': 
'gpu_partition_size_value'}], 'min_cpu_platform': 'min_cpu_platform_value', 'boot_disk_kms_key': 'boot_disk_kms_key_value', 'spot': True}, 'locations': ['locations_value1', 'locations_value2'], 'autoscaling': {'min_node_count': 1489, 'max_node_count': 1491}}}]}, 'kubernetes_software_config': {'component_version': {}, 'properties': {}}}, 'auxiliary_services_config': {'metastore_config': {}, 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}}, 'labels': {}, 'status': {'state': 1, 'detail': 'detail_value', 'state_start_time': {}, 'substate': 1}, 'status_history': {}, 'cluster_uuid': 'cluster_uuid_value', 'metrics': {'hdfs_metrics': {}, 'yarn_metrics': {}}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_cluster(request) - - -def test_update_cluster_rest_flattened(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - cluster=clusters.Cluster(project_id='project_id_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" % client.transport._host, args[1]) - - -def test_update_cluster_rest_flattened_error(transport: str = 'rest'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
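- # The client cannot merge the two, so it raises ValueError before any request is sent.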
- with pytest.raises(ValueError): - client.update_cluster( - clusters.UpdateClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - cluster=clusters.Cluster(project_id='project_id_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_update_cluster_rest_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.StopClusterRequest, - dict, -]) -def test_stop_cluster_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.stop_cluster(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_stop_cluster_rest_required_fields(request_type=clusters.StopClusterRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["cluster_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["clusterName"] = 'cluster_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "clusterName" in jsonified_request - assert jsonified_request["clusterName"] == 'cluster_name_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
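- # Patching Session.request keeps the test hermetic; no real HTTP traffic is generated.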
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.stop_cluster(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_stop_cluster_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.stop_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "clusterName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_stop_cluster_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_stop_cluster") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_stop_cluster") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.StopClusterRequest.pb(clusters.StopClusterRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = clusters.StopClusterRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.stop_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_stop_cluster_rest_bad_request(transport: str = 'rest', request_type=clusters.StopClusterRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
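- # The canned 400 status below is translated into core_exceptions.BadRequest by the client.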
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.stop_cluster(request) - - -def test_stop_cluster_rest_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.StartClusterRequest, - dict, -]) -def test_start_cluster_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.start_cluster(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_start_cluster_rest_required_fields(request_type=clusters.StartClusterRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["cluster_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["clusterName"] = 'cluster_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "clusterName" in jsonified_request - assert jsonified_request["clusterName"] == 'cluster_name_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
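- # The canned Operation stands in for the service's real long-running response.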
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.start_cluster(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_start_cluster_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.start_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "clusterName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_start_cluster_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_start_cluster") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_start_cluster") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.StartClusterRequest.pb(clusters.StartClusterRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = clusters.StartClusterRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.start_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_start_cluster_rest_bad_request(transport: str = 'rest', request_type=clusters.StartClusterRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
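- # Only the error path is exercised here; success handling is covered above.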
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.start_cluster(request) - - -def test_start_cluster_rest_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.DeleteClusterRequest, - dict, -]) -def test_delete_cluster_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_cluster(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_delete_cluster_rest_required_fields(request_type=clusters.DeleteClusterRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["cluster_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["clusterName"] = 'cluster_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_cluster._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("cluster_uuid", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "clusterName" in jsonified_request - assert jsonified_request["clusterName"] == 'cluster_name_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
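- # Over REST, delete_cluster likewise resolves to a long-running Operation.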
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_cluster(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_cluster_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(("clusterUuid", "requestId", )) & set(("projectId", "region", "clusterName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_cluster_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_delete_cluster") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_delete_cluster") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.DeleteClusterRequest.pb(clusters.DeleteClusterRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = clusters.DeleteClusterRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_cluster_rest_bad_request(transport: str = 'rest', request_type=clusters.DeleteClusterRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock 
the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_cluster(request) - - -def test_delete_cluster_rest_flattened(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" % client.transport._host, args[1]) - - -def test_delete_cluster_rest_flattened_error(transport: str = 'rest'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_cluster( - clusters.DeleteClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - -def test_delete_cluster_rest_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.GetClusterRequest, - dict, -]) -def test_get_cluster_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
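- # Unlike the mutating RPCs, get_cluster returns the Cluster resource directly.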
- return_value = clusters.Cluster( - project_id='project_id_value', - cluster_name='cluster_name_value', - cluster_uuid='cluster_uuid_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = clusters.Cluster.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_cluster(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, clusters.Cluster) - assert response.project_id == 'project_id_value' - assert response.cluster_name == 'cluster_name_value' - assert response.cluster_uuid == 'cluster_uuid_value' - - -def test_get_cluster_rest_required_fields(request_type=clusters.GetClusterRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["cluster_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["clusterName"] = 'cluster_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "clusterName" in jsonified_request - assert jsonified_request["clusterName"] == 'cluster_name_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = clusters.Cluster() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = clusters.Cluster.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_cluster(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_cluster_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "clusterName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_cluster_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_get_cluster") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_get_cluster") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.GetClusterRequest.pb(clusters.GetClusterRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = clusters.Cluster.to_json(clusters.Cluster()) - - request = clusters.GetClusterRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = clusters.Cluster() - - client.get_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_cluster_rest_bad_request(transport: str = 'rest', request_type=clusters.GetClusterRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_cluster(request) - - -def test_get_cluster_rest_flattened(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
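- # The flattened keyword arguments below are assembled into a GetClusterRequest internally.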
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = clusters.Cluster() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = clusters.Cluster.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" % client.transport._host, args[1]) - - -def test_get_cluster_rest_flattened_error(transport: str = 'rest'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_cluster( - clusters.GetClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - -def test_get_cluster_rest_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - clusters.ListClustersRequest, - dict, -]) -def test_list_clusters_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = clusters.ListClustersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = clusters.ListClustersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_clusters(request) - - # Establish that the response is the type that we expect. 
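- # The raw response is wrapped in a pager so iteration can span multiple pages transparently.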
- assert isinstance(response, pagers.ListClustersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_clusters_rest_required_fields(request_type=clusters.ListClustersRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_clusters._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_clusters._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = clusters.ListClustersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = clusters.ListClustersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_clusters(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_clusters_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_clusters._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("projectId", "region", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_clusters_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_list_clusters") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_list_clusters") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.ListClustersRequest.pb(clusters.ListClustersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = clusters.ListClustersResponse.to_json(clusters.ListClustersResponse()) - - request = clusters.ListClustersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = clusters.ListClustersResponse() - - client.list_clusters(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_clusters_rest_bad_request(transport: str = 'rest', request_type=clusters.ListClustersRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_clusters(request) - - -def test_list_clusters_rest_flattened(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = clusters.ListClustersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = clusters.ListClustersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_clusters(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/clusters" % client.transport._host, args[1]) - - -def test_list_clusters_rest_flattened_error(transport: str = 'rest'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_clusters( - clusters.ListClustersRequest(), - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - - -def test_list_clusters_rest_pager(transport: str = 'rest'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - clusters.Cluster(), - ], - next_page_token='abc', - ), - clusters.ListClustersResponse( - clusters=[], - next_page_token='def', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - ], - next_page_token='ghi', - ), - clusters.ListClustersResponse( - clusters=[ - clusters.Cluster(), - clusters.Cluster(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(clusters.ListClustersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1', 'region': 'sample2'} - - pager = client.list_clusters(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, clusters.Cluster) - for i in results) - - pages = list(client.list_clusters(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - clusters.DiagnoseClusterRequest, - dict, -]) -def test_diagnose_cluster_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.diagnose_cluster(request) - - # Establish that the response is the type that we expect. 
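- # diagnose_cluster follows the same Operation-returning pattern as the other cluster RPCs.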
- assert response.operation.name == "operations/spam" - - -def test_diagnose_cluster_rest_required_fields(request_type=clusters.DiagnoseClusterRequest): - transport_class = transports.ClusterControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["cluster_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).diagnose_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["clusterName"] = 'cluster_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).diagnose_cluster._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "clusterName" in jsonified_request - assert jsonified_request["clusterName"] == 'cluster_name_value' - - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.diagnose_cluster(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_diagnose_cluster_rest_unset_required_fields(): - transport = transports.ClusterControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.diagnose_cluster._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "clusterName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_diagnose_cluster_rest_interceptors(null_interceptor): - transport = transports.ClusterControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClusterControllerRestInterceptor(), - ) - client = ClusterControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "post_diagnose_cluster") as post, \ - mock.patch.object(transports.ClusterControllerRestInterceptor, "pre_diagnose_cluster") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = clusters.DiagnoseClusterRequest.pb(clusters.DiagnoseClusterRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = clusters.DiagnoseClusterRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.diagnose_cluster(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_diagnose_cluster_rest_bad_request(transport: str = 'rest', request_type=clusters.DiagnoseClusterRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.diagnose_cluster(request) - - -def test_diagnose_cluster_rest_flattened(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2', 'cluster_name': 'sample3'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.diagnose_cluster(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose" % client.transport._host, args[1]) - - -def test_diagnose_cluster_rest_flattened_error(transport: str = 'rest'): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.diagnose_cluster( - clusters.DiagnoseClusterRequest(), - project_id='project_id_value', - region='region_value', - cluster_name='cluster_name_value', - ) - - -def test_diagnose_cluster_rest_error(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ClusterControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ClusterControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClusterControllerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ClusterControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClusterControllerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
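# The constructor checks exercised in this test all follow one rule: exactly
# one source of credentials may be supplied, and a pre-built transport must
# carry its own. A sketch of the guard being tested, written as a
# hypothetical helper (not the client's actual implementation):
#
#   def check_transport_args(transport=None, credentials=None, client_options=None):
#       if transport is not None and (credentials is not None or client_options):
#           raise ValueError("credentials/options must be set on the transport instance")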
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClusterControllerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ClusterControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClusterControllerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ClusterControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ClusterControllerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ClusterControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ClusterControllerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ClusterControllerGrpcTransport, - transports.ClusterControllerGrpcAsyncIOTransport, - transports.ClusterControllerRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = ClusterControllerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ClusterControllerGrpcTransport, - ) - -def test_cluster_controller_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ClusterControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_cluster_controller_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.ClusterControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
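# The base-transport test below relies on the abstract transport exposing
# every RPC as a stub that raises until a concrete transport (gRPC or REST)
# overrides it. A minimal sketch of that contract (illustrative class only,
# not the generated one):
#
#   class _BaseTransport:
#       def create_cluster(self, request):
#           raise NotImplementedError()
#       def close(self):
#           raise NotImplementedError()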
- methods = ( - 'create_cluster', - 'update_cluster', - 'stop_cluster', - 'start_cluster', - 'delete_cluster', - 'get_cluster', - 'list_clusters', - 'diagnose_cluster', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_cluster_controller_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ClusterControllerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_cluster_controller_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ClusterControllerTransport() - adc.assert_called_once() - - -def test_cluster_controller_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ClusterControllerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ClusterControllerGrpcTransport, - transports.ClusterControllerGrpcAsyncIOTransport, - ], -) -def test_cluster_controller_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
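# The ADC tests above and below mock out google.auth.default, the single
# entry point the transports use to discover credentials (env var, gcloud
# user credentials, then the metadata server). A sketch of the real call
# being intercepted, using only the public google.auth surface:
#
#   import google.auth
#   creds, project_id = google.auth.default(
#       scopes=["https://www.googleapis.com/auth/cloud-platform"])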
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ClusterControllerGrpcTransport, - transports.ClusterControllerGrpcAsyncIOTransport, - transports.ClusterControllerRestTransport, - ], -) -def test_cluster_controller_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ClusterControllerGrpcTransport, grpc_helpers), - (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cluster_controller_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ClusterControllerGrpcTransport, transports.ClusterControllerGrpcAsyncIOTransport]) -def test_cluster_controller_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
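# The fallback asserted next: with no ready-made ssl_channel_credentials,
# the transport invokes the client-cert callback and builds the channel
# credentials itself. A sketch of that step with illustrative values:
#
#   cert_bytes, key_bytes = client_cert_source_callback()
#   ssl_creds = grpc.ssl_channel_credentials(
#       certificate_chain=cert_bytes, private_key=key_bytes)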
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_cluster_controller_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.ClusterControllerRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_cluster_controller_rest_lro_client(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cluster_controller_host_no_port(transport_name): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataproc.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_cluster_controller_host_with_port(transport_name): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataproc.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataproc.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_cluster_controller_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ClusterControllerClient( - credentials=creds1, - transport=transport_name, - ) - client2 = ClusterControllerClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_cluster._session - session2 = client2.transport.create_cluster._session - assert session1 != session2 - session1 = client1.transport.update_cluster._session - session2 = client2.transport.update_cluster._session - assert session1 != session2 - session1 = client1.transport.stop_cluster._session - session2 = client2.transport.stop_cluster._session - assert session1 != session2 - session1 = client1.transport.start_cluster._session - session2 = client2.transport.start_cluster._session - assert session1 != session2 - session1 = client1.transport.delete_cluster._session - session2 = client2.transport.delete_cluster._session - assert session1 != session2 - 
session1 = client1.transport.get_cluster._session
- session2 = client2.transport.get_cluster._session
- assert session1 != session2
- session1 = client1.transport.list_clusters._session
- session2 = client2.transport.list_clusters._session
- assert session1 != session2
- session1 = client1.transport.diagnose_cluster._session
- session2 = client2.transport.diagnose_cluster._session
- assert session1 != session2
-
-
-def test_cluster_controller_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ClusterControllerGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_cluster_controller_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ClusterControllerGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ClusterControllerGrpcTransport, transports.ClusterControllerGrpcAsyncIOTransport])
-def test_cluster_controller_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ClusterControllerGrpcTransport, transports.ClusterControllerGrpcAsyncIOTransport]) -def test_cluster_controller_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cluster_controller_grpc_lro_client(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_cluster_controller_grpc_lro_async_client(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_cluster_path(): - project = "squid" - location = "clam" - cluster = "whelk" - expected = "projects/{project}/locations/{location}/clusters/{cluster}".format(project=project, location=location, cluster=cluster, ) - actual = ClusterControllerClient.cluster_path(project, location, cluster) - assert expected == actual - - -def test_parse_cluster_path(): - expected = { - "project": "octopus", - "location": "oyster", - "cluster": "nudibranch", - } - path = ClusterControllerClient.cluster_path(**expected) - - # Check that the path construction is reversible. - actual = ClusterControllerClient.parse_cluster_path(path) - assert expected == actual - -def test_node_group_path(): - project = "cuttlefish" - region = "mussel" - cluster = "winkle" - node_group = "nautilus" - expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(project=project, region=region, cluster=cluster, node_group=node_group, ) - actual = ClusterControllerClient.node_group_path(project, region, cluster, node_group) - assert expected == actual - - -def test_parse_node_group_path(): - expected = { - "project": "scallop", - "region": "abalone", - "cluster": "squid", - "node_group": "clam", - } - path = ClusterControllerClient.node_group_path(**expected) - - # Check that the path construction is reversible. 
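# The *_path/parse_*_path helpers verified here are a string template plus
# an inverse regex. A minimal sketch of the round trip they implement
# (pattern shortened for illustration):
#
#   import re
#   path = "projects/{project}/regions/{region}".format(project="p1", region="r1")
#   m = re.match(r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)$", path)
#   m.groupdict()  # -> {'project': 'p1', 'region': 'r1'}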
- actual = ClusterControllerClient.parse_node_group_path(path) - assert expected == actual - -def test_service_path(): - project = "whelk" - location = "octopus" - service = "oyster" - expected = "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, ) - actual = ClusterControllerClient.service_path(project, location, service) - assert expected == actual - - -def test_parse_service_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "service": "mussel", - } - path = ClusterControllerClient.service_path(**expected) - - # Check that the path construction is reversible. - actual = ClusterControllerClient.parse_service_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ClusterControllerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = ClusterControllerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ClusterControllerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = ClusterControllerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = ClusterControllerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ClusterControllerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ClusterControllerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = ClusterControllerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ClusterControllerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = ClusterControllerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = ClusterControllerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ClusterControllerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ClusterControllerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = ClusterControllerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ClusterControllerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ClusterControllerTransport, '_prep_wrapped_messages') as prep: - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ClusterControllerTransport, '_prep_wrapped_messages') as prep: - transport_class = ClusterControllerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
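# These REST bad-request tests stub requests.Session.request to return a
# bare 400 Response; google.api_core then maps the HTTP status onto the
# matching exception class. Sketch of the mapping being relied on:
#
#   from google.api_core import exceptions
#   exc = exceptions.from_http_status(400, "bad request")
#   assert isinstance(exc, exceptions.BadRequest)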
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
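# CancelOperation and DeleteOperation return google.protobuf.Empty, which
# serializes to "{}" over JSON; the client surfaces that as None, which is
# why these tests fake '{}' bodies and assert the response is None. Sketch:
#
#   from google.protobuf import empty_pb2, json_format
#   json_format.MessageToJson(empty_pb2.Empty())  # -> '{}'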
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
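# The "x-goog-request-params" metadata asserted in these field-header tests
# is just URL-encoded key=value pairs derived from request fields. A sketch
# of how such a header value could be assembled (illustrative only, not the
# gapic routing_header implementation):
#
#   from urllib.parse import quote
#   params = {"name": "locations"}
#   "&".join(f"{k}={quote(str(v))}" for k, v in params.items())  # -> 'name=locations'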
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
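# The async variants cannot set call.return_value to a bare message because
# the client awaits the stub; FakeUnaryUnaryCall wraps the value in an
# awaitable. A minimal stand-in with the same shape (illustrative only):
#
#   class _FakeCall:
#       def __init__(self, response=None):
#           self._response = response
#       def __await__(self):
#           async def _coro():
#               return self._response
#           return _coro().__await__()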
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        response = client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
-    client = ClusterControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        )
-        response = await client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-
-def test_set_iam_policy_field_headers():
-    client = ClusterControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        call.return_value = policy_pb2.Policy()
-
-        client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_set_iam_policy_field_headers_async():
-    client = ClusterControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-
-        await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = ClusterControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = ClusterControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (ClusterControllerClient, transports.ClusterControllerGrpcTransport), - (ClusterControllerAsyncClient, transports.ClusterControllerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_job_controller.py b/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_job_controller.py deleted file mode 100644 index f0216ed9..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ /dev/null @@ -1,6242 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
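# (Aside, ahead of the job-controller imports below.) The transport-lifecycle
# tests that closed out the cluster-controller suite above reduce to this
# context-manager contract; _FakeClientSketch/_FakeTransportSketch are
# illustrative stand-ins, not generated classes:
from unittest import mock as _mock_sketch

class _FakeTransportSketch:
    def close(self):
        pass  # a real transport closes its gRPC channel or HTTP session

class _FakeClientSketch:
    def __init__(self):
        self.transport = _FakeTransportSketch()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc, tb):
        self.transport.close()

_client_sketch = _FakeClientSketch()
with _mock_sketch.patch.object(type(_client_sketch.transport), "close") as _close:
    with _client_sketch:
        _close.assert_not_called()  # nothing is closed while the client is live
    _close.assert_called_once()     # exactly one close on context exit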
-#
-import os
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async  # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.dataproc_v1.services.job_controller import JobControllerAsyncClient
-from google.cloud.dataproc_v1.services.job_controller import JobControllerClient
-from google.cloud.dataproc_v1.services.job_controller import pagers
-from google.cloud.dataproc_v1.services.job_controller import transports
-from google.cloud.dataproc_v1.types import jobs
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import options_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-import google.auth
-
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
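# (Aside before the endpoint helpers that follow.) test__get_default_mtls_endpoint
# below fully pins down the rewrite the client performs; this standalone function
# satisfies the same assertions and is offered only as a sketch of that behavior:
def _sketch_default_mtls_endpoint(api_endpoint):
    if not api_endpoint:
        return api_endpoint
    for suffix in (".sandbox.googleapis.com", ".googleapis.com"):
        if api_endpoint.endswith(suffix):
            name = api_endpoint[: -len(suffix)]
            # Already an mTLS endpoint: leave it untouched.
            if name.endswith(".mtls"):
                return api_endpoint
            return name + ".mtls" + suffix
    # Non-Google endpoints pass through unchanged.
    return api_endpoint

assert _sketch_default_mtls_endpoint(None) is None
assert _sketch_default_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert _sketch_default_mtls_endpoint("example.mtls.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
assert _sketch_default_mtls_endpoint("api.example.com") == "api.example.com"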
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert JobControllerClient._get_default_mtls_endpoint(None) is None - assert JobControllerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert JobControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert JobControllerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert JobControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert JobControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (JobControllerClient, "grpc"), - (JobControllerAsyncClient, "grpc_asyncio"), - (JobControllerClient, "rest"), -]) -def test_job_controller_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.JobControllerGrpcTransport, "grpc"), - (transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.JobControllerRestTransport, "rest"), -]) -def test_job_controller_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (JobControllerClient, "grpc"), - (JobControllerAsyncClient, "grpc_asyncio"), - (JobControllerClient, "rest"), -]) -def test_job_controller_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 
'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataproc.googleapis.com' - ) - - -def test_job_controller_client_get_transport_class(): - transport = JobControllerClient.get_transport_class() - available_transports = [ - transports.JobControllerGrpcTransport, - transports.JobControllerRestTransport, - ] - assert transport in available_transports - - transport = JobControllerClient.get_transport_class("grpc") - assert transport == transports.JobControllerGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (JobControllerClient, transports.JobControllerGrpcTransport, "grpc"), - (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (JobControllerClient, transports.JobControllerRestTransport, "rest"), -]) -@mock.patch.object(JobControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobControllerClient)) -@mock.patch.object(JobControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobControllerAsyncClient)) -def test_job_controller_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(JobControllerClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(JobControllerClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError):
-            client = client_class(transport=transport_name)
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError):
-            client = client_class(transport=transport_name)
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (JobControllerClient, transports.JobControllerGrpcTransport, "grpc", "true"),
-    (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (JobControllerClient, transports.JobControllerGrpcTransport, "grpc", "false"),
-    (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (JobControllerClient, transports.JobControllerRestTransport, "rest", "true"),
-    (JobControllerClient, transports.JobControllerRestTransport, "rest", "false"),
-])
-@mock.patch.object(JobControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobControllerClient))
-@mock.patch.object(JobControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobControllerAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_job_controller_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided.
Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - JobControllerClient, JobControllerAsyncClient -]) -@mock.patch.object(JobControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobControllerClient)) -@mock.patch.object(JobControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobControllerAsyncClient)) -def test_job_controller_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (JobControllerClient, transports.JobControllerGrpcTransport, "grpc"), - (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (JobControllerClient, transports.JobControllerRestTransport, "rest"), -]) -def test_job_controller_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
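# (A short aside before the scopes case below.) The six environment cases just
# exercised amount to a small decision table; restated here as a standalone
# sketch, with default_source standing in for google.auth's default-cert lookup
# so the snippet needs no credentials (names are illustrative, not the real API):
import os as _os_sketch

def _sketch_mtls_endpoint_and_cert_source(
    default_endpoint, default_mtls_endpoint,
    api_endpoint=None, client_cert_source=None, default_source=None,
):
    use_cert = _os_sketch.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    use_mtls = _os_sketch.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    # A client certificate is only ever used when explicitly opted in.
    cert_source = None
    if use_cert == "true":
        cert_source = client_cert_source or default_source
    # An explicit api_endpoint always wins.
    if api_endpoint is not None:
        return api_endpoint, cert_source
    if use_mtls == "never":
        return default_endpoint, cert_source
    if use_mtls == "always" or cert_source is not None:
        return default_mtls_endpoint, cert_source
    return default_endpoint, cert_source

# e.g. "auto" plus an available default cert flips to the mTLS endpoint:
_os_sketch.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
assert _sketch_mtls_endpoint_and_cert_source(
    "d.googleapis.com", "d.mtls.googleapis.com", default_source=object()
)[0] == "d.mtls.googleapis.com"
del _os_sketch.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"]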
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (JobControllerClient, transports.JobControllerGrpcTransport, "grpc", grpc_helpers), - (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (JobControllerClient, transports.JobControllerRestTransport, "rest", None), -]) -def test_job_controller_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_job_controller_client_client_options_from_dict(): - with mock.patch('google.cloud.dataproc_v1.services.job_controller.transports.JobControllerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = JobControllerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (JobControllerClient, transports.JobControllerGrpcTransport, "grpc", grpc_helpers), - (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_job_controller_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.SubmitJobRequest, - dict, -]) -def test_submit_job(request_type, transport: str = 'grpc'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job( - driver_output_resource_uri='driver_output_resource_uri_value', - driver_control_files_uri='driver_control_files_uri_value', - job_uuid='job_uuid_value', - done=True, - ) - response = client.submit_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == 'driver_output_resource_uri_value' - assert response.driver_control_files_uri == 'driver_control_files_uri_value' - assert response.job_uuid == 'job_uuid_value' - assert response.done is True - - -def test_submit_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job), - '__call__') as call: - client.submit_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() - -@pytest.mark.asyncio -async def test_submit_job_async(transport: str = 'grpc_asyncio', request_type=jobs.SubmitJobRequest): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
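# (Aside, right before the async return value below.) The async variants wrap
# every canned response in grpc_helpers_async.FakeUnaryUnaryCall so the client
# can `await` it; a homemade minimum showing the idea (_TinyFakeCallSketch is
# an illustrative name, not the api_core class):
import asyncio as _asyncio_sketch

class _TinyFakeCallSketch:
    """Awaitable that immediately resolves to a canned response."""
    def __init__(self, response):
        self._response = response
    def __await__(self):
        async def _resolve():
            return self._response
        return _resolve().__await__()

async def _demo_sketch():
    return await _TinyFakeCallSketch("canned-job")

assert _asyncio_sketch.run(_demo_sketch()) == "canned-job"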
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job(
-            driver_output_resource_uri='driver_output_resource_uri_value',
-            driver_control_files_uri='driver_control_files_uri_value',
-            job_uuid='job_uuid_value',
-            done=True,
-        ))
-        response = await client.submit_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.SubmitJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.Job)
-    assert response.driver_output_resource_uri == 'driver_output_resource_uri_value'
-    assert response.driver_control_files_uri == 'driver_control_files_uri_value'
-    assert response.job_uuid == 'job_uuid_value'
-    assert response.done is True
-
-
-@pytest.mark.asyncio
-async def test_submit_job_async_from_dict():
-    await test_submit_job_async(request_type=dict)
-
-
-def test_submit_job_field_headers():
-    client = JobControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.SubmitJobRequest()
-
-    request.project_id = 'project_id_value'
-    request.region = 'region_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.submit_job),
-            '__call__') as call:
-        call.return_value = jobs.Job()
-        client.submit_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&region=region_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_submit_job_field_headers_async():
-    client = JobControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.SubmitJobRequest()
-
-    request.project_id = 'project_id_value'
-    request.region = 'region_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.submit_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job())
-        await client.submit_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&region=region_value',
-    ) in kw['metadata']
-
-
-def test_submit_job_flattened():
-    client = JobControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.submit_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = jobs.Job()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- client.submit_job( - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].job - mock_val = jobs.Job(reference=jobs.JobReference(project_id='project_id_value')) - assert arg == mock_val - - -def test_submit_job_flattened_error(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.submit_job( - jobs.SubmitJobRequest(), - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - -@pytest.mark.asyncio -async def test_submit_job_flattened_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.submit_job( - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].job - mock_val = jobs.Job(reference=jobs.JobReference(project_id='project_id_value')) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_submit_job_flattened_error_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.submit_job( - jobs.SubmitJobRequest(), - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.SubmitJobRequest, - dict, -]) -def test_submit_job_as_operation(request_type, transport: str = 'grpc'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job_as_operation), - '__call__') as call: - # Designate an appropriate return value for the call. 
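# (Aside before the next return value.) The flattened/error pairs above enforce
# one rule: keyword fields are mutually exclusive with a prebuilt request
# object. Reduced to a toy function (illustrative names only):
def _sketch_submit_job(request=None, *, project_id=None, region=None, job=None):
    has_flattened = any(f is not None for f in (project_id, region, job))
    if request is not None and has_flattened:
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")
    return request or {"project_id": project_id, "region": region, "job": job}

assert _sketch_submit_job(project_id="p", region="r", job="j")["region"] == "r"
try:
    _sketch_submit_job({"prebuilt": True}, project_id="p")
    raise AssertionError("expected ValueError")
except ValueError:
    pass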
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.submit_job_as_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_submit_job_as_operation_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job_as_operation), - '__call__') as call: - client.submit_job_as_operation() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() - -@pytest.mark.asyncio -async def test_submit_job_as_operation_async(transport: str = 'grpc_asyncio', request_type=jobs.SubmitJobRequest): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job_as_operation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.submit_job_as_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_submit_job_as_operation_async_from_dict(): - await test_submit_job_as_operation_async(request_type=dict) - - -def test_submit_job_as_operation_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.SubmitJobRequest() - - request.project_id = 'project_id_value' - request.region = 'region_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_job_as_operation), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.submit_job_as_operation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
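# (Aside before the header assertions resume.) The as_operation tests assert
# isinstance(response, future.Future): the stub hands back a raw
# operations_pb2.Operation and the client wraps it in a pollable future. A toy
# wrapper using the stdlib Future only to show the shape of that contract:
from concurrent.futures import Future as _FutureSketch

def _sketch_wrap_operation(raw_operation):
    fut = _FutureSketch()
    # A real client keeps polling the Operations API; the sketch resolves now.
    fut.set_result(raw_operation)
    return fut

_op_sketch = {"name": "operations/spam"}  # stands in for operations_pb2.Operation
_fut_sketch = _sketch_wrap_operation(_op_sketch)
assert isinstance(_fut_sketch, _FutureSketch)
assert _fut_sketch.result()["name"] == "operations/spam"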
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&region=region_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_submit_job_as_operation_field_headers_async():
-    client = JobControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.SubmitJobRequest()
-
-    request.project_id = 'project_id_value'
-    request.region = 'region_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.submit_job_as_operation),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.submit_job_as_operation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&region=region_value',
-    ) in kw['metadata']
-
-
-def test_submit_job_as_operation_flattened():
-    client = JobControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.submit_job_as_operation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.submit_job_as_operation(
-            project_id='project_id_value',
-            region='region_value',
-            job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].project_id
-        mock_val = 'project_id_value'
-        assert arg == mock_val
-        arg = args[0].region
-        mock_val = 'region_value'
-        assert arg == mock_val
-        arg = args[0].job
-        mock_val = jobs.Job(reference=jobs.JobReference(project_id='project_id_value'))
-        assert arg == mock_val
-
-
-def test_submit_job_as_operation_flattened_error():
-    client = JobControllerClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.submit_job_as_operation(
-            jobs.SubmitJobRequest(),
-            project_id='project_id_value',
-            region='region_value',
-            job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')),
-        )
-
-@pytest.mark.asyncio
-async def test_submit_job_as_operation_flattened_async():
-    client = JobControllerAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.submit_job_as_operation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.submit_job_as_operation( - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].job - mock_val = jobs.Job(reference=jobs.JobReference(project_id='project_id_value')) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_submit_job_as_operation_flattened_error_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.submit_job_as_operation( - jobs.SubmitJobRequest(), - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.GetJobRequest, - dict, -]) -def test_get_job(request_type, transport: str = 'grpc'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job( - driver_output_resource_uri='driver_output_resource_uri_value', - driver_control_files_uri='driver_control_files_uri_value', - job_uuid='job_uuid_value', - done=True, - ) - response = client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == 'driver_output_resource_uri_value' - assert response.driver_control_files_uri == 'driver_control_files_uri_value' - assert response.job_uuid == 'job_uuid_value' - assert response.done is True - - -def test_get_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- client.get_job()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.GetJobRequest()
-
-@pytest.mark.asyncio
-async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=jobs.GetJobRequest):
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job(
- driver_output_resource_uri='driver_output_resource_uri_value',
- driver_control_files_uri='driver_control_files_uri_value',
- job_uuid='job_uuid_value',
- done=True,
- ))
- response = await client.get_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.GetJobRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, jobs.Job)
- assert response.driver_output_resource_uri == 'driver_output_resource_uri_value'
- assert response.driver_control_files_uri == 'driver_control_files_uri_value'
- assert response.job_uuid == 'job_uuid_value'
- assert response.done is True
-
-
-@pytest.mark.asyncio
-async def test_get_job_async_from_dict():
- await test_get_job_async(request_type=dict)
-
-
-def test_get_job_field_headers():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.GetJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- call.return_value = jobs.Job()
- client.get_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_job_field_headers_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.GetJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job())
- await client.get_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-def test_get_job_flattened():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = jobs.Job()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_job(
- project_id='project_id_value',
- region='region_value',
- job_id='job_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].region
- mock_val = 'region_value'
- assert arg == mock_val
- arg = args[0].job_id
- mock_val = 'job_id_value'
- assert arg == mock_val
-
-
-def test_get_job_flattened_error():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_job(
- jobs.GetJobRequest(),
- project_id='project_id_value',
- region='region_value',
- job_id='job_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_job_flattened_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = jobs.Job()
-
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_job(
- project_id='project_id_value',
- region='region_value',
- job_id='job_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].region
- mock_val = 'region_value'
- assert arg == mock_val
- arg = args[0].job_id
- mock_val = 'job_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_job_flattened_error_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.get_job( - jobs.GetJobRequest(), - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.ListJobsRequest, - dict, -]) -def test_list_jobs(request_type, transport: str = 'grpc'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.ListJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - client.list_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() - -@pytest.mark.asyncio -async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=jobs.ListJobsRequest): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - - -def test_list_jobs_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = jobs.ListJobsRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- call.return_value = jobs.ListJobsResponse()
- client.list_jobs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_jobs_field_headers_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.ListJobsRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse())
- await client.list_jobs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value',
- ) in kw['metadata']
-
-
-def test_list_jobs_flattened():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = jobs.ListJobsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_jobs(
- project_id='project_id_value',
- region='region_value',
- filter='filter_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].region
- mock_val = 'region_value'
- assert arg == mock_val
- arg = args[0].filter
- mock_val = 'filter_value'
- assert arg == mock_val
-
-
-def test_list_jobs_flattened_error():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_jobs(
- jobs.ListJobsRequest(),
- project_id='project_id_value',
- region='region_value',
- filter='filter_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_jobs_flattened_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = jobs.ListJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_jobs( - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_jobs( - jobs.ListJobsRequest(), - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - - -def test_list_jobs_pager(transport_name: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project_id', ''), - ('region', ''), - )), - ) - pager = client.list_jobs(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, jobs.Job) - for i in results) -def test_list_jobs_pages(transport_name: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, jobs.Job) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - jobs.UpdateJobRequest, - dict, -]) -def test_update_job(request_type, transport: str = 'grpc'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job( - driver_output_resource_uri='driver_output_resource_uri_value', - driver_control_files_uri='driver_control_files_uri_value', - job_uuid='job_uuid_value', - done=True, - ) - response = client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == 'driver_output_resource_uri_value' - assert response.driver_control_files_uri == 'driver_control_files_uri_value' - assert response.job_uuid == 'job_uuid_value' - assert response.done is True - - -def test_update_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work.
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_job),
- '__call__') as call:
- client.update_job()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.UpdateJobRequest()
-
-@pytest.mark.asyncio
-async def test_update_job_async(transport: str = 'grpc_asyncio', request_type=jobs.UpdateJobRequest):
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job(
- driver_output_resource_uri='driver_output_resource_uri_value',
- driver_control_files_uri='driver_control_files_uri_value',
- job_uuid='job_uuid_value',
- done=True,
- ))
- response = await client.update_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.UpdateJobRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, jobs.Job)
- assert response.driver_output_resource_uri == 'driver_output_resource_uri_value'
- assert response.driver_control_files_uri == 'driver_control_files_uri_value'
- assert response.job_uuid == 'job_uuid_value'
- assert response.done is True
-
-
-@pytest.mark.asyncio
-async def test_update_job_async_from_dict():
- await test_update_job_async(request_type=dict)
-
-
-def test_update_job_field_headers():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.UpdateJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_job),
- '__call__') as call:
- call.return_value = jobs.Job()
- client.update_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_job_field_headers_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.UpdateJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_job),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job())
- await client.update_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
- jobs.CancelJobRequest,
- dict,
-])
-def test_cancel_job(request_type, transport: str = 'grpc'):
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = jobs.Job(
- driver_output_resource_uri='driver_output_resource_uri_value',
- driver_control_files_uri='driver_control_files_uri_value',
- job_uuid='job_uuid_value',
- done=True,
- )
- response = client.cancel_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.CancelJobRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, jobs.Job)
- assert response.driver_output_resource_uri == 'driver_output_resource_uri_value'
- assert response.driver_control_files_uri == 'driver_control_files_uri_value'
- assert response.job_uuid == 'job_uuid_value'
- assert response.done is True
-
-
-def test_cancel_job_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- client.cancel_job()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.CancelJobRequest()
-
-@pytest.mark.asyncio
-async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=jobs.CancelJobRequest):
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job(
- driver_output_resource_uri='driver_output_resource_uri_value',
- driver_control_files_uri='driver_control_files_uri_value',
- job_uuid='job_uuid_value',
- done=True,
- ))
- response = await client.cancel_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.CancelJobRequest()
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, jobs.Job)
- assert response.driver_output_resource_uri == 'driver_output_resource_uri_value'
- assert response.driver_control_files_uri == 'driver_control_files_uri_value'
- assert response.job_uuid == 'job_uuid_value'
- assert response.done is True
-
-
-@pytest.mark.asyncio
-async def test_cancel_job_async_from_dict():
- await test_cancel_job_async(request_type=dict)
-
-
-def test_cancel_job_field_headers():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.CancelJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- call.return_value = jobs.Job()
- client.cancel_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_cancel_job_field_headers_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.CancelJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job())
- await client.cancel_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-def test_cancel_job_flattened():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = jobs.Job()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.cancel_job(
- project_id='project_id_value',
- region='region_value',
- job_id='job_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].job_id - mock_val = 'job_id_value' - assert arg == mock_val - - -def test_cancel_job_flattened_error(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_job( - jobs.CancelJobRequest(), - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - -@pytest.mark.asyncio -async def test_cancel_job_flattened_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.cancel_job( - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project_id - mock_val = 'project_id_value' - assert arg == mock_val - arg = args[0].region - mock_val = 'region_value' - assert arg == mock_val - arg = args[0].job_id - mock_val = 'job_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_cancel_job_flattened_error_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_job( - jobs.CancelJobRequest(), - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.DeleteJobRequest, - dict, -]) -def test_delete_job(request_type, transport: str = 'grpc'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.DeleteJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- client.delete_job()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.DeleteJobRequest()
-
-@pytest.mark.asyncio
-async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=jobs.DeleteJobRequest):
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- response = await client.delete_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == jobs.DeleteJobRequest()
-
- # Establish that the response is the type that we expect.
- assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_job_async_from_dict():
- await test_delete_job_async(request_type=dict)
-
-
-def test_delete_job_field_headers():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.DeleteJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- call.return_value = None
- client.delete_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_job_field_headers_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = jobs.DeleteJobRequest()
-
- request.project_id = 'project_id_value'
- request.region = 'region_value'
- request.job_id = 'job_id_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'project_id=project_id_value&region=region_value&job_id=job_id_value',
- ) in kw['metadata']
-
-
-def test_delete_job_flattened():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_job(
- project_id='project_id_value',
- region='region_value',
- job_id='job_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].region
- mock_val = 'region_value'
- assert arg == mock_val
- arg = args[0].job_id
- mock_val = 'job_id_value'
- assert arg == mock_val
-
-
-def test_delete_job_flattened_error():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_job(
- jobs.DeleteJobRequest(),
- project_id='project_id_value',
- region='region_value',
- job_id='job_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
-
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_job(
- project_id='project_id_value',
- region='region_value',
- job_id='job_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].project_id
- mock_val = 'project_id_value'
- assert arg == mock_val
- arg = args[0].region
- mock_val = 'region_value'
- assert arg == mock_val
- arg = args[0].job_id
- mock_val = 'job_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_error_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.delete_job( - jobs.DeleteJobRequest(), - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.SubmitJobRequest, - dict, -]) -def test_submit_job_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = jobs.Job( - driver_output_resource_uri='driver_output_resource_uri_value', - driver_control_files_uri='driver_control_files_uri_value', - job_uuid='job_uuid_value', - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.Job.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.submit_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == 'driver_output_resource_uri_value' - assert response.driver_control_files_uri == 'driver_control_files_uri_value' - assert response.job_uuid == 'job_uuid_value' - assert response.done is True - - -def test_submit_job_rest_required_fields(request_type=jobs.SubmitJobRequest): - transport_class = transports.JobControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = jobs.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = jobs.Job.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.submit_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_submit_job_rest_unset_required_fields(): - transport = transports.JobControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.submit_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "job", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_submit_job_rest_interceptors(null_interceptor): - transport = transports.JobControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.JobControllerRestInterceptor(), - ) - client = JobControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.JobControllerRestInterceptor, "post_submit_job") as post, \ - mock.patch.object(transports.JobControllerRestInterceptor, "pre_submit_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = jobs.SubmitJobRequest.pb(jobs.SubmitJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = jobs.Job.to_json(jobs.Job()) - - request = jobs.SubmitJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = jobs.Job() - - client.submit_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_submit_job_rest_bad_request(transport: str = 'rest', request_type=jobs.SubmitJobRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.submit_job(request) - - -def test_submit_job_rest_flattened(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = jobs.Job() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.Job.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.submit_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/jobs:submit" % client.transport._host, args[1]) - - -def test_submit_job_rest_flattened_error(transport: str = 'rest'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.submit_job( - jobs.SubmitJobRequest(), - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - - -def test_submit_job_rest_error(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.SubmitJobRequest, - dict, -]) -def test_submit_job_as_operation_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.submit_job_as_operation(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_submit_job_as_operation_rest_required_fields(request_type=jobs.SubmitJobRequest): - transport_class = transports.JobControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_job_as_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_job_as_operation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.submit_job_as_operation(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_submit_job_as_operation_rest_unset_required_fields(): - transport = transports.JobControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.submit_job_as_operation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "job", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_submit_job_as_operation_rest_interceptors(null_interceptor): - transport = transports.JobControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.JobControllerRestInterceptor(), - ) - client = JobControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.JobControllerRestInterceptor, "post_submit_job_as_operation") as post, \ - mock.patch.object(transports.JobControllerRestInterceptor, "pre_submit_job_as_operation") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = jobs.SubmitJobRequest.pb(jobs.SubmitJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = jobs.SubmitJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.submit_job_as_operation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_submit_job_as_operation_rest_bad_request(transport: str = 'rest', request_type=jobs.SubmitJobRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.submit_job_as_operation(request) - - -def test_submit_job_as_operation_rest_flattened(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.submit_job_as_operation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/jobs:submitAsOperation" % client.transport._host, args[1]) - - -def test_submit_job_as_operation_rest_flattened_error(transport: str = 'rest'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.submit_job_as_operation( - jobs.SubmitJobRequest(), - project_id='project_id_value', - region='region_value', - job=jobs.Job(reference=jobs.JobReference(project_id='project_id_value')), - ) - - -def test_submit_job_as_operation_rest_error(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.GetJobRequest, - dict, -]) -def test_get_job_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
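- # The fake REST payload below is just the proto rendered as JSON, e.g.
- # json_format.MessageToJson(jobs.Job.pb(jobs.Job(done=True))) produces
- # roughly '{"done": true}'.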
- return_value = jobs.Job( - driver_output_resource_uri='driver_output_resource_uri_value', - driver_control_files_uri='driver_control_files_uri_value', - job_uuid='job_uuid_value', - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.Job.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == 'driver_output_resource_uri_value' - assert response.driver_control_files_uri == 'driver_control_files_uri_value' - assert response.job_uuid == 'job_uuid_value' - assert response.done is True - - -def test_get_job_rest_required_fields(request_type=jobs.GetJobRequest): - transport_class = transports.JobControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["job_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["jobId"] = 'job_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "jobId" in jsonified_request - assert jsonified_request["jobId"] == 'job_id_value' - - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = jobs.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
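- # get_job transcodes to an http GET, so the stubbed result below carries
- # no 'body' key; every request field has to travel in query_params.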
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = jobs.Job.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_job_rest_unset_required_fields(): - transport = transports.JobControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "jobId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_rest_interceptors(null_interceptor): - transport = transports.JobControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.JobControllerRestInterceptor(), - ) - client = JobControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.JobControllerRestInterceptor, "post_get_job") as post, \ - mock.patch.object(transports.JobControllerRestInterceptor, "pre_get_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = jobs.Job.to_json(jobs.Job()) - - request = jobs.GetJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = jobs.Job() - - client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_job_rest_bad_request(transport: str = 'rest', request_type=jobs.GetJobRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_job(request) - - -def test_get_job_rest_flattened(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
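- # The flattened kwargs are folded into a GetJobRequest before transcoding;
- # with the sample path params below the call should resolve to roughly
- # .../v1/projects/sample1/regions/sample2/jobs/sample3.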
- return_value = jobs.Job() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.Job.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" % client.transport._host, args[1]) - - -def test_get_job_rest_flattened_error(transport: str = 'rest'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job( - jobs.GetJobRequest(), - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - - -def test_get_job_rest_error(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.ListJobsRequest, - dict, -]) -def test_list_jobs_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = jobs.ListJobsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.ListJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_jobs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_jobs_rest_required_fields(request_type=jobs.ListJobsRequest): - transport_class = transports.JobControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_jobs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("cluster_name", "filter", "job_state_matcher", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = jobs.ListJobsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = jobs.ListJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_jobs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_jobs_rest_unset_required_fields(): - transport = transports.JobControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_jobs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("clusterName", "filter", "jobStateMatcher", "pageSize", "pageToken", )) & set(("projectId", "region", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_jobs_rest_interceptors(null_interceptor): - transport = transports.JobControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.JobControllerRestInterceptor(), - ) - client = JobControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.JobControllerRestInterceptor, "post_list_jobs") as post, \ - mock.patch.object(transports.JobControllerRestInterceptor, "pre_list_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = jobs.ListJobsResponse.to_json(jobs.ListJobsResponse()) - - request = jobs.ListJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = jobs.ListJobsResponse() - - client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_jobs_rest_bad_request(transport: str = 'rest', request_type=jobs.ListJobsRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_jobs(request) - - -def test_list_jobs_rest_flattened(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
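- # The list http rule is a GET, so flattened fields that are not path
- # params (here 'filter') should be sent as query parameters.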
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = jobs.ListJobsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.ListJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/jobs" % client.transport._host, args[1]) - - -def test_list_jobs_rest_flattened_error(transport: str = 'rest'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - jobs.ListJobsRequest(), - project_id='project_id_value', - region='region_value', - filter='filter_value', - ) - - -def test_list_jobs_rest_pager(transport: str = 'rest'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode:
- # Set the response as a series of pages
- response = (
- jobs.ListJobsResponse(
- jobs=[
- jobs.Job(),
- jobs.Job(),
- jobs.Job(),
- ],
- next_page_token='abc',
- ),
- jobs.ListJobsResponse(
- jobs=[],
- next_page_token='def',
- ),
- jobs.ListJobsResponse(
- jobs=[
- jobs.Job(),
- ],
- next_page_token='ghi',
- ),
- jobs.ListJobsResponse(
- jobs=[
- jobs.Job(),
- jobs.Job(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(jobs.ListJobsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'project_id': 'sample1', 'region': 'sample2'}
-
- pager = client.list_jobs(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, jobs.Job)
- for i in results)
-
- pages = list(client.list_jobs(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-@pytest.mark.parametrize("request_type", [
- jobs.UpdateJobRequest,
- dict,
-])
-def test_update_job_rest(request_type):
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'}
- request_init["job"] = {'reference': {'project_id': 'project_id_value', 'job_id': 'job_id_value'}, 'placement': {'cluster_name': 'cluster_name_value', 'cluster_uuid': 'cluster_uuid_value', 'cluster_labels': {}}, 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'trino_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'status': {'state': 1, 'details': 'details_value', 'state_start_time': {'seconds': 751, 'nanos': 543}, 'substate': 1}, 'status_history': {}, 'yarn_applications': [{'name': 'name_value', 'state': 1, 'progress': 0.885, 'tracking_url': 'tracking_url_value'}], 'driver_output_resource_uri': 'driver_output_resource_uri_value', 'driver_control_files_uri': 'driver_control_files_uri_value', 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'job_uuid': 'job_uuid_value', 'done': True, 'driver_scheduling_config': {'memory_mb': 967, 'vcores': 658}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = jobs.Job(
- driver_output_resource_uri='driver_output_resource_uri_value',
- driver_control_files_uri='driver_control_files_uri_value',
- job_uuid='job_uuid_value',
- done=True,
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- pb_return_value = jobs.Job.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- response = client.update_job(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == 'driver_output_resource_uri_value' - assert response.driver_control_files_uri == 'driver_control_files_uri_value' - assert response.job_uuid == 'job_uuid_value' - assert response.done is True - - -def test_update_job_rest_required_fields(request_type=jobs.UpdateJobRequest): - transport_class = transports.JobControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["job_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["jobId"] = 'job_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "jobId" in jsonified_request - assert jsonified_request["jobId"] == 'job_id_value' - - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = jobs.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
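- # update_job transcodes to an http PATCH whose body is the Job itself;
- # updateMask is its only query parameter, which is why it appears on both
- # sides of the set intersection in test_update_job_rest_unset_required_fields.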
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "patch",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = jobs.Job.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.update_job(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_job_rest_unset_required_fields():
- transport = transports.JobControllerRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.update_job._get_unset_required_fields({})
- assert set(unset_fields) == (set(("updateMask", )) & set(("projectId", "region", "jobId", "job", "updateMask", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_job_rest_interceptors(null_interceptor):
- transport = transports.JobControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.JobControllerRestInterceptor(),
- )
- client = JobControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.JobControllerRestInterceptor, "post_update_job") as post, \
- mock.patch.object(transports.JobControllerRestInterceptor, "pre_update_job") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = jobs.Job.to_json(jobs.Job())
-
- request = jobs.UpdateJobRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = jobs.Job()
-
- client.update_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_update_job_rest_bad_request(transport: str = 'rest', request_type=jobs.UpdateJobRequest):
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'}
- request_init["job"] = {'reference': {'project_id': 'project_id_value', 'job_id': 'job_id_value'}, 'placement': {'cluster_name': 'cluster_name_value', 'cluster_uuid': 'cluster_uuid_value', 'cluster_labels': {}}, 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'trino_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'status': {'state': 1, 'details': 'details_value', 'state_start_time': {'seconds': 751, 'nanos': 543}, 'substate': 1}, 'status_history': {}, 'yarn_applications': [{'name': 'name_value', 'state': 1, 'progress': 0.885, 'tracking_url': 'tracking_url_value'}], 'driver_output_resource_uri': 'driver_output_resource_uri_value', 'driver_control_files_uri': 'driver_control_files_uri_value', 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'job_uuid': 'job_uuid_value', 'done': True, 'driver_scheduling_config': {'memory_mb': 967, 'vcores': 658}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 400
- response_value.request = Request()
- req.return_value = response_value
- client.update_job(request)
-
-
-def test_update_job_rest_error():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest'
- )
-
-
-@pytest.mark.parametrize("request_type", [
- jobs.CancelJobRequest,
- dict,
-])
-def test_cancel_job_rest(request_type):
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # send a request that will satisfy transcoding
- request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = jobs.Job(
- driver_output_resource_uri='driver_output_resource_uri_value',
- driver_control_files_uri='driver_control_files_uri_value',
- job_uuid='job_uuid_value',
- done=True,
- )
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- pb_return_value = jobs.Job.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- response = client.cancel_job(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, jobs.Job)
- assert response.driver_output_resource_uri == 'driver_output_resource_uri_value'
- assert response.driver_control_files_uri == 'driver_control_files_uri_value'
- assert response.job_uuid == 'job_uuid_value'
- assert response.done is True
-
-
-def test_cancel_job_rest_required_fields(request_type=jobs.CancelJobRequest):
- transport_class = transports.JobControllerRestTransport
-
- request_init = {}
- request_init["project_id"] = ""
- request_init["region"] = ""
- request_init["job_id"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- including_default_value_fields=False,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["projectId"] = 'project_id_value'
- jsonified_request["region"] = 'region_value'
- jsonified_request["jobId"] = 'job_id_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "projectId" in jsonified_request
- assert jsonified_request["projectId"] == 'project_id_value'
- assert "region" in jsonified_request
- assert jsonified_request["region"] == 'region_value'
- assert "jobId" in jsonified_request
- assert jsonified_request["jobId"] == 'job_id_value'
-
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = jobs.Job()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = jobs.Job.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.cancel_job(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_cancel_job_rest_unset_required_fields():
- transport = transports.JobControllerRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.cancel_job._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("projectId", "region", "jobId", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_cancel_job_rest_interceptors(null_interceptor):
- transport = transports.JobControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.JobControllerRestInterceptor(),
- )
- client = JobControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.JobControllerRestInterceptor, "post_cancel_job") as post, \
- mock.patch.object(transports.JobControllerRestInterceptor, "pre_cancel_job") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = jobs.CancelJobRequest.pb(jobs.CancelJobRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = jobs.Job.to_json(jobs.Job())
-
- request = jobs.CancelJobRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = jobs.Job()
-
- client.cancel_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_cancel_job_rest_bad_request(transport: str = 'rest', request_type=jobs.CancelJobRequest):
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_job(request) - - -def test_cancel_job_rest_flattened(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = jobs.Job() - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.Job.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.cancel_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" % client.transport._host, args[1]) - - -def test_cancel_job_rest_flattened_error(transport: str = 'rest'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_job( - jobs.CancelJobRequest(), - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - - -def test_cancel_job_rest_error(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.DeleteJobRequest, - dict, -]) -def test_delete_job_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_job(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_job_rest_required_fields(request_type=jobs.DeleteJobRequest): - transport_class = transports.JobControllerRestTransport - - request_init = {} - request_init["project_id"] = "" - request_init["region"] = "" - request_init["job_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["projectId"] = 'project_id_value' - jsonified_request["region"] = 'region_value' - jsonified_request["jobId"] = 'job_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == 'project_id_value' - assert "region" in jsonified_request - assert jsonified_request["region"] == 'region_value' - assert "jobId" in jsonified_request - assert jsonified_request["jobId"] == 'job_id_value' - - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_job_rest_unset_required_fields(): - transport = transports.JobControllerRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectId", "region", "jobId", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_job_rest_interceptors(null_interceptor): - transport = transports.JobControllerRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.JobControllerRestInterceptor(), - ) - client = JobControllerClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.JobControllerRestInterceptor, "pre_delete_job") as pre: - pre.assert_not_called() - pb_message = jobs.DeleteJobRequest.pb(jobs.DeleteJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = jobs.DeleteJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_job_rest_bad_request(transport: str = 'rest', request_type=jobs.DeleteJobRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_job(request) - - -def test_delete_job_rest_flattened(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'project_id': 'sample1', 'region': 'sample2', 'job_id': 'sample3'} - - # get truthy value for each flattened field - mock_args = dict( - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" % client.transport._host, args[1]) - - -def test_delete_job_rest_flattened_error(transport: str = 'rest'): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job( - jobs.DeleteJobRequest(), - project_id='project_id_value', - region='region_value', - job_id='job_id_value', - ) - - -def test_delete_job_rest_error(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.JobControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.JobControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = JobControllerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.JobControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobControllerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobControllerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.JobControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = JobControllerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
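- # When a transport instance is supplied, credentials and channel options
- # come from the transport itself; passing them to the client as well is
- # rejected (see test_credentials_transport_error above).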
- transport = transports.JobControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = JobControllerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.JobControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.JobControllerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.JobControllerGrpcTransport, - transports.JobControllerGrpcAsyncIOTransport, - transports.JobControllerRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = JobControllerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.JobControllerGrpcTransport, - ) - -def test_job_controller_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.JobControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_job_controller_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.JobControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
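- # The base transport is an abstract interface: each RPC surface listed
- # below simply raises NotImplementedError until a concrete gRPC or REST
- # transport overrides it.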
- methods = ( - 'submit_job', - 'submit_job_as_operation', - 'get_job', - 'list_jobs', - 'update_job', - 'cancel_job', - 'delete_job', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_job_controller_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.JobControllerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_job_controller_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.JobControllerTransport() - adc.assert_called_once() - - -def test_job_controller_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - JobControllerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.JobControllerGrpcTransport, - transports.JobControllerGrpcAsyncIOTransport, - ], -) -def test_job_controller_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
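- # google.auth.default() resolves Application Default Credentials, e.g.
- # from GOOGLE_APPLICATION_CREDENTIALS, gcloud user credentials, or the
- # GCE metadata server; the mock below short-circuits that lookup.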
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.JobControllerGrpcTransport, - transports.JobControllerGrpcAsyncIOTransport, - transports.JobControllerRestTransport, - ], -) -def test_job_controller_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.JobControllerGrpcTransport, grpc_helpers), - (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_job_controller_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.JobControllerGrpcTransport, transports.JobControllerGrpcAsyncIOTransport]) -def test_job_controller_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_job_controller_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.JobControllerRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -def test_job_controller_rest_lro_client(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_job_controller_host_no_port(transport_name): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataproc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataproc.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_job_controller_host_with_port(transport_name): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataproc.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataproc.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_job_controller_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = JobControllerClient( - credentials=creds1, - transport=transport_name, - ) - client2 = JobControllerClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.submit_job._session - session2 = client2.transport.submit_job._session - assert session1 != session2 - session1 = client1.transport.submit_job_as_operation._session - session2 = client2.transport.submit_job_as_operation._session - assert session1 != session2 - session1 = client1.transport.get_job._session - session2 = client2.transport.get_job._session - assert session1 != session2 - session1 = client1.transport.list_jobs._session - session2 = client2.transport.list_jobs._session - assert session1 != session2 - session1 = client1.transport.update_job._session - session2 = client2.transport.update_job._session - assert session1 != session2 - session1 = client1.transport.cancel_job._session - session2 
= client2.transport.cancel_job._session - assert session1 != session2 - session1 = client1.transport.delete_job._session - session2 = client2.transport.delete_job._session - assert session1 != session2 -def test_job_controller_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.JobControllerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_job_controller_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.JobControllerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.JobControllerGrpcTransport, transports.JobControllerGrpcAsyncIOTransport]) -def test_job_controller_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
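
# Aside on the two channel-injection tests above: when a caller hands the
# transport a pre-built channel, the transport adopts it as-is and performs no
# credential or TLS setup of its own, which is why _ssl_channel_credentials
# stays None. A short sketch of the same pattern, assuming the library is
# installed (the endpoint name is illustrative and never dialed):

import grpc
from google.cloud.dataproc_v1.services.job_controller import transports

channel = grpc.insecure_channel("localhost:50051")
transport = transports.JobControllerGrpcTransport(
    host="example.invalid",
    channel=channel,
)
assert transport.grpc_channel is channel

# The deprecated-argument test flagged in the comment above follows.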
-@pytest.mark.parametrize("transport_class", [transports.JobControllerGrpcTransport, transports.JobControllerGrpcAsyncIOTransport]) -def test_job_controller_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_job_controller_grpc_lro_client(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_job_controller_grpc_lro_async_client(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = JobControllerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = JobControllerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = JobControllerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = JobControllerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = JobControllerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = JobControllerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = JobControllerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = JobControllerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = JobControllerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = JobControllerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = JobControllerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = JobControllerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = JobControllerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = JobControllerClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = JobControllerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.JobControllerTransport, '_prep_wrapped_messages') as prep: - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.JobControllerTransport, '_prep_wrapped_messages') as prep: - transport_class = JobControllerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
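
# The REST error-path tests below all use this recipe: patch requests'
# Session.request to hand back a bare 400 Response and let the client surface
# it as a google.api_core exception. The mechanics in isolation (no client
# involved; the URL is illustrative):

from unittest import mock
from requests import Response, Session

fake = Response()
fake.status_code = 400
with mock.patch.object(Session, "request", return_value=fake):
    resp = Session().request("GET", "https://dataproc.googleapis.com/")
    assert resp.status_code == 400  # every call now yields the canned 400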
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
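
# On the REST happy paths, the wire response is fabricated by hand: serialize
# the expected proto with json_format.MessageToJson, stuff the bytes into a
# requests Response, and let the patched session return it. Standalone sketch:

from google.protobuf import json_format
from google.iam.v1 import policy_pb2
from requests import Response

resp = Response()
resp.status_code = 200
resp._content = json_format.MessageToJson(policy_pb2.Policy(version=774)).encode("UTF-8")
parsed = json_format.Parse(resp.content.decode("UTF-8"), policy_pb2.Policy())
assert parsed.version == 774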
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
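
# One more condensed view of the gRPC stub-mocking pattern used throughout
# this file: patching the stub's __call__ both fakes the response and records
# the request metadata, so routing headers can be asserted directly. Assuming
# the library is installed, this mirrors the field-header tests above:

from unittest import mock
from google.auth import credentials as ga_credentials
from google.longrunning import operations_pb2
from google.cloud.dataproc_v1 import JobControllerClient

client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials())
request = operations_pb2.GetOperationRequest(name="locations")
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
    call.return_value = operations_pb2.Operation()
    client.get_operation(request)
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=locations") in kw["metadata"]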
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.ListOperationsResponse()
-
- response = client.list_operations(
- request={
- "name": "locations",
- }
- )
- call.assert_called()
-@pytest.mark.asyncio
-async def test_list_operations_from_dict_async():
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.ListOperationsResponse()
- )
- response = await client.list_operations(
- request={
- "name": "locations",
- }
- )
- call.assert_called()
-
-
-def test_set_iam_policy(transport: str = "grpc"):
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
- response = client.set_iam_policy(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
-
- assert response.version == 774
-
- assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
- client = JobControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- policy_pb2.Policy(version=774, etag=b"etag_blob",)
- )
- response = await client.set_iam_policy(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
-
- assert response.version == 774
-
- assert response.etag == b"etag_blob"
-
-def test_set_iam_policy_field_headers():
- client = JobControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.SetIamPolicyRequest()
- request.resource = "resource/value"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = JobControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
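# Hedged usage sketch of the IAM surface exercised above: the client accepts a
# plain dict in place of the proto request, as the *_from_dict tests show. The
# project/region/job names are hypothetical, and a real call needs reachable
# credentials and a regional endpoint.
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()
response = client.test_iam_permissions(
    request={
        "resource": "projects/my-project/regions/us-central1/jobs/my-job",  # hypothetical
        "permissions": ["dataproc.jobs.get"],
    }
)
print(response.permissions)  # the subset of requested permissions the caller holds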
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = JobControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (JobControllerClient, transports.JobControllerGrpcTransport), - (JobControllerAsyncClient, transports.JobControllerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_node_group_controller.py b/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_node_group_controller.py deleted file mode 100644 index 364e368c..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_node_group_controller.py +++ /dev/null @@ -1,4045 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
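# Short sketch of the lifecycle contract test_transport_close and
# test_client_ctx pin down above: using the client as a context manager closes
# the underlying gRPC channel (or REST session) on exit. Anonymous credentials
# suffice because no RPC is sent.
from google.auth import credentials as ga_credentials
from google.cloud import dataproc_v1

with dataproc_v1.JobControllerClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
) as client:
    pass  # client.transport.close() runs automatically when the block exits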
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataproc_v1.services.node_group_controller import NodeGroupControllerAsyncClient -from google.cloud.dataproc_v1.services.node_group_controller import NodeGroupControllerClient -from google.cloud.dataproc_v1.services.node_group_controller import transports -from google.cloud.dataproc_v1.types import clusters -from google.cloud.dataproc_v1.types import node_groups -from google.cloud.dataproc_v1.types import operations -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client):
- return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
-
-
-def test__get_default_mtls_endpoint():
- api_endpoint = "example.googleapis.com"
- api_mtls_endpoint = "example.mtls.googleapis.com"
- sandbox_endpoint = "example.sandbox.googleapis.com"
- sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
- non_googleapi = "api.example.com"
-
- assert NodeGroupControllerClient._get_default_mtls_endpoint(None) is None
- assert NodeGroupControllerClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
- assert NodeGroupControllerClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
- assert NodeGroupControllerClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
- assert NodeGroupControllerClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
- assert NodeGroupControllerClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
- (NodeGroupControllerClient, "grpc"),
- (NodeGroupControllerAsyncClient, "grpc_asyncio"),
- (NodeGroupControllerClient, "rest"),
-])
-def test_node_group_controller_client_from_service_account_info(client_class, transport_name):
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
- factory.return_value = creds
- info = {"valid": True}
- client = client_class.from_service_account_info(info, transport=transport_name)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- assert client.transport._host == (
- 'dataproc.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else
- 'https://dataproc.googleapis.com'
- )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
- (transports.NodeGroupControllerGrpcTransport, "grpc"),
- (transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio"),
- (transports.NodeGroupControllerRestTransport, "rest"),
-])
-def test_node_group_controller_client_service_account_always_use_jwt(transport_class, transport_name):
- with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
- creds = service_account.Credentials(None, None, None)
- transport = transport_class(credentials=creds, always_use_jwt_access=True)
- use_jwt.assert_called_once_with(True)
-
- with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
- creds = service_account.Credentials(None, None, None)
- transport = transport_class(credentials=creds, always_use_jwt_access=False)
- use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
- (NodeGroupControllerClient, "grpc"),
- (NodeGroupControllerAsyncClient, "grpc_asyncio"),
- (NodeGroupControllerClient, "rest"),
-])
-def test_node_group_controller_client_from_service_account_file(client_class, transport_name):
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
- factory.return_value = creds
- client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- assert client.transport._host == (
- 'dataproc.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else
- 'https://dataproc.googleapis.com'
- )
-
-
-def test_node_group_controller_client_get_transport_class():
- transport = NodeGroupControllerClient.get_transport_class()
- available_transports = [
- transports.NodeGroupControllerGrpcTransport,
- transports.NodeGroupControllerRestTransport,
- ]
- assert transport in available_transports
-
- transport = NodeGroupControllerClient.get_transport_class("grpc")
- assert transport == transports.NodeGroupControllerGrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
- (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport, "grpc"),
- (NodeGroupControllerAsyncClient, transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio"),
- (NodeGroupControllerClient, transports.NodeGroupControllerRestTransport, "rest"),
-])
-@mock.patch.object(NodeGroupControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupControllerClient))
-@mock.patch.object(NodeGroupControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupControllerAsyncClient))
-def test_node_group_controller_client_client_options(client_class, transport_class, transport_name):
- # Check that if channel is provided we won't create a new one.
- with mock.patch.object(NodeGroupControllerClient, 'get_transport_class') as gtc:
- transport = transport_class(
- credentials=ga_credentials.AnonymousCredentials()
- )
- client = client_class(transport=transport)
- gtc.assert_not_called()
-
- # Check that if channel is provided via str we will create a new one.
- with mock.patch.object(NodeGroupControllerClient, 'get_transport_class') as gtc:
- client = client_class(transport=transport_name)
- gtc.assert_called()
-
- # Check the case api_endpoint is provided.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(transport=transport_name, client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host="squid.clam.whelk",
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
- # "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
- # "always".
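# Illustrative re-implementation (an assumption for exposition, not the
# library's exact code) of the rewrite test__get_default_mtls_endpoint checks
# above: "<name>[.sandbox].googleapis.com" gains an ".mtls" label, while any
# other host passes through unchanged.
import re
from typing import Optional

def default_mtls_endpoint(api_endpoint: Optional[str]) -> Optional[str]:
    if not api_endpoint:
        return api_endpoint
    m = re.match(
        r"(?P<name>[^.]+)(\.mtls)?(?P<sandbox>\.sandbox)?\.googleapis\.com",
        api_endpoint,
    )
    if not m:
        return api_endpoint
    if m.group("sandbox"):
        return f"{m.group('name')}.mtls.sandbox.googleapis.com"
    return f"{m.group('name')}.mtls.googleapis.com"

assert default_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert default_mtls_endpoint("example.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
assert default_mtls_endpoint("api.example.com") == "api.example.com"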
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport, "grpc", "true"), - (NodeGroupControllerAsyncClient, transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport, "grpc", "false"), - (NodeGroupControllerAsyncClient, transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (NodeGroupControllerClient, transports.NodeGroupControllerRestTransport, "rest", "true"), - (NodeGroupControllerClient, transports.NodeGroupControllerRestTransport, "rest", "false"), -]) -@mock.patch.object(NodeGroupControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupControllerClient)) -@mock.patch.object(NodeGroupControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupControllerAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_node_group_controller_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
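# Condensed decision table (an illustration, not library code) for the "auto"
# mTLS mode the parametrized test above walks through: the mTLS endpoint is
# selected only when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a client
# certificate source (explicit or ADC-provided) is actually available.
def pick_endpoint(use_client_cert: str, cert_available: bool,
                  default_endpoint: str, mtls_endpoint: str) -> str:
    if use_client_cert == "true" and cert_available:
        return mtls_endpoint
    return default_endpoint

assert pick_endpoint("true", True, "d", "m") == "m"
assert pick_endpoint("true", False, "d", "m") == "d"
assert pick_endpoint("false", True, "d", "m") == "d"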
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - NodeGroupControllerClient, NodeGroupControllerAsyncClient -]) -@mock.patch.object(NodeGroupControllerClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupControllerClient)) -@mock.patch.object(NodeGroupControllerAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(NodeGroupControllerAsyncClient)) -def test_node_group_controller_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport, "grpc"), - (NodeGroupControllerAsyncClient, transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio"), - (NodeGroupControllerClient, transports.NodeGroupControllerRestTransport, "rest"), -]) -def test_node_group_controller_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport, "grpc", grpc_helpers), - (NodeGroupControllerAsyncClient, transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (NodeGroupControllerClient, transports.NodeGroupControllerRestTransport, "rest", None), -]) -def test_node_group_controller_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_node_group_controller_client_client_options_from_dict(): - with mock.patch('google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = NodeGroupControllerClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport, "grpc", grpc_helpers), - (NodeGroupControllerAsyncClient, transports.NodeGroupControllerGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_node_group_controller_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
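# Sketch of the client_options-as-dict pattern verified by
# test_node_group_controller_client_client_options_from_dict above. The
# "squid.clam.whelk"-style endpoints in the tests are placeholders; the
# regional endpoint below follows the documented Dataproc pattern but is
# likewise an assumption for this example.
from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1.services.node_group_controller import NodeGroupControllerClient

client = NodeGroupControllerClient(
    credentials=ga_credentials.AnonymousCredentials(),
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"},
)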
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - node_groups.CreateNodeGroupRequest, - dict, -]) -def test_create_node_group(request_type, transport: str = 'grpc'): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.CreateNodeGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_node_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_node_group), - '__call__') as call: - client.create_node_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.CreateNodeGroupRequest() - -@pytest.mark.asyncio -async def test_create_node_group_async(transport: str = 'grpc_asyncio', request_type=node_groups.CreateNodeGroupRequest): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_node_group(request) - - # Establish that the underlying gRPC stub method was called. 
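# End-to-end sketch of the long-running-operation surface these tests mock:
# create_node_group returns a google.api_core.operation.Operation future whose
# result() yields the NodeGroup. All resource names are hypothetical, and a
# real call needs valid credentials and a regional endpoint.
from google.cloud.dataproc_v1.services.node_group_controller import NodeGroupControllerClient
from google.cloud.dataproc_v1.types import clusters

client = NodeGroupControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
operation = client.create_node_group(
    parent="projects/my-project/regions/us-central1/clusters/my-cluster",  # hypothetical
    node_group=clusters.NodeGroup(roles=[clusters.NodeGroup.Role.DRIVER]),
    node_group_id="my-node-group",
)
node_group = operation.result()  # blocks until the operation completes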
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.CreateNodeGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_node_group_async_from_dict(): - await test_create_node_group_async(request_type=dict) - - -def test_create_node_group_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = node_groups.CreateNodeGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_node_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_node_group_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = node_groups.CreateNodeGroupRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_node_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_node_group_flattened(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_node_group( - parent='parent_value', - node_group=clusters.NodeGroup(name='name_value'), - node_group_id='node_group_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].node_group - mock_val = clusters.NodeGroup(name='name_value') - assert arg == mock_val - arg = args[0].node_group_id - mock_val = 'node_group_id_value' - assert arg == mock_val - - -def test_create_node_group_flattened_error(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_node_group( - node_groups.CreateNodeGroupRequest(), - parent='parent_value', - node_group=clusters.NodeGroup(name='name_value'), - node_group_id='node_group_id_value', - ) - -@pytest.mark.asyncio -async def test_create_node_group_flattened_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_node_group( - parent='parent_value', - node_group=clusters.NodeGroup(name='name_value'), - node_group_id='node_group_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].node_group - mock_val = clusters.NodeGroup(name='name_value') - assert arg == mock_val - arg = args[0].node_group_id - mock_val = 'node_group_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_node_group_flattened_error_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_node_group( - node_groups.CreateNodeGroupRequest(), - parent='parent_value', - node_group=clusters.NodeGroup(name='name_value'), - node_group_id='node_group_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - node_groups.ResizeNodeGroupRequest, - dict, -]) -def test_resize_node_group(request_type, transport: str = 'grpc'): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.resize_node_group(request) - - # Establish that the underlying gRPC stub method was called. 
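# The contract pinned by the *_flattened_error tests above, in miniature:
# passing both a request object and flattened keyword fields raises ValueError
# before any RPC is attempted, so anonymous credentials suffice here.
import pytest
from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1.services.node_group_controller import NodeGroupControllerClient
from google.cloud.dataproc_v1.types import node_groups

client = NodeGroupControllerClient(credentials=ga_credentials.AnonymousCredentials())
with pytest.raises(ValueError):
    client.create_node_group(
        node_groups.CreateNodeGroupRequest(),
        parent="parent_value",
    )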
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.ResizeNodeGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_resize_node_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_node_group), - '__call__') as call: - client.resize_node_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.ResizeNodeGroupRequest() - -@pytest.mark.asyncio -async def test_resize_node_group_async(transport: str = 'grpc_asyncio', request_type=node_groups.ResizeNodeGroupRequest): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.resize_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.ResizeNodeGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_resize_node_group_async_from_dict(): - await test_resize_node_group_async(request_type=dict) - - -def test_resize_node_group_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = node_groups.ResizeNodeGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_node_group), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.resize_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_resize_node_group_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = node_groups.ResizeNodeGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.resize_node_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.resize_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_resize_node_group_flattened(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.resize_node_group( - name='name_value', - size=443, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].size - mock_val = 443 - assert arg == mock_val - - -def test_resize_node_group_flattened_error(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.resize_node_group( - node_groups.ResizeNodeGroupRequest(), - name='name_value', - size=443, - ) - -@pytest.mark.asyncio -async def test_resize_node_group_flattened_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.resize_node_group( - name='name_value', - size=443, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].size - mock_val = 443 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_resize_node_group_flattened_error_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
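# Flattened-call sketch mirroring test_resize_node_group_flattened above; the
# resource name is hypothetical and the size arbitrary. The keyword form is
# shorthand for populating a ResizeNodeGroupRequest.
from google.cloud.dataproc_v1.services.node_group_controller import NodeGroupControllerClient

client = NodeGroupControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
operation = client.resize_node_group(
    name="projects/my-project/regions/us-central1/clusters/my-cluster/nodeGroups/my-node-group",
    size=5,
)
operation.result()  # resize is also a long-running operation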
- with pytest.raises(ValueError): - await client.resize_node_group( - node_groups.ResizeNodeGroupRequest(), - name='name_value', - size=443, - ) - - -@pytest.mark.parametrize("request_type", [ - node_groups.GetNodeGroupRequest, - dict, -]) -def test_get_node_group(request_type, transport: str = 'grpc'): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clusters.NodeGroup( - name='name_value', - roles=[clusters.NodeGroup.Role.DRIVER], - ) - response = client.get_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.GetNodeGroupRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, clusters.NodeGroup) - assert response.name == 'name_value' - assert response.roles == [clusters.NodeGroup.Role.DRIVER] - - -def test_get_node_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_node_group), - '__call__') as call: - client.get_node_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.GetNodeGroupRequest() - -@pytest.mark.asyncio -async def test_get_node_group_async(transport: str = 'grpc_asyncio', request_type=node_groups.GetNodeGroupRequest): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(clusters.NodeGroup( - name='name_value', - roles=[clusters.NodeGroup.Role.DRIVER], - )) - response = await client.get_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == node_groups.GetNodeGroupRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, clusters.NodeGroup) - assert response.name == 'name_value' - assert response.roles == [clusters.NodeGroup.Role.DRIVER] - - -@pytest.mark.asyncio -async def test_get_node_group_async_from_dict(): - await test_get_node_group_async(request_type=dict) - - -def test_get_node_group_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = node_groups.GetNodeGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_node_group), - '__call__') as call: - call.return_value = clusters.NodeGroup() - client.get_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_node_group_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = node_groups.GetNodeGroupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_node_group), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.NodeGroup()) - await client.get_node_group(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_node_group_flattened(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clusters.NodeGroup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_node_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_node_group_flattened_error(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
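# Read-path sketch matching the get_node_group tests: a unary call that returns
# a NodeGroup directly, with no intervening operation. The name is hypothetical.
from google.cloud.dataproc_v1.services.node_group_controller import NodeGroupControllerClient

client = NodeGroupControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
node_group = client.get_node_group(
    name="projects/my-project/regions/us-central1/clusters/my-cluster/nodeGroups/my-node-group"
)
print(node_group.name, list(node_group.roles))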
- with pytest.raises(ValueError): - client.get_node_group( - node_groups.GetNodeGroupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_node_group_flattened_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_node_group), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = clusters.NodeGroup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(clusters.NodeGroup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_node_group( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_node_group_flattened_error_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_node_group( - node_groups.GetNodeGroupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - node_groups.CreateNodeGroupRequest, - dict, -]) -def test_create_node_group_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/regions/sample2/clusters/sample3'} - request_init["node_group"] = {'name': 'name_value', 'roles': [1], 'node_group_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'labels': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_node_group(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - - -def test_create_node_group_rest_required_fields(request_type=node_groups.CreateNodeGroupRequest): - transport_class = transports.NodeGroupControllerRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_node_group._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_node_group._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("node_group_id", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
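- # The transcode stub below short-circuits URI transcoding; the mocked session
- # never inspects the body, so echoing the raw protobuf request is sufficient
- # for this test.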
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.create_node_group(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_create_node_group_rest_unset_required_fields():
- transport = transports.NodeGroupControllerRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.create_node_group._get_unset_required_fields({})
- assert set(unset_fields) == (set(("nodeGroupId", "requestId", )) & set(("parent", "nodeGroup", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_node_group_rest_interceptors(null_interceptor):
- transport = transports.NodeGroupControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.NodeGroupControllerRestInterceptor(),
- )
- client = NodeGroupControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.NodeGroupControllerRestInterceptor, "post_create_node_group") as post, \
- mock.patch.object(transports.NodeGroupControllerRestInterceptor, "pre_create_node_group") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = node_groups.CreateNodeGroupRequest.pb(node_groups.CreateNodeGroupRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
-
- request = node_groups.CreateNodeGroupRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
-
- client.create_node_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_create_node_group_rest_bad_request(transport: str = 'rest', request_type=node_groups.CreateNodeGroupRequest):
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/regions/sample2/clusters/sample3'}
- request_init["node_group"] = {'name': 'name_value', 'roles': [1], 'node_group_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name':
'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'labels': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_node_group(request) - - -def test_create_node_group_rest_flattened(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/regions/sample2/clusters/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - node_group=clusters.NodeGroup(name='name_value'), - node_group_id='node_group_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_node_group(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/regions/*/clusters/*}/nodeGroups" % client.transport._host, args[1]) - - -def test_create_node_group_rest_flattened_error(transport: str = 'rest'): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_node_group( - node_groups.CreateNodeGroupRequest(), - parent='parent_value', - node_group=clusters.NodeGroup(name='name_value'), - node_group_id='node_group_id_value', - ) - - -def test_create_node_group_rest_error(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - node_groups.ResizeNodeGroupRequest, - dict, -]) -def test_resize_node_group_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
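- # resize_node_group is a long-running operation, so the REST stub returns a
- # raw operations_pb2.Operation; 'operations/spam' is an arbitrary placeholder name.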
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.resize_node_group(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_resize_node_group_rest_required_fields(request_type=node_groups.ResizeNodeGroupRequest): - transport_class = transports.NodeGroupControllerRestTransport - - request_init = {} - request_init["name"] = "" - request_init["size"] = 0 - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize_node_group._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["size"] = 443 - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize_node_group._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "size" in jsonified_request - assert jsonified_request["size"] == 443 - - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.resize_node_group(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_resize_node_group_rest_unset_required_fields():
- transport = transports.NodeGroupControllerRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.resize_node_group._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", "size", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_resize_node_group_rest_interceptors(null_interceptor):
- transport = transports.NodeGroupControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.NodeGroupControllerRestInterceptor(),
- )
- client = NodeGroupControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.NodeGroupControllerRestInterceptor, "post_resize_node_group") as post, \
- mock.patch.object(transports.NodeGroupControllerRestInterceptor, "pre_resize_node_group") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = node_groups.ResizeNodeGroupRequest.pb(node_groups.ResizeNodeGroupRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
-
- request = node_groups.ResizeNodeGroupRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
-
- client.resize_node_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_resize_node_group_rest_bad_request(transport: str = 'rest', request_type=node_groups.ResizeNodeGroupRequest):
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.resize_node_group(request) - - -def test_resize_node_group_rest_flattened(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - size=443, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.resize_node_group(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}:resize" % client.transport._host, args[1]) - - -def test_resize_node_group_rest_flattened_error(transport: str = 'rest'): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.resize_node_group( - node_groups.ResizeNodeGroupRequest(), - name='name_value', - size=443, - ) - - -def test_resize_node_group_rest_error(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - node_groups.GetNodeGroupRequest, - dict, -]) -def test_get_node_group_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = clusters.NodeGroup( - name='name_value', - roles=[clusters.NodeGroup.Role.DRIVER], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = clusters.NodeGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_node_group(request) - - # Establish that the response is the type that we expect. 
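- # get_node_group is a unary call, so the JSON payload built above should
- # round-trip back into a clusters.NodeGroup with the same name and roles.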
- assert isinstance(response, clusters.NodeGroup) - assert response.name == 'name_value' - assert response.roles == [clusters.NodeGroup.Role.DRIVER] - - -def test_get_node_group_rest_required_fields(request_type=node_groups.GetNodeGroupRequest): - transport_class = transports.NodeGroupControllerRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_node_group._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_node_group._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = clusters.NodeGroup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- pb_return_value = clusters.NodeGroup.pb(return_value)
- json_return_value = json_format.MessageToJson(pb_return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
-
- response = client.get_node_group(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_node_group_rest_unset_required_fields():
- transport = transports.NodeGroupControllerRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_node_group._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_node_group_rest_interceptors(null_interceptor):
- transport = transports.NodeGroupControllerRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.NodeGroupControllerRestInterceptor(),
- )
- client = NodeGroupControllerClient(transport=transport)
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.NodeGroupControllerRestInterceptor, "post_get_node_group") as post, \
- mock.patch.object(transports.NodeGroupControllerRestInterceptor, "pre_get_node_group") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- pb_message = node_groups.GetNodeGroupRequest.pb(node_groups.GetNodeGroupRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = Response()
- req.return_value.status_code = 200
- req.return_value.request = PreparedRequest()
- req.return_value._content = clusters.NodeGroup.to_json(clusters.NodeGroup())
-
- request = node_groups.GetNodeGroupRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = clusters.NodeGroup()
-
- client.get_node_group(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
-
-
-def test_get_node_group_rest_bad_request(transport: str = 'rest', request_type=node_groups.GetNodeGroupRequest):
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_node_group(request) - - -def test_get_node_group_rest_flattened(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = clusters.NodeGroup() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/regions/sample2/clusters/sample3/nodeGroups/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = clusters.NodeGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_node_group(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/regions/*/clusters/*/nodeGroups/*}" % client.transport._host, args[1]) - - -def test_get_node_group_rest_flattened_error(transport: str = 'rest'): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_node_group( - node_groups.GetNodeGroupRequest(), - name='name_value', - ) - - -def test_get_node_group_rest_error(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.NodeGroupControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.NodeGroupControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = NodeGroupControllerClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.NodeGroupControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = NodeGroupControllerClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = NodeGroupControllerClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.NodeGroupControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = NodeGroupControllerClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.NodeGroupControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = NodeGroupControllerClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.NodeGroupControllerGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.NodeGroupControllerGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.NodeGroupControllerGrpcTransport, - transports.NodeGroupControllerGrpcAsyncIOTransport, - transports.NodeGroupControllerRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = NodeGroupControllerClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.NodeGroupControllerGrpcTransport, - ) - -def test_node_group_controller_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.NodeGroupControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_node_group_controller_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.NodeGroupControllerTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
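- # The tuple below mirrors the full NodeGroupController surface, including the
- # IAM and operations mixin RPCs that every transport is expected to expose.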
- methods = ( - 'create_node_group', - 'resize_node_group', - 'get_node_group', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_node_group_controller_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NodeGroupControllerTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_node_group_controller_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataproc_v1.services.node_group_controller.transports.NodeGroupControllerTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NodeGroupControllerTransport() - adc.assert_called_once() - - -def test_node_group_controller_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NodeGroupControllerClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.NodeGroupControllerGrpcTransport, - transports.NodeGroupControllerGrpcAsyncIOTransport, - ], -) -def test_node_group_controller_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.NodeGroupControllerGrpcTransport, - transports.NodeGroupControllerGrpcAsyncIOTransport, - transports.NodeGroupControllerRestTransport, - ], -) -def test_node_group_controller_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.NodeGroupControllerGrpcTransport, grpc_helpers), - (transports.NodeGroupControllerGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_node_group_controller_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.NodeGroupControllerGrpcTransport, transports.NodeGroupControllerGrpcAsyncIOTransport]) -def test_node_group_controller_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
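- # In that fallback, the transport should build its own SSL credentials from
- # the callback's certificate/key pair via grpc.ssl_channel_credentials.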
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-def test_node_group_controller_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
- transports.NodeGroupControllerRestTransport(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-def test_node_group_controller_rest_lro_client():
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.AbstractOperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_node_group_controller_host_no_port(transport_name):
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'dataproc.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://dataproc.googleapis.com'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_node_group_controller_host_with_port(transport_name):
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'dataproc.googleapis.com:8000'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://dataproc.googleapis.com:8000'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "rest",
-])
-def test_node_group_controller_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = NodeGroupControllerClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = NodeGroupControllerClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.create_node_group._session
- session2 = client2.transport.create_node_group._session
- assert session1 != session2
- session1 = client1.transport.resize_node_group._session
- session2 = client2.transport.resize_node_group._session
- assert session1 != session2
- session1 = client1.transport.get_node_group._session
- session2 = client2.transport.get_node_group._session
- assert session1 != session2
-
-
-def test_node_group_controller_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.NodeGroupControllerGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_node_group_controller_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.NodeGroupControllerGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.NodeGroupControllerGrpcTransport, transports.NodeGroupControllerGrpcAsyncIOTransport])
-def test_node_group_controller_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.NodeGroupControllerGrpcTransport, transports.NodeGroupControllerGrpcAsyncIOTransport]) -def test_node_group_controller_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_node_group_controller_grpc_lro_client(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_node_group_controller_grpc_lro_async_client(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_node_group_path(): - project = "squid" - region = "clam" - cluster = "whelk" - node_group = "octopus" - expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(project=project, region=region, cluster=cluster, node_group=node_group, ) - actual = NodeGroupControllerClient.node_group_path(project, region, cluster, node_group) - assert expected == actual - - -def test_parse_node_group_path(): - expected = { - "project": "oyster", - "region": "nudibranch", - "cluster": "cuttlefish", - "node_group": "mussel", - } - path = NodeGroupControllerClient.node_group_path(**expected) - - # Check that the path construction is reversible. - actual = NodeGroupControllerClient.parse_node_group_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = NodeGroupControllerClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = NodeGroupControllerClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = NodeGroupControllerClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = NodeGroupControllerClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = NodeGroupControllerClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = NodeGroupControllerClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = NodeGroupControllerClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = NodeGroupControllerClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = NodeGroupControllerClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = NodeGroupControllerClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = NodeGroupControllerClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = NodeGroupControllerClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = NodeGroupControllerClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = NodeGroupControllerClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = NodeGroupControllerClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.NodeGroupControllerTransport, '_prep_wrapped_messages') as prep: - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.NodeGroupControllerTransport, '_prep_wrapped_messages') as prep: - transport_class = NodeGroupControllerClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
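- # A 400 status from the mocked session should surface as core_exceptions.BadRequest.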
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
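- # DeleteOperation returns google.protobuf.Empty, so the client surfaces None
- # and the mocked HTTP body is just an empty JSON object.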
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
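#
# --- Illustrative sketch; not from the deleted file -------------------------
# The REST fakes above serialize a canned proto with
# `json_format.MessageToJson` and the client parses it back from the body.
# That round trip in isolation (field values illustrative):
from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name="projects/p/regions/r/operations/o", done=True)
payload = json_format.MessageToJson(op)   # what the tests stuff into _content
parsed = json_format.Parse(payload, operations_pb2.Operation())
assert parsed.name == op.name and parsed.done
# -----------------------------------------------------------------------------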
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
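#
# --- Illustrative sketch; not from the deleted file -------------------------
# The `_, args, _ = call.mock_calls[0]` / `kw["metadata"]` idiom used
# throughout these tests unpacks one recorded mock call into
# (name, positional args, keyword args):
from unittest import mock

stub = mock.MagicMock()
stub("request-obj", metadata=[("x-goog-request-params", "name=locations")])
name, args, kwargs = stub.mock_calls[0]
assert args[0] == "request-obj"
assert ("x-goog-request-params", "name=locations") in kwargs["metadata"]
# -----------------------------------------------------------------------------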
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
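#
# --- Illustrative sketch; not from the deleted file -------------------------
# The async variants wrap the canned value in
# `grpc_helpers_async.FakeUnaryUnaryCall`, an awaitable that resolves to the
# supplied message. In isolation:
import asyncio
from google.api_core import grpc_helpers_async
from google.longrunning import operations_pb2

async def demo():
    fake = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name="op"))
    response = await fake
    assert response.name == "op"

asyncio.run(demo())
# -----------------------------------------------------------------------------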
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
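#
# --- Illustrative sketch; not from the deleted file -------------------------
# The canned ListOperationsResponse above is empty; a populated one would look
# like this (names illustrative):
from google.longrunning import operations_pb2

resp = operations_pb2.ListOperationsResponse(
    operations=[operations_pb2.Operation(name="projects/p/regions/r/operations/op1")],
    next_page_token="",
)
assert len(resp.operations) == 1
# -----------------------------------------------------------------------------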
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.ListOperationsResponse()
- )
- response = await client.list_operations(
- request={
- "name": "locations",
- }
- )
- call.assert_called()
-
-
-def test_set_iam_policy(transport: str = "grpc"):
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
- response = client.set_iam_policy(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
-
- assert response.version == 774
-
- assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
- client = NodeGroupControllerAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy_pb2.SetIamPolicyRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- policy_pb2.Policy(version=774, etag=b"etag_blob",)
- )
- response = await client.set_iam_policy(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
-
- assert response.version == 774
-
- assert response.etag == b"etag_blob"
-
-def test_set_iam_policy_field_headers():
- client = NodeGroupControllerClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.SetIamPolicyRequest()
- request.resource = "resource/value"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
- call.return_value = policy_pb2.Policy()
-
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
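#
# --- Illustrative sketch; not from the deleted file -------------------------
# The from_dict tests pass plain dicts and the client coerces them into the
# request message. The explicit equivalent of the set_iam_policy dict above:
from google.iam.v1 import iam_policy_pb2, policy_pb2

request = iam_policy_pb2.SetIamPolicyRequest(
    resource="resource_value",
    policy=policy_pb2.Policy(version=774),
)
assert request.policy.version == 774
# -----------------------------------------------------------------------------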
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
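#
# --- Illustrative sketch; not from the deleted file -------------------------
# The explicit equivalent of the get_iam_policy request dict above, with
# `GetPolicyOptions` selecting the policy schema version (values from the test):
from google.iam.v1 import iam_policy_pb2, options_pb2

request = iam_policy_pb2.GetIamPolicyRequest(
    resource="resource_value",
    options=options_pb2.GetPolicyOptions(requested_policy_version=2598),
)
assert request.options.requested_policy_version == 2598
# -----------------------------------------------------------------------------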
- request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = NodeGroupControllerAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
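#
# --- Illustrative sketch; not from the deleted file -------------------------
# The test_iam_permissions request/response pair in isolation (values are the
# tests' own placeholders):
from google.iam.v1 import iam_policy_pb2

request = iam_policy_pb2.TestIamPermissionsRequest(
    resource="resource_value",
    permissions=["permissions_value"],
)
response = iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"])
assert list(response.permissions) == ["permissions_value"]
# -----------------------------------------------------------------------------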
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = NodeGroupControllerClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (NodeGroupControllerClient, transports.NodeGroupControllerGrpcTransport), - (NodeGroupControllerAsyncClient, transports.NodeGroupControllerGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py deleted file mode 100644 index 51c75ac5..00000000 --- a/owl-bot-staging/v1/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ /dev/null @@ -1,6229 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import os
-# try/except added for compatibility with python < 3.8
-try:
- from unittest import mock
- from unittest.mock import AsyncMock # pragma: NO COVER
-except ImportError: # pragma: NO COVER
- import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import future
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import operation
-from google.api_core import operation_async # type: ignore
-from google.api_core import operations_v1
-from google.api_core import path_template
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.dataproc_v1.services.workflow_template_service import WorkflowTemplateServiceAsyncClient
-from google.cloud.dataproc_v1.services.workflow_template_service import WorkflowTemplateServiceClient
-from google.cloud.dataproc_v1.services.workflow_template_service import pagers
-from google.cloud.dataproc_v1.services.workflow_template_service import transports
-from google.cloud.dataproc_v1.types import clusters
-from google.cloud.dataproc_v1.types import jobs
-from google.cloud.dataproc_v1.types import shared
-from google.cloud.dataproc_v1.types import workflow_templates
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import options_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account
-from google.protobuf import duration_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-import google.auth
-
-
-def client_cert_source_callback():
- return b"cert bytes", b"key bytes"
-
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
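#
# --- Illustrative sketch; not from the deleted file -------------------------
# What `_get_default_mtls_endpoint` (exercised just below) computes: insert
# ".mtls" after the service name of a *.googleapis.com endpoint and leave
# everything else untouched. A standalone reimplementation for illustration
# only, not the client's actual helper:
import re

def to_mtls_endpoint(endpoint):
    if not endpoint:
        return endpoint
    m = re.match(r"(?P<name>[^.]+)(\.mtls)?(?P<sandbox>\.sandbox)?\.googleapis\.com$", endpoint)
    if not m:
        return endpoint  # non-googleapis hosts pass through unchanged
    return m.group("name") + ".mtls" + (m.group("sandbox") or "") + ".googleapis.com"

assert to_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert to_mtls_endpoint("example.sandbox.googleapis.com") == "example.mtls.sandbox.googleapis.com"
assert to_mtls_endpoint("api.example.com") == "api.example.com"
# -----------------------------------------------------------------------------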
-def modify_default_endpoint(client):
- return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
-
-
-def test__get_default_mtls_endpoint():
- api_endpoint = "example.googleapis.com"
- api_mtls_endpoint = "example.mtls.googleapis.com"
- sandbox_endpoint = "example.sandbox.googleapis.com"
- sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
- non_googleapi = "api.example.com"
-
- assert WorkflowTemplateServiceClient._get_default_mtls_endpoint(None) is None
- assert WorkflowTemplateServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
- assert WorkflowTemplateServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
- assert WorkflowTemplateServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
- assert WorkflowTemplateServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
- assert WorkflowTemplateServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
- (WorkflowTemplateServiceClient, "grpc"),
- (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"),
- (WorkflowTemplateServiceClient, "rest"),
-])
-def test_workflow_template_service_client_from_service_account_info(client_class, transport_name):
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
- factory.return_value = creds
- info = {"valid": True}
- client = client_class.from_service_account_info(info, transport=transport_name)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- assert client.transport._host == (
- 'dataproc.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else
- 'https://dataproc.googleapis.com'
- )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
- (transports.WorkflowTemplateServiceGrpcTransport, "grpc"),
- (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio"),
- (transports.WorkflowTemplateServiceRestTransport, "rest"),
-])
-def test_workflow_template_service_client_service_account_always_use_jwt(transport_class, transport_name):
- with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
- creds = service_account.Credentials(None, None, None)
- transport = transport_class(credentials=creds, always_use_jwt_access=True)
- use_jwt.assert_called_once_with(True)
-
- with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
- creds = service_account.Credentials(None, None, None)
- transport = transport_class(credentials=creds, always_use_jwt_access=False)
- use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
- (WorkflowTemplateServiceClient, "grpc"),
- (WorkflowTemplateServiceAsyncClient, "grpc_asyncio"),
- (WorkflowTemplateServiceClient, "rest"),
-])
-def test_workflow_template_service_client_from_service_account_file(client_class, transport_name):
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
- factory.return_value = creds
- client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- assert client.transport._host == (
- 'dataproc.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else
- 'https://dataproc.googleapis.com'
- )
-
-
-def test_workflow_template_service_client_get_transport_class():
- transport = WorkflowTemplateServiceClient.get_transport_class()
- available_transports = [
- transports.WorkflowTemplateServiceGrpcTransport,
- transports.WorkflowTemplateServiceRestTransport,
- ]
- assert transport in available_transports
-
- transport = WorkflowTemplateServiceClient.get_transport_class("grpc")
- assert transport == transports.WorkflowTemplateServiceGrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
- (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceGrpcTransport, "grpc"),
- (WorkflowTemplateServiceAsyncClient, transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio"),
- (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceRestTransport, "rest"),
-])
-@mock.patch.object(WorkflowTemplateServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowTemplateServiceClient))
-@mock.patch.object(WorkflowTemplateServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowTemplateServiceAsyncClient))
-def test_workflow_template_service_client_client_options(client_class, transport_class, transport_name):
- # Check that if channel is provided we won't create a new one.
- with mock.patch.object(WorkflowTemplateServiceClient, 'get_transport_class') as gtc:
- transport = transport_class(
- credentials=ga_credentials.AnonymousCredentials()
- )
- client = client_class(transport=transport)
- gtc.assert_not_called()
-
- # Check that if channel is provided via str we will create a new one.
- with mock.patch.object(WorkflowTemplateServiceClient, 'get_transport_class') as gtc:
- client = client_class(transport=transport_name)
- gtc.assert_called()
-
- # Check the case api_endpoint is provided.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(transport=transport_name, client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host="squid.clam.whelk",
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
- # "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
- # "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceGrpcTransport, "grpc", "true"), - (WorkflowTemplateServiceAsyncClient, transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceGrpcTransport, "grpc", "false"), - (WorkflowTemplateServiceAsyncClient, transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceRestTransport, "rest", "true"), - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(WorkflowTemplateServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowTemplateServiceClient)) -@mock.patch.object(WorkflowTemplateServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowTemplateServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_workflow_template_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. 
- # Endpoint is autoswitched to the default
- # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
- # Check the case client_cert_source is provided. Whether client cert is used depends on
- # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
- options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
- with mock.patch.object(transport_class, '__init__') as patched:
- patched.return_value = None
- client = client_class(client_options=options, transport=transport_name)
-
- if use_client_cert_env == "false":
- expected_client_cert_source = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_client_cert_source = client_cert_source_callback
- expected_host = client.DEFAULT_MTLS_ENDPOINT
-
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- client_cert_source_for_mtls=expected_client_cert_source,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
-
- # Check the case ADC client cert is provided. Whether client cert is used depends on
- # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
- with mock.patch.object(transport_class, '__init__') as patched:
- with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
- with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
- if use_client_cert_env == "false":
- expected_host = client.DEFAULT_ENDPOINT
- expected_client_cert_source = None
- else:
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_client_cert_source = client_cert_source_callback
-
- patched.return_value = None
- client = client_class(transport=transport_name)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- client_cert_source_for_mtls=expected_client_cert_source,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- api_audience=None,
- )
-
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient -]) -@mock.patch.object(WorkflowTemplateServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowTemplateServiceClient)) -@mock.patch.object(WorkflowTemplateServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(WorkflowTemplateServiceAsyncClient)) -def test_workflow_template_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
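-    # Taken together, the assertions in this test amount to the following
-    # decision table (a summary sketch derived from the cases above, not an
-    # exhaustive specification):
-    #
-    #     GOOGLE_API_USE_MTLS_ENDPOINT  default cert  ->  endpoint,              cert_source
-    #     "never"                       any               DEFAULT_ENDPOINT,      None
-    #     "always"                      any               DEFAULT_MTLS_ENDPOINT, None
-    #     "auto"                        absent            DEFAULT_ENDPOINT,      None
-    #     "auto"                        present           DEFAULT_MTLS_ENDPOINT, the cert source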
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceGrpcTransport, "grpc"), - (WorkflowTemplateServiceAsyncClient, transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceRestTransport, "rest"), -]) -def test_workflow_template_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceGrpcTransport, "grpc", grpc_helpers), - (WorkflowTemplateServiceAsyncClient, transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceRestTransport, "rest", None), -]) -def test_workflow_template_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_workflow_template_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = WorkflowTemplateServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceGrpcTransport, "grpc", grpc_helpers), - (WorkflowTemplateServiceAsyncClient, transports.WorkflowTemplateServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_workflow_template_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
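-    # In user code the same option looks like the sketch below (the filename
-    # is illustrative):
-    #
-    #     options = client_options.ClientOptions(
-    #         credentials_file="service-account.json")
-    #     client = WorkflowTemplateServiceClient(client_options=options)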
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.CreateWorkflowTemplateRequest, - dict, -]) -def test_create_workflow_template(request_type, transport: str = 'grpc'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.WorkflowTemplate( - id='id_value', - name='name_value', - version=774, - ) - response = client.create_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == 'id_value' - assert response.name == 'name_value' - assert response.version == 774 - - -def test_create_workflow_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workflow_template), - '__call__') as call: - client.create_workflow_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() - -@pytest.mark.asyncio -async def test_create_workflow_template_async(transport: str = 'grpc_asyncio', request_type=workflow_templates.CreateWorkflowTemplateRequest): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.create_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate(
-            id='id_value',
-            name='name_value',
-            version=774,
-        ))
-        response = await client.create_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.CreateWorkflowTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, workflow_templates.WorkflowTemplate)
-    assert response.id == 'id_value'
-    assert response.name == 'name_value'
-    assert response.version == 774
-
-
-@pytest.mark.asyncio
-async def test_create_workflow_template_async_from_dict():
-    await test_create_workflow_template_async(request_type=dict)
-
-
-def test_create_workflow_template_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.CreateWorkflowTemplateRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_workflow_template),
-            '__call__') as call:
-        call.return_value = workflow_templates.WorkflowTemplate()
-        client.create_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_workflow_template_field_headers_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.CreateWorkflowTemplateRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_workflow_template),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate())
-        await client.create_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_workflow_template_flattened():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = workflow_templates.WorkflowTemplate()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
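-        # The flattened call below is sugar for building the request object
-        # explicitly; a sketch of the equivalent request-object form:
-        #
-        #     request = workflow_templates.CreateWorkflowTemplateRequest(
-        #         parent='parent_value',
-        #         template=workflow_templates.WorkflowTemplate(id='id_value'),
-        #     )
-        #     client.create_workflow_template(request=request)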
- client.create_workflow_template( - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].template - mock_val = workflow_templates.WorkflowTemplate(id='id_value') - assert arg == mock_val - - -def test_create_workflow_template_flattened_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_workflow_template( - workflow_templates.CreateWorkflowTemplateRequest(), - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - -@pytest.mark.asyncio -async def test_create_workflow_template_flattened_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.WorkflowTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_workflow_template( - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].template - mock_val = workflow_templates.WorkflowTemplate(id='id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_workflow_template_flattened_error_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_workflow_template( - workflow_templates.CreateWorkflowTemplateRequest(), - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.GetWorkflowTemplateRequest, - dict, -]) -def test_get_workflow_template(request_type, transport: str = 'grpc'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = workflow_templates.WorkflowTemplate(
-            id='id_value',
-            name='name_value',
-            version=774,
-        )
-        response = client.get_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.GetWorkflowTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, workflow_templates.WorkflowTemplate)
-    assert response.id == 'id_value'
-    assert response.name == 'name_value'
-    assert response.version == 774
-
-
-def test_get_workflow_template_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_workflow_template),
-            '__call__') as call:
-        client.get_workflow_template()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.GetWorkflowTemplateRequest()
-
-@pytest.mark.asyncio
-async def test_get_workflow_template_async(transport: str = 'grpc_asyncio', request_type=workflow_templates.GetWorkflowTemplateRequest):
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate(
-            id='id_value',
-            name='name_value',
-            version=774,
-        ))
-        response = await client.get_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.GetWorkflowTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, workflow_templates.WorkflowTemplate)
-    assert response.id == 'id_value'
-    assert response.name == 'name_value'
-    assert response.version == 774
-
-
-@pytest.mark.asyncio
-async def test_get_workflow_template_async_from_dict():
-    await test_get_workflow_template_async(request_type=dict)
-
-
-def test_get_workflow_template_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.GetWorkflowTemplateRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_workflow_template),
-            '__call__') as call:
-        call.return_value = workflow_templates.WorkflowTemplate()
-        client.get_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_workflow_template_field_headers_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = workflow_templates.GetWorkflowTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workflow_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate()) - await client.get_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_workflow_template_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.WorkflowTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_workflow_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_workflow_template_flattened_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_workflow_template( - workflow_templates.GetWorkflowTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_workflow_template_flattened_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.WorkflowTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_workflow_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_workflow_template_flattened_error_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_workflow_template( - workflow_templates.GetWorkflowTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.InstantiateWorkflowTemplateRequest, - dict, -]) -def test_instantiate_workflow_template(request_type, transport: str = 'grpc'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.instantiate_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_instantiate_workflow_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_workflow_template), - '__call__') as call: - client.instantiate_workflow_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() - -@pytest.mark.asyncio -async def test_instantiate_workflow_template_async(transport: str = 'grpc_asyncio', request_type=workflow_templates.InstantiateWorkflowTemplateRequest): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.instantiate_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_instantiate_workflow_template_async_from_dict(): - await test_instantiate_workflow_template_async(request_type=dict) - - -def test_instantiate_workflow_template_field_headers(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = workflow_templates.InstantiateWorkflowTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_workflow_template), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.instantiate_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_instantiate_workflow_template_field_headers_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = workflow_templates.InstantiateWorkflowTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_workflow_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.instantiate_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_instantiate_workflow_template_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.instantiate_workflow_template( - name='name_value', - parameters={'key_value': 'value_value'}, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].parameters - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - - -def test_instantiate_workflow_template_flattened_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.instantiate_workflow_template( - workflow_templates.InstantiateWorkflowTemplateRequest(), - name='name_value', - parameters={'key_value': 'value_value'}, - ) - -@pytest.mark.asyncio -async def test_instantiate_workflow_template_flattened_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.instantiate_workflow_template( - name='name_value', - parameters={'key_value': 'value_value'}, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].parameters - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - -@pytest.mark.asyncio -async def test_instantiate_workflow_template_flattened_error_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.instantiate_workflow_template( - workflow_templates.InstantiateWorkflowTemplateRequest(), - name='name_value', - parameters={'key_value': 'value_value'}, - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.InstantiateInlineWorkflowTemplateRequest, - dict, -]) -def test_instantiate_inline_workflow_template(request_type, transport: str = 'grpc'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_inline_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.instantiate_inline_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - - -def test_instantiate_inline_workflow_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_inline_workflow_template), - '__call__') as call: - client.instantiate_inline_workflow_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() - -@pytest.mark.asyncio -async def test_instantiate_inline_workflow_template_async(transport: str = 'grpc_asyncio', request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_inline_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.instantiate_inline_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_instantiate_inline_workflow_template_async_from_dict(): - await test_instantiate_inline_workflow_template_async(request_type=dict) - - -def test_instantiate_inline_workflow_template_field_headers(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_inline_workflow_template), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.instantiate_inline_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_instantiate_inline_workflow_template_field_headers_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
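-    # Concretely, the header asserted below is constructed along the lines of
-    # this sketch:
-    #
-    #     gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),))
-    #
-    # which yields ('x-goog-request-params', 'parent=parent_value').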
- request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_inline_workflow_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.instantiate_inline_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_instantiate_inline_workflow_template_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_inline_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.instantiate_inline_workflow_template( - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].template - mock_val = workflow_templates.WorkflowTemplate(id='id_value') - assert arg == mock_val - - -def test_instantiate_inline_workflow_template_flattened_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.instantiate_inline_workflow_template( - workflow_templates.InstantiateInlineWorkflowTemplateRequest(), - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - -@pytest.mark.asyncio -async def test_instantiate_inline_workflow_template_flattened_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.instantiate_inline_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.instantiate_inline_workflow_template( - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].template - mock_val = workflow_templates.WorkflowTemplate(id='id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_instantiate_inline_workflow_template_flattened_error_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.instantiate_inline_workflow_template( - workflow_templates.InstantiateInlineWorkflowTemplateRequest(), - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.UpdateWorkflowTemplateRequest, - dict, -]) -def test_update_workflow_template(request_type, transport: str = 'grpc'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.WorkflowTemplate( - id='id_value', - name='name_value', - version=774, - ) - response = client.update_workflow_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == 'id_value' - assert response.name == 'name_value' - assert response.version == 774 - - -def test_update_workflow_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workflow_template), - '__call__') as call: - client.update_workflow_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() - -@pytest.mark.asyncio -async def test_update_workflow_template_async(transport: str = 'grpc_asyncio', request_type=workflow_templates.UpdateWorkflowTemplateRequest): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
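-        # FakeUnaryUnaryCall wraps the response so that awaiting the mocked
-        # stub behaves like a real grpc.aio unary-unary call; the async tests
-        # in this file all follow the same sketch (some_rpc is illustrative):
-        #
-        #     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(response)
-        #     response = await client.some_rpc(request)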
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate(
-            id='id_value',
-            name='name_value',
-            version=774,
-        ))
-        response = await client.update_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, workflow_templates.WorkflowTemplate)
-    assert response.id == 'id_value'
-    assert response.name == 'name_value'
-    assert response.version == 774
-
-
-@pytest.mark.asyncio
-async def test_update_workflow_template_async_from_dict():
-    await test_update_workflow_template_async(request_type=dict)
-
-
-def test_update_workflow_template_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.UpdateWorkflowTemplateRequest()
-
-    request.template.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_workflow_template),
-            '__call__') as call:
-        call.return_value = workflow_templates.WorkflowTemplate()
-        client.update_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'template.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_workflow_template_field_headers_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.UpdateWorkflowTemplateRequest()
-
-    request.template.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_workflow_template),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate())
-        await client.update_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'template.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_workflow_template_flattened():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = workflow_templates.WorkflowTemplate()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- client.update_workflow_template( - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].template - mock_val = workflow_templates.WorkflowTemplate(id='id_value') - assert arg == mock_val - - -def test_update_workflow_template_flattened_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_workflow_template( - workflow_templates.UpdateWorkflowTemplateRequest(), - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - -@pytest.mark.asyncio -async def test_update_workflow_template_flattened_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workflow_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.WorkflowTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.WorkflowTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_workflow_template( - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].template - mock_val = workflow_templates.WorkflowTemplate(id='id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_workflow_template_flattened_error_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_workflow_template( - workflow_templates.UpdateWorkflowTemplateRequest(), - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.ListWorkflowTemplatesRequest, - dict, -]) -def test_list_workflow_templates(request_type, transport: str = 'grpc'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workflow_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.ListWorkflowTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_workflow_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, pagers.ListWorkflowTemplatesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_workflow_templates_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workflow_templates),
-            '__call__') as call:
-        client.list_workflow_templates()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.ListWorkflowTemplatesRequest()
-
-@pytest.mark.asyncio
-async def test_list_workflow_templates_async(transport: str = 'grpc_asyncio', request_type=workflow_templates.ListWorkflowTemplatesRequest):
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workflow_templates),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.ListWorkflowTemplatesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_workflow_templates(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.ListWorkflowTemplatesRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListWorkflowTemplatesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_workflow_templates_async_from_dict():
-    await test_list_workflow_templates_async(request_type=dict)
-
-
-def test_list_workflow_templates_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.ListWorkflowTemplatesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workflow_templates),
-            '__call__') as call:
-        call.return_value = workflow_templates.ListWorkflowTemplatesResponse()
-        client.list_workflow_templates(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_workflow_templates_field_headers_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
- request = workflow_templates.ListWorkflowTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workflow_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.ListWorkflowTemplatesResponse()) - await client.list_workflow_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_workflow_templates_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workflow_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.ListWorkflowTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_workflow_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_workflow_templates_flattened_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_workflow_templates( - workflow_templates.ListWorkflowTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_workflow_templates_flattened_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workflow_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = workflow_templates.ListWorkflowTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(workflow_templates.ListWorkflowTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_workflow_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_workflow_templates_flattened_error_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        await client.list_workflow_templates(
-            workflow_templates.ListWorkflowTemplatesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_workflow_templates_pager(transport_name: str = "grpc"):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workflow_templates),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='abc',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[],
-                next_page_token='def',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='ghi',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_workflow_templates(request={})
-
-        assert pager._metadata == metadata
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, workflow_templates.WorkflowTemplate)
-                   for i in results)
-
-
-def test_list_workflow_templates_pages(transport_name: str = "grpc"):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workflow_templates),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='abc',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[],
-                next_page_token='def',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='ghi',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_workflow_templates(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_workflow_templates_async_pager():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workflow_templates),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='abc',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[],
-                next_page_token='def',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='ghi',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_workflow_templates(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, workflow_templates.WorkflowTemplate)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_workflow_templates_async_pages():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workflow_templates),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='abc',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[],
-                next_page_token='def',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                ],
-                next_page_token='ghi',
-            ),
-            workflow_templates.ListWorkflowTemplatesResponse(
-                templates=[
-                    workflow_templates.WorkflowTemplate(),
-                    workflow_templates.WorkflowTemplate(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_workflow_templates(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    workflow_templates.DeleteWorkflowTemplateRequest,
-    dict,
-])
-def test_delete_workflow_template(request_type, transport: str = 'grpc'):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_workflow_template_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workflow_template),
-            '__call__') as call:
-        client.delete_workflow_template()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest()
-
-@pytest.mark.asyncio
-async def test_delete_workflow_template_async(transport: str = 'grpc_asyncio', request_type=workflow_templates.DeleteWorkflowTemplateRequest):
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_workflow_template_async_from_dict():
-    await test_delete_workflow_template_async(request_type=dict)
-
-
-def test_delete_workflow_template_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.DeleteWorkflowTemplateRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workflow_template),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_workflow_template_field_headers_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = workflow_templates.DeleteWorkflowTemplateRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workflow_template),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_workflow_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_workflow_template_flattened():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_workflow_template(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_workflow_template_flattened_error():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_workflow_template(
-            workflow_templates.DeleteWorkflowTemplateRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_workflow_template_flattened_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workflow_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_workflow_template(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_workflow_template_flattened_error_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.delete_workflow_template( - workflow_templates.DeleteWorkflowTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.CreateWorkflowTemplateRequest, - dict, -]) -def test_create_workflow_template_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["template"] = {'id': 'id_value', 'name': 'name_value', 'version': 774, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'placement': {'managed_cluster': {'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 
'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'labels': {}}, 'cluster_selector': {'zone': 'zone_value', 'cluster_labels': {}}}, 'jobs': [{'step_id': 'step_id_value', 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'prerequisite_step_ids': ['prerequisite_step_ids_value1', 'prerequisite_step_ids_value2']}], 'parameters': [{'name': 'name_value', 'fields': ['fields_value1', 'fields_value2'], 'description': 'description_value', 'validation': {'regex': {'regexes': ['regexes_value1', 'regexes_value2']}, 'values': {'values': ['values_value1', 
'values_value2']}}}], 'dag_timeout': {}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = workflow_templates.WorkflowTemplate(
-            id='id_value',
-            name='name_value',
-            version=774,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.create_workflow_template(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, workflow_templates.WorkflowTemplate)
-    assert response.id == 'id_value'
-    assert response.name == 'name_value'
-    assert response.version == 774
-
-
-def test_create_workflow_template_rest_required_fields(request_type=workflow_templates.CreateWorkflowTemplateRequest):
-    transport_class = transports.WorkflowTemplateServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_workflow_template._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_workflow_template._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = workflow_templates.WorkflowTemplate()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.create_workflow_template(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_workflow_template_rest_unset_required_fields():
-    transport = transports.WorkflowTemplateServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_workflow_template._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "template", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_workflow_template_rest_interceptors(null_interceptor):
-    transport = transports.WorkflowTemplateServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.WorkflowTemplateServiceRestInterceptor(),
-    )
-    client = WorkflowTemplateServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "post_create_workflow_template") as post, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "pre_create_workflow_template") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = workflow_templates.CreateWorkflowTemplateRequest.pb(workflow_templates.CreateWorkflowTemplateRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = workflow_templates.WorkflowTemplate.to_json(workflow_templates.WorkflowTemplate())
-
-        request = workflow_templates.CreateWorkflowTemplateRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = workflow_templates.WorkflowTemplate()
-
-        client.create_workflow_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_create_workflow_template_rest_bad_request(transport: str = 'rest', request_type=workflow_templates.CreateWorkflowTemplateRequest):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request_init["template"] = {'id': 'id_value', 'name': 'name_value', 'version': 774, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'placement': {'managed_cluster': {'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value',
'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'labels': {}}, 'cluster_selector': {'zone': 'zone_value', 'cluster_labels': {}}}, 'jobs': [{'step_id': 'step_id_value', 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 
'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'prerequisite_step_ids': ['prerequisite_step_ids_value1', 'prerequisite_step_ids_value2']}], 'parameters': [{'name': 'name_value', 'fields': ['fields_value1', 'fields_value2'], 'description': 'description_value', 'validation': {'regex': {'regexes': ['regexes_value1', 'regexes_value2']}, 'values': {'values': ['values_value1', 'values_value2']}}}], 'dag_timeout': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_workflow_template(request) - - -def test_create_workflow_template_rest_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
-        return_value = workflow_templates.WorkflowTemplate()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            template=workflow_templates.WorkflowTemplate(id='id_value'),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.create_workflow_template(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workflowTemplates" % client.transport._host, args[1])
-
-
-def test_create_workflow_template_rest_flattened_error(transport: str = 'rest'):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_workflow_template(
-            workflow_templates.CreateWorkflowTemplateRequest(),
-            parent='parent_value',
-            template=workflow_templates.WorkflowTemplate(id='id_value'),
-        )
-
-
-def test_create_workflow_template_rest_error():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    workflow_templates.GetWorkflowTemplateRequest,
-    dict,
-])
-def test_get_workflow_template_rest(request_type):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = workflow_templates.WorkflowTemplate(
-            id='id_value',
-            name='name_value',
-            version=774,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.get_workflow_template(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, workflow_templates.WorkflowTemplate)
-    assert response.id == 'id_value'
-    assert response.name == 'name_value'
-    assert response.version == 774
-
-
-def test_get_workflow_template_rest_required_fields(request_type=workflow_templates.GetWorkflowTemplateRequest):
-    transport_class = transports.WorkflowTemplateServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_workflow_template._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_workflow_template._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("version", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = workflow_templates.WorkflowTemplate()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.get_workflow_template(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_workflow_template_rest_unset_required_fields():
-    transport = transports.WorkflowTemplateServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_workflow_template._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("version", )) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_workflow_template_rest_interceptors(null_interceptor):
-    transport = transports.WorkflowTemplateServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.WorkflowTemplateServiceRestInterceptor(),
-    )
-    client = WorkflowTemplateServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "post_get_workflow_template") as post, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "pre_get_workflow_template") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = workflow_templates.GetWorkflowTemplateRequest.pb(workflow_templates.GetWorkflowTemplateRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = workflow_templates.WorkflowTemplate.to_json(workflow_templates.WorkflowTemplate())
-
-        request = workflow_templates.GetWorkflowTemplateRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = workflow_templates.WorkflowTemplate()
-
-        client.get_workflow_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_get_workflow_template_rest_bad_request(transport: str = 'rest', request_type=workflow_templates.GetWorkflowTemplateRequest):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.get_workflow_template(request)
-
-
-def test_get_workflow_template_rest_flattened():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = workflow_templates.WorkflowTemplate()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.get_workflow_template(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/workflowTemplates/*}" % client.transport._host, args[1])
-
-
-def test_get_workflow_template_rest_flattened_error(transport: str = 'rest'):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_workflow_template(
-            workflow_templates.GetWorkflowTemplateRequest(),
-            name='name_value',
-        )
-
-
-def test_get_workflow_template_rest_error():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    workflow_templates.InstantiateWorkflowTemplateRequest,
-    dict,
-])
-def test_instantiate_workflow_template_rest(request_type):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.instantiate_workflow_template(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == "operations/spam"
-
-
-def test_instantiate_workflow_template_rest_required_fields(request_type=workflow_templates.InstantiateWorkflowTemplateRequest):
-    transport_class = transports.WorkflowTemplateServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        including_default_value_fields=False,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).instantiate_workflow_template._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).instantiate_workflow_template._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.instantiate_workflow_template(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_instantiate_workflow_template_rest_unset_required_fields():
-    transport = transports.WorkflowTemplateServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.instantiate_workflow_template._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_instantiate_workflow_template_rest_interceptors(null_interceptor):
-    transport = transports.WorkflowTemplateServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.WorkflowTemplateServiceRestInterceptor(),
-    )
-    client = WorkflowTemplateServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "post_instantiate_workflow_template") as post, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "pre_instantiate_workflow_template") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = workflow_templates.InstantiateWorkflowTemplateRequest.pb(workflow_templates.InstantiateWorkflowTemplateRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = json_format.MessageToJson(operations_pb2.Operation())
-
-        request = workflow_templates.InstantiateWorkflowTemplateRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-
-        client.instantiate_workflow_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_instantiate_workflow_template_rest_bad_request(transport: str = 'rest', request_type=workflow_templates.InstantiateWorkflowTemplateRequest):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.instantiate_workflow_template(request)
-
-
-def test_instantiate_workflow_template_rest_flattened():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-            parameters={'key_value': 'value_value'},
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-
-        client.instantiate_workflow_template(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate" % client.transport._host, args[1])
-
-
-def test_instantiate_workflow_template_rest_flattened_error(transport: str = 'rest'):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.instantiate_workflow_template( - workflow_templates.InstantiateWorkflowTemplateRequest(), - name='name_value', - parameters={'key_value': 'value_value'}, - ) - - -def test_instantiate_workflow_template_rest_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.InstantiateInlineWorkflowTemplateRequest, - dict, -]) -def test_instantiate_inline_workflow_template_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["template"] = {'id': 'id_value', 'name': 'name_value', 'version': 774, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'placement': {'managed_cluster': {'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 
'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'labels': {}}, 'cluster_selector': {'zone': 'zone_value', 'cluster_labels': {}}}, 'jobs': [{'step_id': 'step_id_value', 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'prerequisite_step_ids': ['prerequisite_step_ids_value1', 
'prerequisite_step_ids_value2']}], 'parameters': [{'name': 'name_value', 'fields': ['fields_value1', 'fields_value2'], 'description': 'description_value', 'validation': {'regex': {'regexes': ['regexes_value1', 'regexes_value2']}, 'values': {'values': ['values_value1', 'values_value2']}}}], 'dag_timeout': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.instantiate_inline_workflow_template(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - - -def test_instantiate_inline_workflow_template_rest_required_fields(request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest): - transport_class = transports.WorkflowTemplateServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).instantiate_inline_workflow_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).instantiate_inline_workflow_template._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
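An editorial aside before the fabricated transcode result below: `google.api_core.path_template.transcode` normally maps a request proto onto the method's HTTP rule and returns a dict with `uri`, `method`, `query_params`, and, for body-carrying rules, `body` keys, which is exactly the shape these tests hand back from the mock. A minimal sketch of that shape (`fake_transcode` is an invented stand-in, not the real transcoding logic):

```python
# Invented stand-in that mimics only the *shape* of a transcode() result.
def fake_transcode(request, uri="v1/sample_method", method="post", include_body=True):
    result = {"uri": uri, "method": method, "query_params": request}
    if include_body:
        # Body-carrying HTTP rules also serialize the request as the payload.
        result["body"] = request
    return result

result = fake_transcode({"parent": "parent_value"})
assert result["method"] == "post" and "body" in result
```

The `('$alt', 'json;enum-encoding=int')` pair asserted after each call is the REST transport's standing query parameter, asking the server for JSON responses with enums encoded as integers.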
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.instantiate_inline_workflow_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_instantiate_inline_workflow_template_rest_unset_required_fields(): - transport = transports.WorkflowTemplateServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.instantiate_inline_workflow_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("parent", "template", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_instantiate_inline_workflow_template_rest_interceptors(null_interceptor): - transport = transports.WorkflowTemplateServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.WorkflowTemplateServiceRestInterceptor(), - ) - client = WorkflowTemplateServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "post_instantiate_inline_workflow_template") as post, \ - mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "pre_instantiate_inline_workflow_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = workflow_templates.InstantiateInlineWorkflowTemplateRequest.pb(workflow_templates.InstantiateInlineWorkflowTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.instantiate_inline_workflow_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_instantiate_inline_workflow_template_rest_bad_request(transport: str = 'rest', request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["template"] = {'id': 'id_value', 'name': 'name_value', 'version': 774, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'placement': {'managed_cluster': {'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 
'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'labels': {}}, 'cluster_selector': {'zone': 'zone_value', 'cluster_labels': {}}}, 'jobs': [{'step_id': 'step_id_value', 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 
'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'prerequisite_step_ids': ['prerequisite_step_ids_value1', 'prerequisite_step_ids_value2']}], 'parameters': [{'name': 'name_value', 'fields': ['fields_value1', 'fields_value2'], 'description': 'description_value', 'validation': {'regex': {'regexes': ['regexes_value1', 'regexes_value2']}, 'values': {'values': ['values_value1', 'values_value2']}}}], 'dag_timeout': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.instantiate_inline_workflow_template(request) - - -def test_instantiate_inline_workflow_template_rest_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
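Before the mocked session call below, the response-faking idiom this file repeats everywhere is worth seeing in isolation: build a real `requests.Response`, set its status code, and inject the serialized proto through the private `_content` attribute that the public `.content` property reads. A self-contained sketch (assumes only `requests` and the protobuf runtime are installed):

```python
from requests import Response
from google.longrunning import operations_pb2
from google.protobuf import json_format

# Hand-build the Response object the mocked session will return.
fake = Response()
fake.status_code = 200
fake._content = json_format.MessageToJson(
    operations_pb2.Operation(name='operations/spam')
).encode('UTF-8')

# requests surfaces _content through the public .content/.text properties.
assert b'operations/spam' in fake.content
```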
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.instantiate_inline_workflow_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" % client.transport._host, args[1]) - - -def test_instantiate_inline_workflow_template_rest_flattened_error(transport: str = 'rest'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.instantiate_inline_workflow_template( - workflow_templates.InstantiateInlineWorkflowTemplateRequest(), - parent='parent_value', - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - -def test_instantiate_inline_workflow_template_rest_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.UpdateWorkflowTemplateRequest, - dict, -]) -def test_update_workflow_template_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'template': {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}} - request_init["template"] = {'id': 'id_value', 'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3', 'version': 774, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'placement': {'managed_cluster': {'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': 
['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'labels': {}}, 'cluster_selector': {'zone': 'zone_value', 'cluster_labels': {}}}, 'jobs': [{'step_id': 'step_id_value', 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': 
['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'prerequisite_step_ids': ['prerequisite_step_ids_value1', 'prerequisite_step_ids_value2']}], 'parameters': [{'name': 'name_value', 'fields': ['fields_value1', 'fields_value2'], 'description': 'description_value', 'validation': {'regex': {'regexes': ['regexes_value1', 'regexes_value2']}, 'values': {'values': ['values_value1', 'values_value2']}}}], 'dag_timeout': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = workflow_templates.WorkflowTemplate( - id='id_value', - name='name_value', - version=774, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_workflow_template(request) - - # Establish that the response is the type that we expect. 
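Before the type assertions below, an aside on the serialization dance above: `WorkflowTemplate` is a proto-plus wrapper, so the tests first unwrap it with `.pb()` and only then feed it to `json_format.MessageToJson`. A minimal round-trip sketch (assumes `google-cloud-dataproc` is importable; `from_json` is the proto-plus inverse, used here purely for illustration):

```python
from google.cloud.dataproc_v1.types import workflow_templates
from google.protobuf import json_format

tmpl = workflow_templates.WorkflowTemplate(id='id_value', version=774)

# .pb() exposes the raw protobuf message underneath the proto-plus wrapper.
payload = json_format.MessageToJson(workflow_templates.WorkflowTemplate.pb(tmpl))

# The client performs the mirror-image parse when it reads the HTTP body.
restored = workflow_templates.WorkflowTemplate.from_json(payload)
assert restored.id == 'id_value' and restored.version == 774
```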
- assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == 'id_value' - assert response.name == 'name_value' - assert response.version == 774 - - -def test_update_workflow_template_rest_required_fields(request_type=workflow_templates.UpdateWorkflowTemplateRequest): - transport_class = transports.WorkflowTemplateServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_workflow_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_workflow_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = workflow_templates.WorkflowTemplate() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "put", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_workflow_template(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_workflow_template_rest_unset_required_fields(): - transport = transports.WorkflowTemplateServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_workflow_template._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("template", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_workflow_template_rest_interceptors(null_interceptor): - transport = transports.WorkflowTemplateServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.WorkflowTemplateServiceRestInterceptor(), - ) - client = WorkflowTemplateServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "post_update_workflow_template") as post, \ - mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "pre_update_workflow_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = workflow_templates.UpdateWorkflowTemplateRequest.pb(workflow_templates.UpdateWorkflowTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = workflow_templates.WorkflowTemplate.to_json(workflow_templates.WorkflowTemplate()) - - request = workflow_templates.UpdateWorkflowTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = workflow_templates.WorkflowTemplate() - - client.update_workflow_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_workflow_template_rest_bad_request(transport: str = 'rest', request_type=workflow_templates.UpdateWorkflowTemplateRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'template': {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}} - request_init["template"] = {'id': 'id_value', 'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3', 'version': 774, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'placement': {'managed_cluster': {'cluster_name': 'cluster_name_value', 'config': {'config_bucket': 'config_bucket_value', 'temp_bucket': 
'temp_bucket_value', 'gce_cluster_config': {'zone_uri': 'zone_uri_value', 'network_uri': 'network_uri_value', 'subnetwork_uri': 'subnetwork_uri_value', 'internal_ip_only': True, 'private_ipv6_google_access': 1, 'service_account': 'service_account_value', 'service_account_scopes': ['service_account_scopes_value1', 'service_account_scopes_value2'], 'tags': ['tags_value1', 'tags_value2'], 'metadata': {}, 'reservation_affinity': {'consume_reservation_type': 1, 'key': 'key_value', 'values': ['values_value1', 'values_value2']}, 'node_group_affinity': {'node_group_uri': 'node_group_uri_value'}, 'shielded_instance_config': {'enable_secure_boot': True, 'enable_vtpm': True, 'enable_integrity_monitoring': True}, 'confidential_instance_config': {'enable_confidential_compute': True}}, 'master_config': {'num_instances': 1399, 'instance_names': ['instance_names_value1', 'instance_names_value2'], 'image_uri': 'image_uri_value', 'machine_type_uri': 'machine_type_uri_value', 'disk_config': {'boot_disk_type': 'boot_disk_type_value', 'boot_disk_size_gb': 1792, 'num_local_ssds': 1494, 'local_ssd_interface': 'local_ssd_interface_value'}, 'is_preemptible': True, 'preemptibility': 1, 'managed_group_config': {'instance_template_name': 'instance_template_name_value', 'instance_group_manager_name': 'instance_group_manager_name_value'}, 'accelerators': [{'accelerator_type_uri': 'accelerator_type_uri_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value'}, 'worker_config': {}, 'secondary_worker_config': {}, 'software_config': {'image_version': 'image_version_value', 'properties': {}, 'optional_components': [5]}, 'initialization_actions': [{'executable_file': 'executable_file_value', 'execution_timeout': {'seconds': 751, 'nanos': 543}}], 'encryption_config': {'gce_pd_kms_key_name': 'gce_pd_kms_key_name_value'}, 'autoscaling_config': {'policy_uri': 'policy_uri_value'}, 'security_config': {'kerberos_config': {'enable_kerberos': True, 'root_principal_password_uri': 'root_principal_password_uri_value', 'kms_key_uri': 'kms_key_uri_value', 'keystore_uri': 'keystore_uri_value', 'truststore_uri': 'truststore_uri_value', 'keystore_password_uri': 'keystore_password_uri_value', 'key_password_uri': 'key_password_uri_value', 'truststore_password_uri': 'truststore_password_uri_value', 'cross_realm_trust_realm': 'cross_realm_trust_realm_value', 'cross_realm_trust_kdc': 'cross_realm_trust_kdc_value', 'cross_realm_trust_admin_server': 'cross_realm_trust_admin_server_value', 'cross_realm_trust_shared_password_uri': 'cross_realm_trust_shared_password_uri_value', 'kdc_db_key_uri': 'kdc_db_key_uri_value', 'tgt_lifetime_hours': 1933, 'realm': 'realm_value'}, 'identity_config': {'user_service_account_mapping': {}}}, 'lifecycle_config': {'idle_delete_ttl': {}, 'auto_delete_time': {}, 'auto_delete_ttl': {}, 'idle_start_time': {}}, 'endpoint_config': {'http_ports': {}, 'enable_http_port_access': True}, 'metastore_config': {'dataproc_metastore_service': 'dataproc_metastore_service_value'}, 'dataproc_metric_config': {'metrics': [{'metric_source': 1, 'metric_overrides': ['metric_overrides_value1', 'metric_overrides_value2']}]}, 'auxiliary_node_groups': [{'node_group': {'name': 'name_value', 'roles': [1], 'node_group_config': {}, 'labels': {}}, 'node_group_id': 'node_group_id_value'}]}, 'labels': {}}, 'cluster_selector': {'zone': 'zone_value', 'cluster_labels': {}}}, 'jobs': [{'step_id': 'step_id_value', 'hadoop_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': 
['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {'driver_log_levels': {}}}, 'spark_job': {'main_jar_file_uri': 'main_jar_file_uri_value', 'main_class': 'main_class_value', 'args': ['args_value1', 'args_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'pyspark_job': {'main_python_file_uri': 'main_python_file_uri_value', 'args': ['args_value1', 'args_value2'], 'python_file_uris': ['python_file_uris_value1', 'python_file_uris_value2'], 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'hive_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {'queries': ['queries_value1', 'queries_value2']}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2']}, 'pig_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'spark_r_job': {'main_r_file_uri': 'main_r_file_uri_value', 'args': ['args_value1', 'args_value2'], 'file_uris': ['file_uris_value1', 'file_uris_value2'], 'archive_uris': ['archive_uris_value1', 'archive_uris_value2'], 'properties': {}, 'logging_config': {}}, 'spark_sql_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'script_variables': {}, 'properties': {}, 'jar_file_uris': ['jar_file_uris_value1', 'jar_file_uris_value2'], 'logging_config': {}}, 'presto_job': {'query_file_uri': 'query_file_uri_value', 'query_list': {}, 'continue_on_failure': True, 'output_format': 'output_format_value', 'client_tags': ['client_tags_value1', 'client_tags_value2'], 'properties': {}, 'logging_config': {}}, 'labels': {}, 'scheduling': {'max_failures_per_hour': 2243, 'max_failures_total': 1923}, 'prerequisite_step_ids': ['prerequisite_step_ids_value1', 'prerequisite_step_ids_value2']}], 'parameters': [{'name': 'name_value', 'fields': ['fields_value1', 'fields_value2'], 'description': 'description_value', 'validation': {'regex': {'regexes': ['regexes_value1', 'regexes_value2']}, 'values': {'values': ['values_value1', 'values_value2']}}}], 'dag_timeout': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_workflow_template(request) - - -def test_update_workflow_template_rest_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
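Before the designated return value below, a note on why the flattened variants exist at all: every RPC accepts either a fully-formed request proto or a handful of convenience keyword arguments, and supplying both is rejected, which is what the `*_flattened_error` tests pin down. A toy version of that dispatch rule (invented function, not the generated client code):

```python
# Invented illustration of the request-vs-flattened-arguments rule.
def call(request=None, **flattened):
    if request is not None and flattened:
        raise ValueError("pass a request object or flattened fields, not both")
    return request if request is not None else flattened

assert call(parent='parent_value') == {'parent': 'parent_value'}
try:
    call(request={'parent': 'p'}, parent='parent_value')
except ValueError:
    pass  # expected, mirroring the pytest.raises(ValueError) tests
```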
- return_value = workflow_templates.WorkflowTemplate() - - # get arguments that satisfy an http rule for this method - sample_request = {'template': {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = workflow_templates.WorkflowTemplate.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_workflow_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{template.name=projects/*/locations/*/workflowTemplates/*}" % client.transport._host, args[1]) - - -def test_update_workflow_template_rest_flattened_error(transport: str = 'rest'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_workflow_template( - workflow_templates.UpdateWorkflowTemplateRequest(), - template=workflow_templates.WorkflowTemplate(id='id_value'), - ) - - -def test_update_workflow_template_rest_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.ListWorkflowTemplatesRequest, - dict, -]) -def test_list_workflow_templates_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = workflow_templates.ListWorkflowTemplatesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = workflow_templates.ListWorkflowTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_workflow_templates(request) - - # Establish that the response is the type that we expect. 
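One aside before the pager assertions that follow: the `path_template.validate` call in the flattened tests above checks the final URL against the method's HTTP-rule template. The function is the real `google.api_core` one; the template and URL here are illustrative stand-ins for the ones in the tests:

```python
from google.api_core import path_template

# validate() compiles the template into a regex and matches the candidate URL;
# each '*' matches exactly one path segment.
assert path_template.validate(
    "https://host/v1/{template.name=projects/*/locations/*/workflowTemplates/*}",
    "https://host/v1/projects/sample1/locations/sample2/workflowTemplates/sample3",
)
```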
- assert isinstance(response, pagers.ListWorkflowTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_workflow_templates_rest_required_fields(request_type=workflow_templates.ListWorkflowTemplatesRequest): - transport_class = transports.WorkflowTemplateServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_workflow_templates._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_workflow_templates._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = workflow_templates.ListWorkflowTemplatesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            pb_return_value = workflow_templates.ListWorkflowTemplatesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(pb_return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.list_workflow_templates(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_workflow_templates_rest_unset_required_fields():
-    transport = transports.WorkflowTemplateServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_workflow_templates._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_workflow_templates_rest_interceptors(null_interceptor):
-    transport = transports.WorkflowTemplateServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.WorkflowTemplateServiceRestInterceptor(),
-    )
-    client = WorkflowTemplateServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "post_list_workflow_templates") as post, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "pre_list_workflow_templates") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = workflow_templates.ListWorkflowTemplatesRequest.pb(workflow_templates.ListWorkflowTemplatesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = workflow_templates.ListWorkflowTemplatesResponse.to_json(workflow_templates.ListWorkflowTemplatesResponse())
-
-        request = workflow_templates.ListWorkflowTemplatesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = workflow_templates.ListWorkflowTemplatesResponse()
-
-        client.list_workflow_templates(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_list_workflow_templates_rest_bad_request(transport: str = 'rest', request_type=workflow_templates.ListWorkflowTemplatesRequest):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
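One aside before the BadRequest mock below, since the `_get_unset_required_fields` assertion a few tests up is easy to misread: the expected value is the intersection of the fields that carry defaults in the query string with the fields the HTTP rule marks as required. For the list method that intersection is empty, meaning no required field may legitimately be left unset. The set algebra in isolation, with values copied from the test:

```python
# Worked instance of the assertion's set arithmetic.
query_param_defaults = {"pageSize", "pageToken"}  # optional paging fields
required_fields = {"parent"}                      # required by the HTTP rule
assert (query_param_defaults & required_fields) == set()
```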
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_workflow_templates(request) - - -def test_list_workflow_templates_rest_flattened(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = workflow_templates.ListWorkflowTemplatesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = workflow_templates.ListWorkflowTemplatesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_workflow_templates(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/workflowTemplates" % client.transport._host, args[1]) - - -def test_list_workflow_templates_rest_flattened_error(transport: str = 'rest'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_workflow_templates( - workflow_templates.ListWorkflowTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_workflow_templates_rest_pager(transport: str = 'rest'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - workflow_templates.ListWorkflowTemplatesResponse( - templates=[ - workflow_templates.WorkflowTemplate(), - workflow_templates.WorkflowTemplate(), - workflow_templates.WorkflowTemplate(), - ], - next_page_token='abc', - ), - workflow_templates.ListWorkflowTemplatesResponse( - templates=[], - next_page_token='def', - ), - workflow_templates.ListWorkflowTemplatesResponse( - templates=[ - workflow_templates.WorkflowTemplate(), - ], - next_page_token='ghi', - ), - workflow_templates.ListWorkflowTemplatesResponse( - templates=[ - workflow_templates.WorkflowTemplate(), - workflow_templates.WorkflowTemplate(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(workflow_templates.ListWorkflowTemplatesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_workflow_templates(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, workflow_templates.WorkflowTemplate) - for i in results) - - pages = list(client.list_workflow_templates(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - workflow_templates.DeleteWorkflowTemplateRequest, - dict, -]) -def test_delete_workflow_template_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_workflow_template(request) - - # Establish that the response is the type that we expect. 
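Before the delete assertions resume below, a brief model of what the pager test above verifies: the client keeps fetching pages while the server returns a non-empty `next_page_token`, and iterating the pager flattens the items across pages. A stripped-down analogue, with plain dicts standing in for the response protos:

```python
# Simplified model of ListWorkflowTemplatesPager's token-chasing loop.
pages = [
    {"templates": ["t1", "t2", "t3"], "next_page_token": "abc"},
    {"templates": [],                 "next_page_token": "def"},
    {"templates": ["t4"],             "next_page_token": "ghi"},
    {"templates": ["t5", "t6"],       "next_page_token": ""},
]

def iter_items(pages):
    for page in pages:
        yield from page["templates"]
        if not page["next_page_token"]:
            break  # an empty token marks the final page

assert len(list(iter_items(pages))) == 6  # 3 + 0 + 1 + 2, as asserted above
```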
- assert response is None - - -def test_delete_workflow_template_rest_required_fields(request_type=workflow_templates.DeleteWorkflowTemplateRequest): - transport_class = transports.WorkflowTemplateServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_workflow_template._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_workflow_template._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("version", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-
-            response = client.delete_workflow_template(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_workflow_template_rest_unset_required_fields():
-    transport = transports.WorkflowTemplateServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_workflow_template._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("version", )) & set(("name", )))
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_workflow_template_rest_interceptors(null_interceptor):
-    transport = transports.WorkflowTemplateServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.WorkflowTemplateServiceRestInterceptor(),
-    )
-    client = WorkflowTemplateServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.WorkflowTemplateServiceRestInterceptor, "pre_delete_workflow_template") as pre:
-        pre.assert_not_called()
-        pb_message = workflow_templates.DeleteWorkflowTemplateRequest.pb(workflow_templates.DeleteWorkflowTemplateRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-
-        request = workflow_templates.DeleteWorkflowTemplateRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.delete_workflow_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_delete_workflow_template_rest_bad_request(transport: str = 'rest', request_type=workflow_templates.DeleteWorkflowTemplateRequest):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.delete_workflow_template(request)
-
-
-def test_delete_workflow_template_rest_flattened():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
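An aside on the interceptor tests in this stretch before the flattened mock continues below: note that the delete variant wires only a `pre_` hook, since a `None` response leaves nothing to post-process. The contract being exercised is simply that `pre` may rewrite `(request, metadata)` before the wire call and `post` may rewrite the deserialized response after it. A tiny invented analogue (not the generated `WorkflowTemplateServiceRestInterceptor` API):

```python
calls = []

class RecordingInterceptor:
    """Invented stand-in recording the pre -> wire-call -> post ordering."""

    def pre(self, request, metadata):
        calls.append("pre")
        return request, metadata  # either may be rewritten here

    def post(self, response):
        calls.append("post")
        return response  # the response may be replaced here

interceptor = RecordingInterceptor()
request, metadata = interceptor.pre({"name": "n"}, [("key", "val")])
interceptor.post({"done": True})
assert calls == ["pre", "post"]
```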
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/workflowTemplates/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_workflow_template(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/workflowTemplates/*}" % client.transport._host, args[1]) - - -def test_delete_workflow_template_rest_flattened_error(transport: str = 'rest'): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_workflow_template( - workflow_templates.DeleteWorkflowTemplateRequest(), - name='name_value', - ) - - -def test_delete_workflow_template_rest_error(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = WorkflowTemplateServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = WorkflowTemplateServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = WorkflowTemplateServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = WorkflowTemplateServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
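Before the body of `test_transport_instance` below, a sketch of the mutual-exclusion rule the `ValueError` cases above pin down: once a ready-made transport is supplied, credentials, credential files, scopes, and API keys must not also be supplied, because the transport already owns its credentials. This is illustrative guard logic only, not the real client `__init__`:

```python
# Hypothetical distillation of the argument validation exercised above.
def validate_client_args(transport=None, credentials=None, client_options=None):
    opts = client_options or {}
    conflicting = (
        credentials is not None
        or opts.get("credentials_file")
        or opts.get("scopes")
        or opts.get("api_key")
    )
    if transport is not None and conflicting:
        raise ValueError("transport is mutually exclusive with credentials/options")

validate_client_args(transport=object())  # fine: the transport stands alone
try:
    validate_client_args(transport=object(), credentials=object())
except ValueError:
    pass  # expected, as in the tests above
```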
- transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = WorkflowTemplateServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.WorkflowTemplateServiceGrpcTransport, - transports.WorkflowTemplateServiceGrpcAsyncIOTransport, - transports.WorkflowTemplateServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = WorkflowTemplateServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.WorkflowTemplateServiceGrpcTransport, - ) - -def test_workflow_template_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.WorkflowTemplateServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_workflow_template_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.WorkflowTemplateServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_workflow_template', - 'get_workflow_template', - 'instantiate_workflow_template', - 'instantiate_inline_workflow_template', - 'update_workflow_template', - 'list_workflow_templates', - 'delete_workflow_template', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_workflow_template_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.WorkflowTemplateServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_workflow_template_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.WorkflowTemplateServiceTransport() - adc.assert_called_once() - - -def test_workflow_template_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - WorkflowTemplateServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.WorkflowTemplateServiceGrpcTransport, - transports.WorkflowTemplateServiceGrpcAsyncIOTransport, - ], -) -def test_workflow_template_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.WorkflowTemplateServiceGrpcTransport, - transports.WorkflowTemplateServiceGrpcAsyncIOTransport, - transports.WorkflowTemplateServiceRestTransport, - ], -) -def test_workflow_template_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), - (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_workflow_template_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataproc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="dataproc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.WorkflowTemplateServiceGrpcTransport, transports.WorkflowTemplateServiceGrpcAsyncIOTransport]) -def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_workflow_template_service_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.WorkflowTemplateServiceRestTransport (
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-def test_workflow_template_service_rest_lro_client():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.AbstractOperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_workflow_template_service_host_no_port(transport_name):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataproc.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataproc.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_workflow_template_service_host_with_port(transport_name):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataproc.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataproc.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataproc.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_workflow_template_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = WorkflowTemplateServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = WorkflowTemplateServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.create_workflow_template._session
-    session2 = client2.transport.create_workflow_template._session
-    assert session1 != session2
-    session1 = client1.transport.get_workflow_template._session
-    session2 = client2.transport.get_workflow_template._session
-    assert session1 != session2
-    session1 = client1.transport.instantiate_workflow_template._session
-    session2 = client2.transport.instantiate_workflow_template._session
-    assert session1 != session2
-    session1 = client1.transport.instantiate_inline_workflow_template._session
-    session2 = client2.transport.instantiate_inline_workflow_template._session
-    assert session1 != session2
-    session1 = client1.transport.update_workflow_template._session
-    session2 = client2.transport.update_workflow_template._session
-    assert session1 != session2
-    session1 = client1.transport.list_workflow_templates._session
-    session2 = client2.transport.list_workflow_templates._session
-    assert session1 != session2
-    session1 = client1.transport.delete_workflow_template._session
-    session2 = client2.transport.delete_workflow_template._session
-    assert session1 != session2
-def test_workflow_template_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.WorkflowTemplateServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
-
-
-def test_workflow_template_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.WorkflowTemplateServiceGrpcTransport, transports.WorkflowTemplateServiceGrpcAsyncIOTransport])
-def test_workflow_template_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.WorkflowTemplateServiceGrpcTransport, transports.WorkflowTemplateServiceGrpcAsyncIOTransport]) -def test_workflow_template_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_workflow_template_service_grpc_lro_client(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_workflow_template_service_grpc_lro_async_client(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_node_group_path(): - project = "squid" - region = "clam" - cluster = "whelk" - node_group = "octopus" - expected = "projects/{project}/regions/{region}/clusters/{cluster}/nodeGroups/{node_group}".format(project=project, region=region, cluster=cluster, node_group=node_group, ) - actual = WorkflowTemplateServiceClient.node_group_path(project, region, cluster, node_group) - assert expected == actual - - -def test_parse_node_group_path(): - expected = { - "project": "oyster", - "region": "nudibranch", - "cluster": "cuttlefish", - "node_group": "mussel", - } - path = WorkflowTemplateServiceClient.node_group_path(**expected) - - # Check that the path construction is reversible. 
- actual = WorkflowTemplateServiceClient.parse_node_group_path(path) - assert expected == actual - -def test_service_path(): - project = "winkle" - location = "nautilus" - service = "scallop" - expected = "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, ) - actual = WorkflowTemplateServiceClient.service_path(project, location, service) - assert expected == actual - - -def test_parse_service_path(): - expected = { - "project": "abalone", - "location": "squid", - "service": "clam", - } - path = WorkflowTemplateServiceClient.service_path(**expected) - - # Check that the path construction is reversible. - actual = WorkflowTemplateServiceClient.parse_service_path(path) - assert expected == actual - -def test_workflow_template_path(): - project = "whelk" - region = "octopus" - workflow_template = "oyster" - expected = "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format(project=project, region=region, workflow_template=workflow_template, ) - actual = WorkflowTemplateServiceClient.workflow_template_path(project, region, workflow_template) - assert expected == actual - - -def test_parse_workflow_template_path(): - expected = { - "project": "nudibranch", - "region": "cuttlefish", - "workflow_template": "mussel", - } - path = WorkflowTemplateServiceClient.workflow_template_path(**expected) - - # Check that the path construction is reversible. - actual = WorkflowTemplateServiceClient.parse_workflow_template_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "winkle" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = WorkflowTemplateServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nautilus", - } - path = WorkflowTemplateServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = WorkflowTemplateServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "scallop" - expected = "folders/{folder}".format(folder=folder, ) - actual = WorkflowTemplateServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "abalone", - } - path = WorkflowTemplateServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = WorkflowTemplateServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "squid" - expected = "organizations/{organization}".format(organization=organization, ) - actual = WorkflowTemplateServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "clam", - } - path = WorkflowTemplateServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = WorkflowTemplateServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "whelk" - expected = "projects/{project}".format(project=project, ) - actual = WorkflowTemplateServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "octopus", - } - path = WorkflowTemplateServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = WorkflowTemplateServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "oyster" - location = "nudibranch" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = WorkflowTemplateServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - } - path = WorkflowTemplateServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = WorkflowTemplateServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.WorkflowTemplateServiceTransport, '_prep_wrapped_messages') as prep: - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.WorkflowTemplateServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = WorkflowTemplateServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/regions/sample2/clusters/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/regions/sample2/clusters/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/regions/sample2/operations'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/regions/sample2/operations'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - - -def test_delete_operation(transport: str = "grpc"): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = WorkflowTemplateServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = WorkflowTemplateServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
-    request = operations_pb2.ListOperationsRequest()
-    request.name = "locations"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.ListOperationsResponse()
-        )
-        await client.list_operations(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations",) in kw["metadata"]
-
-def test_list_operations_from_dict():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.ListOperationsResponse()
-
-        response = client.list_operations(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_list_operations_from_dict_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.ListOperationsResponse()
-        )
-        response = await client.list_operations(
-            request={
-                "name": "locations",
-            }
-        )
-        call.assert_called()
-
-
-def test_set_iam_policy(transport: str = "grpc"):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        response = client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        )
-        response = await client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b"etag_blob"
-
-
-def test_set_iam_policy_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        call.return_value = policy_pb2.Policy()
-
-        client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_field_headers_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-
-        await client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-
-
-def test_set_iam_policy_from_dict():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-
-        response = client.set_iam_policy(
-            request={
-                "resource": "resource_value",
-                "policy": policy_pb2.Policy(version=774),
-            }
-        )
-        call.assert_called()
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_from_dict_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-
-        response = await client.set_iam_policy(
-            request={
-                "resource": "resource_value",
-                "policy": policy_pb2.Policy(version=774),
-            }
-        )
-        call.assert_called()
-
-
-def test_get_iam_policy(transport: str = "grpc"):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
-
-        response = client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b"etag_blob"
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        )
-
-        response = await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b"etag_blob"
-
-
-def test_get_iam_policy_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
-        call.return_value = policy_pb2.Policy()
-
-        client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-
-        await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-
-
-def test_get_iam_policy_from_dict():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy()
-
-        response = client.get_iam_policy(
-            request={
-                "resource": "resource_value",
-                "options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
-            }
-        )
-        call.assert_called()
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_from_dict_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-
-        response = await client.get_iam_policy(
-            request={
-                "resource": "resource_value",
-                "options": options_pb2.GetPolicyOptions(requested_policy_version=2598),
-            }
-        )
-        call.assert_called()
-
-
-def test_test_iam_permissions(transport: str = "grpc"):
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.TestIamPermissionsRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
-            permissions=["permissions_value"],
-        )
-
-        response = client.test_iam_permissions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
-    assert response.permissions == ["permissions_value"]
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.TestIamPermissionsRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],)
-        )
-
-        response = await client.test_iam_permissions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
-    assert response.permissions == ["permissions_value"]
-
-
-def test_test_iam_permissions_field_headers():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.TestIamPermissionsRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call:
-        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
-
-        client.test_iam_permissions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_field_headers_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.TestIamPermissionsRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            iam_policy_pb2.TestIamPermissionsResponse()
-        )
-
-        await client.test_iam_permissions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-
-
-def test_test_iam_permissions_from_dict():
-    client = WorkflowTemplateServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
-
-        response = client.test_iam_permissions(
-            request={
-                "resource": "resource_value",
-                "permissions": ["permissions_value"],
-            }
-        )
-        call.assert_called()
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_from_dict_async():
-    client = WorkflowTemplateServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.test_iam_permissions), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            iam_policy_pb2.TestIamPermissionsResponse()
-        )
-
-        response = await client.test_iam_permissions(
-            request={
-                "resource": "resource_value",
-                "permissions": ["permissions_value"],
-            }
-        )
-        call.assert_called()
-
-
-def test_transport_close():
-    transports = {
-        "rest": "_session",
-        "grpc": "_grpc_channel",
-    }
-
-    for transport, close_name in transports.items():
-        client = WorkflowTemplateServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-        )
-        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
-            with client:
-                close.assert_not_called()
-            close.assert_called_once()
-
-
-def test_client_ctx():
-    transports = [
-        "rest",
-        "grpc",
-    ]
-    for transport in transports:
-        client = WorkflowTemplateServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-        )
-        # Test client calls underlying transport.
-        with mock.patch.object(type(client.transport), "close") as close:
-            close.assert_not_called()
-            with client:
-                pass
-            close.assert_called()
-
-
-@pytest.mark.parametrize("client_class,transport_class", [
-    (WorkflowTemplateServiceClient, transports.WorkflowTemplateServiceGrpcTransport),
-    (WorkflowTemplateServiceAsyncClient, transports.WorkflowTemplateServiceGrpcAsyncIOTransport),
-])
-def test_api_key_credentials(client_class, transport_class):
-    with mock.patch.object(
-        google.auth._default, "get_api_key_credentials", create=True
-    ) as get_api_key_credentials:
-        mock_cred = mock.Mock()
-        get_api_key_credentials.return_value = mock_cred
-        options = client_options.ClientOptions()
-        options.api_key = "api_key"
-        with mock.patch.object(transport_class, "__init__") as patched:
-            patched.return_value = None
-            client = client_class(client_options=options)
-            patched.assert_called_once_with(
-                credentials=mock_cred,
-                credentials_file=None,
-                host=client.DEFAULT_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )