
Commit

feat: Location API methods (#324)
* feat: Location API methods
docs: updated comments
chore: proto formatting

PiperOrigin-RevId: 513582719

Source-Link: googleapis/googleapis@822476c

Source-Link: googleapis/googleapis-gen@39ca527
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzljYTUyNzYwMTk5ZDExM2I1MWFkOGJkY2E3NWNlYTllMTFhOWU5OCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] committed Mar 3, 2023
1 parent 19bc444 commit 662e648
Showing 11 changed files with 959 additions and 43 deletions.
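
For orientation, here is a minimal sketch (not part of this commit) of how the new Location API methods could be called from the synchronous client once this change ships; the project and location names are placeholders, and the dict requests rely on the keyword-expansion coercion shown in the diff below.

from google.cloud import scheduler_v1

# Placeholder project/location values, for illustration only.
client = scheduler_v1.CloudSchedulerClient()

# List the locations in which Cloud Scheduler is available for a project.
response = client.list_locations(request={"name": "projects/my-project"})
for location in response.locations:
    print(location.location_id, location.name)

# Fetch a single location by its full resource name.
location = client.get_location(
    request={"name": "projects/my-project/locations/us-central1"}
)
print(location.display_name)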
123 changes: 116 additions & 7 deletions google/cloud/scheduler_v1/services/cloud_scheduler/async_client.py
@@ -42,6 +42,7 @@
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore

from google.cloud.location import locations_pb2 # type: ignore
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
@@ -285,7 +286,7 @@ async def sample_list_jobs():
Returns:
google.cloud.scheduler_v1.services.cloud_scheduler.pagers.ListJobsAsyncPager:
Response message for listing jobs using
[ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs].
[ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs].
Iterating over this object will yield results and
resolve additional pages automatically.
@@ -410,7 +411,7 @@ async def sample_get_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -537,7 +538,7 @@ async def sample_create_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -665,7 +666,7 @@ async def sample_update_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -882,7 +883,7 @@ async def sample_pause_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -995,7 +996,7 @@ async def sample_resume_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -1101,7 +1102,7 @@ async def sample_run_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -1146,6 +1147,114 @@ async def sample_run_job():
# Done; return the response.
return response

async def get_location(
self,
request: Optional[locations_pb2.GetLocationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.Location:
r"""Gets information about a location.
Args:
request (:class:`~.location_pb2.GetLocationRequest`):
The request object. Request message for
`GetLocation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.Location:
Location object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.GetLocationRequest(**request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.get_location,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)

# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)

# Done; return the response.
return response

async def list_locations(
self,
request: Optional[locations_pb2.ListLocationsRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.ListLocationsResponse:
r"""Lists information about the supported locations for this service.
Args:
request (:class:`~.location_pb2.ListLocationsRequest`):
The request object. Request message for
`ListLocations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.ListLocationsResponse:
Response message for ``ListLocations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.ListLocationsRequest(**request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._client._transport.list_locations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)

# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)

# Done; return the response.
return response

async def __aenter__(self):
return self

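
A comparable sketch for the asynchronous surface added above (again not part of the diff); the resource names are placeholders, and the client is assumed to pick up application-default credentials.

import asyncio

from google.cloud import scheduler_v1


async def show_locations() -> None:
    client = scheduler_v1.CloudSchedulerAsyncClient()

    # list_locations returns a plain ListLocationsResponse (no pager),
    # so the locations field can be iterated directly.
    response = await client.list_locations(request={"name": "projects/my-project"})
    for location in response.locations:
        print(location.location_id)

    # get_location resolves a single location by its full resource name.
    location = await client.get_location(
        request={"name": "projects/my-project/locations/us-central1"}
    )
    print(location.display_name)


asyncio.run(show_locations())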
123 changes: 116 additions & 7 deletions google/cloud/scheduler_v1/services/cloud_scheduler/client.py
@@ -46,6 +46,7 @@
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore

from google.cloud.location import locations_pb2 # type: ignore
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
@@ -529,7 +530,7 @@ def sample_list_jobs():
Returns:
google.cloud.scheduler_v1.services.cloud_scheduler.pagers.ListJobsPager:
Response message for listing jobs using
[ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs].
[ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs].
Iterating over this object will yield results and
resolve additional pages automatically.
@@ -644,7 +645,7 @@ def sample_get_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -761,7 +762,7 @@ def sample_create_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -889,7 +890,7 @@ def sample_update_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -1096,7 +1097,7 @@ def sample_pause_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -1209,7 +1210,7 @@ def sample_resume_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -1315,7 +1316,7 @@ def sample_run_job():
google.cloud.scheduler_v1.types.Job:
Configuration for a job.
The maximum allowed size for a job is
-100KB.
+1MB.
"""
# Create or coerce a protobuf request object.
@@ -1373,6 +1374,114 @@ def __exit__(self, type, value, traceback):
"""
self.transport.close()

def get_location(
self,
request: Optional[locations_pb2.GetLocationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.Location:
r"""Gets information about a location.
Args:
request (:class:`~.location_pb2.GetLocationRequest`):
The request object. Request message for
`GetLocation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.Location:
Location object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.GetLocationRequest(**request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._transport.get_location,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)

# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)

# Done; return the response.
return response

def list_locations(
self,
request: Optional[locations_pb2.ListLocationsRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.ListLocationsResponse:
r"""Lists information about the supported locations for this service.
Args:
request (:class:`~.location_pb2.ListLocationsRequest`):
The request object. Request message for
`ListLocations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.ListLocationsResponse:
Response message for ``ListLocations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.ListLocationsRequest(**request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._transport.list_locations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)

# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)

# Done; return the response.
return response


DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=package_version.__version__
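
Both new methods attach the resource name to the call metadata with gapic_v1.routing_header.to_grpc_metadata, as seen in the hunks above. A small sketch of what that helper produces, assuming a placeholder resource name (the exact return shape depends on the installed google-api-core version):

from google.api_core import gapic_v1

# Build the x-goog-request-params entry that routes the request server-side.
routing_md = gapic_v1.routing_header.to_grpc_metadata(
    (("name", "projects/my-project/locations/us-central1"),)
)
print(routing_md)
# expected along the lines of:
# ('x-goog-request-params', 'name=projects/my-project/locations/us-central1')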