fix!: api consistency between HTTP and Gapic layers #375

Merged
merged 16 commits on Oct 6, 2021
112 changes: 75 additions & 37 deletions google/cloud/logging_v2/_gapic.py
@@ -49,10 +49,11 @@ def list_entries(
*,
filter_=None,
order_by=None,
max_results=None,
page_size=None,
page_token=None,
):
"""Return a page of log entry resources.
"""Return a generator of log entry resources.

Args:
resource_names (Sequence[str]): Names of one or more parent resources
@@ -69,14 +70,16 @@
https://cloud.google.com/logging/docs/view/advanced_filters
order_by (str) One of :data:`~logging_v2.ASCENDING`
or :data:`~logging_v2.DESCENDING`.
page_size (int): maximum number of entries to return, If not passed,
defaults to a value set by the API.
page_token (str): opaque marker for the next "page" of entries. If not
passed, the API will return the first page of
entries.

max_results (Optional[int]):
Optional. The maximum number of entries to return.
Non-positive values are treated as 0. If None, uses API defaults.
page_size (int): number of entries to fetch in each API call. Although
requests are paged internally, logs are returned by the generator
one at a time. If not passed, defaults to a value set by the API.
page_token (str): opaque marker for the starting "page" of entries. If not
passed, the API will return the first page of entries.
Returns:
Iterator[~logging_v2.LogEntry]
Generator[~logging_v2.LogEntry]
"""
# full resource names are expected by the API
resource_names = resource_names
@@ -89,19 +92,28 @@
)

response = self._gapic_api.list_log_entries(request=request)
page_iter = iter(response)
log_iter = iter(response)

# We attach a mutable loggers dictionary so that as Logger
# objects are created by entry_from_resource, they can be
# re-used by other log entries from the same logger.
loggers = {}

def log_entries_pager(page_iter):
for page in page_iter:
log_entry_dict = _parse_log_entry(LogEntryPB.pb(page))
if max_results is not None:
# drop negative values
max_results = max(max_results, 0)

# create generator
def log_entries_pager(log_iter):
i = 0
for entry in log_iter:
if max_results is not None and i >= max_results:
break
log_entry_dict = _parse_log_entry(LogEntryPB.pb(entry))
yield entry_from_resource(log_entry_dict, self._client, loggers=loggers)
i += 1

return log_entries_pager(page_iter)
return log_entries_pager(log_iter)

def write_entries(
self,
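A minimal usage sketch of the keyword semantics introduced above (the client construction and the numbers are illustrative assumptions; the client-level wiring that forwards max_results down to this layer lives outside this file): max_results caps the total number of entries the generator yields, while page_size only controls how many entries each underlying API call fetches.

# Sketch only: assumes the public Client.list_entries() forwards these
# keywords down to this layer, which happens outside this diff.
from google.cloud import logging_v2

client = logging_v2.Client()
entries = client.list_entries(max_results=50, page_size=1000)
for entry in entries:
    # At most 50 entries are yielded in total, even though each API call
    # may fetch up to 1000; iteration stops early once the cap is hit.
    print(entry.payload)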
@@ -175,7 +187,7 @@ def __init__(self, gapic_api, client):
self._gapic_api = gapic_api
self._client = client

def list_sinks(self, parent, *, page_size=0, page_token=None):
def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None):
"""List sinks for the parent resource.

Args:
@@ -187,27 +199,38 @@ def list_sinks(self, parent, *, page_size=0, page_token=None):
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]".
page_size (Optional[int]): Maximum number of sinks to return, If not passed,
defaults to a value set by the API.
page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not
passed, the API will return the first page of
sinks.
max_results (Optional[int]):
Optional. The maximum number of entries to return.
Non-positive values are treated as 0. If None, uses API defaults.
page_size (int): number of entries to fetch in each API call. Although
requests are paged internally, logs are returned by the generator
one at a time. If not passed, defaults to a value set by the API.
page_token (str): opaque marker for the starting "page" of entries. If not
passed, the API will return the first page of entries.

Returns:
Iterator[~logging_v2.Sink]
Generator[~logging_v2.Sink]
"""
request = ListSinksRequest(
parent=parent, page_size=page_size, page_token=page_token
)
response = self._gapic_api.list_sinks(request)
page_iter = iter(response)
sink_iter = iter(response)

if max_results is not None:
# drop negative values
max_results = max(max_results, 0)

def sinks_pager(page_iter):
for page in page_iter:
def sinks_pager(sink_iter):
i = 0
for entry in sink_iter:
if max_results is not None and i >= max_results:
break
# Convert the GAPIC sink type into the handwritten `Sink` type
yield Sink.from_api_repr(LogSink.to_dict(page), client=self._client)
yield Sink.from_api_repr(LogSink.to_dict(entry), client=self._client)
i += 1

return sinks_pager(page_iter)
return sinks_pager(sink_iter)

def sink_create(
self, parent, sink_name, filter_, destination, *, unique_writer_identity=False
@@ -347,33 +370,48 @@ def __init__(self, gapic_api, client):
self._gapic_api = gapic_api
self._client = client

def list_metrics(self, project, *, page_size=0, page_token=None):
def list_metrics(
self, project, *, max_results=None, page_size=None, page_token=None
):
"""List metrics for the project associated with this client.

Args:
project (str): ID of the project whose metrics are to be listed.
page_size (int): Maximum number of metrics to return, If not passed,
defaults to a value set by the API.
page_token (str): Opaque marker for the next "page" of metrics. If not
passed, the API will return the first page of
sinks.
max_results (Optional[int]):
Optional. The maximum number of entries to return.
Non-positive values are treated as 0. If None, uses API defaults.
page_size (int): number of entries to fetch in each API call. Although
requests are paged internally, logs are returned by the generator
one at a time. If not passed, defaults to a value set by the API.
page_token (str): opaque marker for the starting "page" of entries. If not
passed, the API will return the first page of entries.

Returns:
Iterable[logging_v2.Metric]: Iterable of metrics.
Generator[logging_v2.Metric]
"""
path = f"projects/{project}"
request = ListLogMetricsRequest(
parent=path, page_size=page_size, page_token=page_token,
)
response = self._gapic_api.list_log_metrics(request=request)
page_iter = iter(response)
metric_iter = iter(response)

if max_results is not None:
# drop negative values
max_results = max(max_results, 0)

def metrics_pager(page_iter):
for page in page_iter:
def metrics_pager(metric_iter):
i = 0
for entry in metric_iter:
if max_results is not None and i >= max_results:
break
# Convert GAPIC metrics type into handwritten `Metric` type
yield Metric.from_api_repr(LogMetric.to_dict(page), client=self._client)
yield Metric.from_api_repr(
LogMetric.to_dict(entry), client=self._client
)
i += 1

return metrics_pager(page_iter)
return metrics_pager(metric_iter)

def metric_create(self, project, metric_name, filter_, description):
"""Create a metric resource.
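The three GAPIC pagers above repeat the same max_results bookkeeping that _http.py factors into a shared _entries_pager helper further down. A hypothetical equivalent for this module is sketched below; the name _gapic_pager and its transform argument are invented here for illustration and are not part of this change.

# Hypothetical refactor sketch, not part of this diff: the per-method
# closures above could delegate the capping logic to one helper.
def _gapic_pager(response_iter, limit, transform):
    if limit is not None:
        # drop negative values, mirroring _entries_pager in _http.py
        limit = max(limit, 0)
    for i, item in enumerate(response_iter):
        if limit is not None and i >= limit:
            break
        yield transform(item)

# e.g. list_sinks could then end with:
#   return _gapic_pager(
#       sink_iter,
#       max_results,
#       lambda pb: Sink.from_api_repr(LogSink.to_dict(pb), client=self._client),
#   )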
79 changes: 52 additions & 27 deletions google/cloud/logging_v2/_http.py
@@ -74,6 +74,7 @@ def list_entries(
*,
filter_=None,
order_by=None,
max_results=None,
page_size=None,
page_token=None,
):
@@ -94,14 +95,16 @@
https://cloud.google.com/logging/docs/view/advanced_filters
order_by (str) One of :data:`~logging_v2.ASCENDING`
or :data:`~logging_v2.DESCENDING`.
page_size (int): maximum number of entries to return, If not passed,
defaults to a value set by the API.
page_token (str): opaque marker for the next "page" of entries. If not
passed, the API will return the first page of
entries.

max_results (Optional[int]):
Optional. The maximum number of entries to return.
Non-positive values are treated as 0. If None, uses API defaults.
page_size (int): number of entries to fetch in each API call. Although
requests are paged internally, logs are returned by the generator
one at a time. If not passed, defaults to a value set by the API.
page_token (str): opaque marker for the starting "page" of entries. If not
passed, the API will return the first page of entries.
Returns:
Iterator[~logging_v2.LogEntry]
Generator[~logging_v2.LogEntry]
"""
extra_params = {"resourceNames": resource_names}

@@ -131,7 +134,8 @@ def list_entries(
)
# This method uses POST to make a read-only request.
iterator._HTTP_METHOD = "POST"
return iterator

return _entries_pager(iterator, max_results)

def write_entries(
self,
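Context for the _HTTP_METHOD override above: entries.list is a read endpoint that the Logging REST API exposes as POST with a JSON body rather than GET with query parameters. Roughly, the body the iterator ends up sending has the shape below (field names follow the public v2 REST reference; the values are illustrative assumptions, and the actual payload is assembled inside HTTPIterator).

# Illustrative request body shape for POST /v2/entries:list; the real
# payload is built by HTTPIterator from the parameters above.
body = {
    "resourceNames": ["projects/my-project"],  # assumed example project
    "filter": "severity>=ERROR",               # optional
    "orderBy": "timestamp desc",               # optional
    "pageSize": 1000,                          # optional
    "pageToken": "...",                        # optional, from a prior response
}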
@@ -219,7 +223,7 @@ def __init__(self, client):
self._client = client
self.api_request = client._connection.api_request

def list_sinks(self, parent, *, page_size=None, page_token=None):
def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None):
"""List sinks for the parent resource.

See
@@ -234,22 +238,25 @@ def list_sinks(self, parent, *, page_size=None, page_token=None):
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]".
page_size (Optional[int]): Maximum number of sinks to return, If not passed,
defaults to a value set by the API.
page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not
passed, the API will return the first page of
sinks.
max_results (Optional[int]):
Optional. The maximum number of entries to return.
Non-positive values are treated as 0. If None, uses API defaults.
page_size (int): number of entries to fetch in each API call. Although
requests are paged internally, logs are returned by the generator
one at a time. If not passed, defaults to a value set by the API.
page_token (str): opaque marker for the starting "page" of entries. If not
passed, the API will return the first page of entries.

Returns:
Iterator[~logging_v2.Sink]
Generator[~logging_v2.Sink]
"""
extra_params = {}

if page_size is not None:
extra_params["pageSize"] = page_size

path = f"/{parent}/sinks"
return page_iterator.HTTPIterator(
iterator = page_iterator.HTTPIterator(
client=self._client,
api_request=self._client._connection.api_request,
path=path,
@@ -259,6 +266,8 @@ def list_sinks(self, parent, *, page_size=None, page_token=None):
extra_params=extra_params,
)

return _entries_pager(iterator, max_results)

def sink_create(
self, parent, sink_name, filter_, destination, *, unique_writer_identity=False
):
@@ -373,32 +382,35 @@ def __init__(self, client):
self._client = client
self.api_request = client._connection.api_request

def list_metrics(self, project, *, page_size=None, page_token=None):
def list_metrics(
self, project, *, max_results=None, page_size=None, page_token=None
):
"""List metrics for the project associated with this client.

See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list

Args:
page_size (Optional[int]): The maximum number of sinks in each
page of results from this request. Non-positive values are ignored. Defaults to a
sensible value set by the API.
page_token (Optional[str]): If present, return the next batch of sinks, using the
value, which must correspond to the ``nextPageToken`` value
returned in the previous response. Deprecated: use the ``pages``
property of the returned iterator instead of manually passing the
token.
max_results (Optional[int]):
Optional. The maximum number of entries to return.
Non-positive values are treated as 0. If None, uses API defaults.
page_size (int): number of entries to fetch in each API call. Although
requests are paged internally, logs are returned by the generator
one at a time. If not passed, defaults to a value set by the API.
page_token (str): opaque marker for the starting "page" of entries. If not
passed, the API will return the first page of entries.

Returns:
Iterator[google.cloud.logging_v2.metric.Metric]
Generator[logging_v2.Metric]

"""
extra_params = {}

if page_size is not None:
extra_params["pageSize"] = page_size

path = f"/projects/{project}/metrics"
return page_iterator.HTTPIterator(
iterator = page_iterator.HTTPIterator(
client=self._client,
api_request=self._client._connection.api_request,
path=path,
@@ -407,6 +419,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None):
page_token=page_token,
extra_params=extra_params,
)
return _entries_pager(iterator, max_results)

def metric_create(self, project, metric_name, filter_, description):
"""Create a metric resource.
@@ -469,6 +482,18 @@ def metric_delete(self, project, metric_name):
self.api_request(method="DELETE", path=target)


def _entries_pager(page_iter, limit):
if limit is not None:
# drop negative values
limit = max(limit, 0)
i = 0
for page in page_iter:
if limit is not None and i >= limit:
break
yield page
i += 1
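
A quick, self-contained check of the truncation semantics, using plain integers in place of API resources (illustrative only, not part of this change):

assert list(_entries_pager(iter(range(10)), 3)) == [0, 1, 2]  # capped at 3
assert list(_entries_pager(iter(range(2)), None)) == [0, 1]   # no cap
assert list(_entries_pager(iter(range(5)), -1)) == []         # negatives treated as 0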


def _item_to_entry(iterator, resource, loggers):
"""Convert a log entry resource to the native object.
