docs: Add documentation for enums (#428)
* docs: Add documentation for enums

fix: Add context manager return types

chore: Update gapic-generator-python to v1.8.1
PiperOrigin-RevId: 503210727

Source-Link: googleapis/googleapis@a391fd1

Source-Link: googleapis/googleapis-gen@0080f83
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] committed Jan 20, 2023
1 parent db78fdf commit a10a470
Showing 4 changed files with 102 additions and 9 deletions.
@@ -2131,7 +2131,7 @@ def sample_enroll_data_sources():
metadata=metadata,
)

def __enter__(self):
def __enter__(self) -> "DataTransferServiceClient":
return self

def __exit__(self, type, value, traceback):
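
For illustration only (not part of this diff): a minimal sketch of how the newly annotated context manager is typically used. The project ID is a placeholder.

from google.cloud import bigquery_datatransfer_v1

# The annotated __enter__ lets type checkers infer the client type inside the
# with-block; __exit__ closes the underlying transport when the block exits.
with bigquery_datatransfer_v1.DataTransferServiceClient() as client:
    for source in client.list_data_sources(parent="projects/my-project"):  # placeholder project
        print(source.data_source_id)
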
63 changes: 59 additions & 4 deletions google/cloud/bigquery_datatransfer_v1/types/datatransfer.py
@@ -102,7 +102,25 @@ class DataSourceParameter(proto.Message):
"""

class Type(proto.Enum):
r"""Parameter type."""
r"""Parameter type.
Values:
TYPE_UNSPECIFIED (0):
Type unspecified.
STRING (1):
String parameter.
INTEGER (2):
Integer parameter (64-bits).
Will be serialized to json as string.
DOUBLE (3):
Double precision floating point parameter.
BOOLEAN (4):
Boolean parameter.
RECORD (5):
Deprecated. This field has no effect.
PLUS_PAGE (6):
Page ID for a Google+ Page.
"""
TYPE_UNSPECIFIED = 0
STRING = 1
INTEGER = 2
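
For illustration only (not part of this diff): a short sketch of the enum values documented above. Proto-plus enums subclass enum.IntEnum, so members can be looked up by name or numeric value.

from google.cloud.bigquery_datatransfer_v1.types import DataSourceParameter

# IntEnum behavior: members compare equal to their integer values and can be
# resolved by value or by name.
print(DataSourceParameter.Type.STRING.value)     # 1
print(DataSourceParameter.Type(2).name)          # "INTEGER"
print(DataSourceParameter.Type["DOUBLE"].value)  # 3
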
@@ -246,14 +264,44 @@ class DataSource(proto.Message):
"""

class AuthorizationType(proto.Enum):
r"""The type of authorization needed for this data source."""
r"""The type of authorization needed for this data source.
Values:
AUTHORIZATION_TYPE_UNSPECIFIED (0):
Type unspecified.
AUTHORIZATION_CODE (1):
Use OAuth 2 authorization codes that can be
exchanged for a refresh token on the backend.
GOOGLE_PLUS_AUTHORIZATION_CODE (2):
Return an authorization code for a given
Google+ page that can then be exchanged for a
refresh token on the backend.
FIRST_PARTY_OAUTH (3):
Use First Party OAuth.
"""
AUTHORIZATION_TYPE_UNSPECIFIED = 0
AUTHORIZATION_CODE = 1
GOOGLE_PLUS_AUTHORIZATION_CODE = 2
FIRST_PARTY_OAUTH = 3

class DataRefreshType(proto.Enum):
r"""Represents how the data source supports data auto refresh."""
r"""Represents how the data source supports data auto refresh.
Values:
DATA_REFRESH_TYPE_UNSPECIFIED (0):
The data source won't support data auto
refresh, which is default value.
SLIDING_WINDOW (1):
The data source supports data auto refresh,
and runs will be scheduled for the past few
days. Does not allow custom values to be set for
each transfer config.
CUSTOM_SLIDING_WINDOW (2):
The data source supports data auto refresh,
and runs will be scheduled for the past few
days. Allows custom values to be set for each
transfer config.
"""
DATA_REFRESH_TYPE_UNSPECIFIED = 0
SLIDING_WINDOW = 1
CUSTOM_SLIDING_WINDOW = 2
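
For illustration only (not part of this diff): a hedged sketch that inspects the two enums documented in this hunk on listed data sources. The project ID is a placeholder.

from google.cloud import bigquery_datatransfer_v1
from google.cloud.bigquery_datatransfer_v1.types import DataSource

client = bigquery_datatransfer_v1.DataTransferServiceClient()
for source in client.list_data_sources(parent="projects/my-project"):  # placeholder project
    # Check how the data source expects to be authorized.
    if source.authorization_type == DataSource.AuthorizationType.FIRST_PARTY_OAUTH:
        print(f"{source.data_source_id}: first-party OAuth")
    # Check whether the data source schedules automatic backfill runs.
    if source.data_refresh_type == DataSource.DataRefreshType.SLIDING_WINDOW:
        print(f"{source.data_source_id}: auto-refresh over a sliding window")
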
@@ -775,7 +823,14 @@ class ListTransferRunsRequest(proto.Message):
"""

class RunAttempt(proto.Enum):
r"""Represents which runs should be pulled."""
r"""Represents which runs should be pulled.
Values:
RUN_ATTEMPT_UNSPECIFIED (0):
All runs should be returned.
LATEST (1):
Only latest run per day should be returned.
"""
RUN_ATTEMPT_UNSPECIFIED = 0
LATEST = 1

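
For illustration only (not part of this diff): a hedged sketch of restricting a run listing with RunAttempt. The parent path is a placeholder.

from google.cloud import bigquery_datatransfer_v1

client = bigquery_datatransfer_v1.DataTransferServiceClient()
# RunAttempt.LATEST limits results to the most recent run per day.
request = bigquery_datatransfer_v1.ListTransferRunsRequest(
    parent="projects/my-project/transferConfigs/my-config",  # placeholder names
    run_attempt=bigquery_datatransfer_v1.ListTransferRunsRequest.RunAttempt.LATEST,
)
for run in client.list_transfer_runs(request=request):
    print(run.name, run.state.name)
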
44 changes: 41 additions & 3 deletions google/cloud/bigquery_datatransfer_v1/types/transfer.py
@@ -36,15 +36,42 @@


class TransferType(proto.Enum):
r"""DEPRECATED. Represents data transfer type."""
r"""DEPRECATED. Represents data transfer type.
Values:
TRANSFER_TYPE_UNSPECIFIED (0):
Invalid or Unknown transfer type placeholder.
BATCH (1):
Batch data transfer.
STREAMING (2):
Streaming data transfer. Streaming data
source currently doesn't support multiple
transfer configs per project.
"""
_pb_options = {"deprecated": True}
TRANSFER_TYPE_UNSPECIFIED = 0
BATCH = 1
STREAMING = 2


class TransferState(proto.Enum):
r"""Represents data transfer run state."""
r"""Represents data transfer run state.
Values:
TRANSFER_STATE_UNSPECIFIED (0):
State placeholder (0).
PENDING (2):
Data transfer is scheduled and is waiting to
be picked up by data transfer backend (2).
RUNNING (3):
Data transfer is in progress (3).
SUCCEEDED (4):
Data transfer completed successfully (4).
FAILED (5):
Data transfer failed (5).
CANCELLED (6):
Data transfer is cancelled (6).
"""
TRANSFER_STATE_UNSPECIFIED = 0
PENDING = 2
RUNNING = 3
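
For illustration only (not part of this diff): a hedged sketch using TransferState to flag failing configs. The project ID is a placeholder.

from google.cloud import bigquery_datatransfer_v1
from google.cloud.bigquery_datatransfer_v1.types import TransferState

client = bigquery_datatransfer_v1.DataTransferServiceClient()
# TransferConfig.state is output-only and reflects the most recently
# updated transfer run for the config.
for config in client.list_transfer_configs(parent="projects/my-project"):  # placeholder project
    if config.state == TransferState.FAILED:
        print(f"Latest run of {config.display_name} failed")
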
@@ -466,7 +493,18 @@ class TransferMessage(proto.Message):
"""

class MessageSeverity(proto.Enum):
r"""Represents data transfer user facing message severity."""
r"""Represents data transfer user facing message severity.
Values:
MESSAGE_SEVERITY_UNSPECIFIED (0):
No severity specified.
INFO (1):
Informational message.
WARNING (2):
Warning message.
ERROR (3):
Error message.
"""
MESSAGE_SEVERITY_UNSPECIFIED = 0
INFO = 1
WARNING = 2
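
For illustration only (not part of this diff): a hedged sketch that filters a run's transfer logs by MessageSeverity. The run name is a placeholder.

from google.cloud import bigquery_datatransfer_v1
from google.cloud.bigquery_datatransfer_v1.types import TransferMessage

client = bigquery_datatransfer_v1.DataTransferServiceClient()
run_name = "projects/my-project/transferConfigs/my-config/runs/my-run"  # placeholder run
for message in client.list_transfer_logs(parent=run_name):
    # IntEnum comparison: keep only WARNING (2) and ERROR (3) messages.
    if message.severity >= TransferMessage.MessageSeverity.WARNING:
        print(message.severity.name, message.message_text)
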
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-bigquery-datatransfer",
"version": "3.10.0"
"version": "0.1.0"
},
"snippets": [
{
