docs: Add documentation for enums (#152)
* docs: Add documentation for enums

fix: Add context manager return types

chore: Update gapic-generator-python to v1.8.1
PiperOrigin-RevId: 503210727

Source-Link: googleapis/googleapis@a391fd1

Source-Link: googleapis/googleapis-gen@0080f83
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] and gcf-owl-bot[bot] committed Jan 20, 2023
1 parent 0ec2f1e commit a12a7e7
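
The headline change is the new "Values:" section added to every enum docstring, which makes the allowed values and their meanings visible from help() and IDE tooltips. A minimal check, assuming google-cloud-datastream is installed:

from google.cloud import datastream_v1

# The expanded docstring now lists each value (NOT_STARTED, RUNNING, DRAINING, ...)
# together with a one-line description.
help(datastream_v1.Stream.State)
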
Showing 7 changed files with 223 additions and 19 deletions.
Binary file added datastream-v1alpha1-py.tar.gz
2 changes: 1 addition & 1 deletion google/cloud/datastream_v1/services/datastream/client.py
@@ -3529,7 +3529,7 @@ def sample_delete_route():
# Done; return the response.
return response

def __enter__(self):
def __enter__(self) -> "DatastreamClient":
return self

def __exit__(self, type, value, traceback):
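
The commit's second line, "fix: Add context manager return types", is what this hunk shows: __enter__ now declares that it returns the client itself. A small usage sketch (the parent path below is a placeholder):

from google.cloud import datastream_v1

# Using the client as a context manager; the typed __enter__ lets type
# checkers follow the calls made inside the block, and __exit__ closes the
# underlying transport when the block ends.
with datastream_v1.DatastreamClient() as client:
    parent = "projects/my-project/locations/us-central1"  # placeholder
    for profile in client.list_connection_profiles(parent=parent):
        print(profile.display_name)
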
136 changes: 128 additions & 8 deletions google/cloud/datastream_v1/types/datastream_resources.py
@@ -328,7 +328,25 @@ class PrivateConnection(proto.Message):
"""

class State(proto.Enum):
r"""Private Connection state."""
r"""Private Connection state.
Values:
STATE_UNSPECIFIED (0):
Unspecified state.
CREATING (1):
The private connection is in creation state -
creating resources.
CREATED (2):
The private connection has been created with
all of its resources.
FAILED (3):
The private connection creation has failed.
DELETING (4):
The private connection is being deleted.
FAILED_TO_DELETE (5):
Delete request has failed, resource is in
invalid state.
"""
STATE_UNSPECIFIED = 0
CREATING = 1
CREATED = 2
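
With the states spelled out, polling code can compare against named members instead of bare integers. A sketch using the v1 client and a placeholder resource name:

from google.cloud import datastream_v1

State = datastream_v1.PrivateConnection.State

client = datastream_v1.DatastreamClient()
conn = client.get_private_connection(
    name="projects/my-project/locations/us-central1/privateConnections/my-pc"  # placeholder
)
if conn.state == State.CREATED:
    print("Private connection is ready.")
elif conn.state in (State.FAILED, State.FAILED_TO_DELETE):
    print("Private connection needs attention:", conn.error)
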
@@ -1187,13 +1205,31 @@ class JsonFileFormat(proto.Message):
"""

class SchemaFileFormat(proto.Enum):
r"""Schema file format."""
r"""Schema file format.
Values:
SCHEMA_FILE_FORMAT_UNSPECIFIED (0):
Unspecified schema file format.
NO_SCHEMA_FILE (1):
Do not attach schema file.
AVRO_SCHEMA_FILE (2):
Avro schema format.
"""
SCHEMA_FILE_FORMAT_UNSPECIFIED = 0
NO_SCHEMA_FILE = 1
AVRO_SCHEMA_FILE = 2

class JsonCompression(proto.Enum):
r"""Json file compression."""
r"""Json file compression.
Values:
JSON_COMPRESSION_UNSPECIFIED (0):
Unspecified json file compression.
NO_COMPRESSION (1):
Do not compress JSON file.
GZIP (2):
Gzip compression.
"""
JSON_COMPRESSION_UNSPECIFIED = 0
NO_COMPRESSION = 1
GZIP = 2
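
The two JSON-related enums are typically set together when describing a Cloud Storage destination. A minimal construction sketch (not a complete stream definition):

from google.cloud import datastream_v1

# Write gzip-compressed JSON files and attach an Avro schema file alongside them.
json_format = datastream_v1.JsonFileFormat(
    schema_file_format=datastream_v1.JsonFileFormat.SchemaFileFormat.AVRO_SCHEMA_FILE,
    compression=datastream_v1.JsonFileFormat.JsonCompression.GZIP,
)
gcs_destination = datastream_v1.GcsDestinationConfig(json_file_format=json_format)
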
@@ -1478,7 +1514,34 @@ class Stream(proto.Message):
"""

class State(proto.Enum):
r"""Stream state."""
r"""Stream state.
Values:
STATE_UNSPECIFIED (0):
Unspecified stream state.
NOT_STARTED (1):
The stream has been created but has not yet
started streaming data.
RUNNING (2):
The stream is running.
PAUSED (3):
The stream is paused.
MAINTENANCE (4):
The stream is in maintenance mode.
Updates are rejected on the resource in this
state.
FAILED (5):
The stream is experiencing an error that is
preventing data from being streamed.
FAILED_PERMANENTLY (6):
The stream has experienced a terminal
failure.
STARTING (7):
The stream is starting, but not yet running.
DRAINING (8):
The Stream is no longer reading new events,
but still writing events in the buffer.
"""
STATE_UNSPECIFIED = 0
NOT_STARTED = 1
RUNNING = 2
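
Because proto.Enum is an enum.IntEnum subclass, the documented numbers and names are interchangeable, which helps when a state arrives as a raw integer (from a log line, for example). A short sketch:

from google.cloud import datastream_v1

StreamState = datastream_v1.Stream.State

print(StreamState(8).name)          # "DRAINING"
print(StreamState["FAILED"].value)  # 5

# An illustrative grouping of states that usually warrant an alert.
ALERT_STATES = {StreamState.FAILED, StreamState.FAILED_PERMANENTLY, StreamState.MAINTENANCE}
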
@@ -1782,7 +1845,32 @@ class BackfillJob(proto.Message):
"""

class State(proto.Enum):
r"""State of the stream object's backfill job."""
r"""State of the stream object's backfill job.
Values:
STATE_UNSPECIFIED (0):
Default value.
NOT_STARTED (1):
Backfill job was never started for the stream
object (stream has backfill strategy defined as
manual or object was explicitly excluded from
automatic backfill).
PENDING (2):
Backfill job will start pending available
resources.
ACTIVE (3):
Backfill job is running.
STOPPED (4):
Backfill job stopped (next job run will start
from beginning).
FAILED (5):
Backfill job failed (due to an error).
COMPLETED (6):
Backfill completed successfully.
UNSUPPORTED (7):
Backfill job failed since the table structure
is currently unsupported for backfill.
"""
STATE_UNSPECIFIED = 0
NOT_STARTED = 1
PENDING = 2
@@ -1793,7 +1881,19 @@ class State(proto.Enum):
UNSUPPORTED = 7

class Trigger(proto.Enum):
r"""Triggering reason for a backfill job."""
r"""Triggering reason for a backfill job.
Values:
TRIGGER_UNSPECIFIED (0):
Default value.
AUTOMATIC (1):
Object backfill job was triggered
automatically according to the stream's backfill
strategy.
MANUAL (2):
Object backfill job was triggered manually
using the dedicated API.
"""
TRIGGER_UNSPECIFIED = 0
AUTOMATIC = 1
MANUAL = 2
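
Together, State and Trigger describe how a stream object's backfill ran and why. A sketch that inspects one stream object (the resource name is a placeholder):

from google.cloud import datastream_v1

BackfillJob = datastream_v1.BackfillJob

client = datastream_v1.DatastreamClient()
obj = client.get_stream_object(
    name="projects/my-project/locations/us-central1/streams/my-stream/objects/my-object"  # placeholder
)
job = obj.backfill_job
if job.state == BackfillJob.State.FAILED:
    trigger = BackfillJob.Trigger(job.trigger).name  # "AUTOMATIC" or "MANUAL"
    print(f"Backfill ({trigger}) failed:", list(job.errors))
elif job.state == BackfillJob.State.UNSUPPORTED:
    print("Table structure is not supported for backfill.")
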
@@ -1900,7 +2000,18 @@ class Validation(proto.Message):
"""

class State(proto.Enum):
r"""Validation execution state."""
r"""Validation execution state.
Values:
STATE_UNSPECIFIED (0):
Unspecified state.
NOT_EXECUTED (1):
Validation did not execute.
FAILED (2):
Validation failed.
PASSED (3):
Validation passed.
"""
STATE_UNSPECIFIED = 0
NOT_EXECUTED = 1
FAILED = 2
@@ -1942,7 +2053,16 @@ class ValidationMessage(proto.Message):
"""

class Level(proto.Enum):
r"""Validation message level."""
r"""Validation message level.
Values:
LEVEL_UNSPECIFIED (0):
Unspecified level.
WARNING (1):
Potentially cause issues with the Stream.
ERROR (2):
Definitely cause issues with the Stream.
"""
LEVEL_UNSPECIFIED = 0
WARNING = 1
ERROR = 2
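
The Validation and ValidationMessage enums pair up naturally when summarizing a validation run. A sketch of such a helper, assuming a ValidationResult obtained elsewhere (for example from a failed create or update operation):

from typing import List

from google.cloud import datastream_v1

def blocking_messages(result: datastream_v1.ValidationResult) -> List[str]:
    """Collect ERROR-level messages from validations that actually failed."""
    Level = datastream_v1.ValidationMessage.Level
    State = datastream_v1.Validation.State
    return [
        msg.message
        for validation in result.validations
        if validation.state == State.FAILED
        for msg in validation.message
        if msg.level == Level.ERROR
    ]
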
@@ -3040,7 +3040,7 @@ def sample_delete_route():
# Done; return the response.
return response

def __enter__(self):
def __enter__(self) -> "DatastreamClient":
return self

def __exit__(self, type, value, traceback):
98 changes: 91 additions & 7 deletions google/cloud/datastream_v1alpha1/types/datastream_resources.py
@@ -61,14 +61,30 @@


class GcsFileFormat(proto.Enum):
r"""File format in Cloud Storage."""
r"""File format in Cloud Storage.
Values:
GCS_FILE_FORMAT_UNSPECIFIED (0):
Unspecified Cloud Storage file format.
AVRO (1):
Avro file format
"""
_pb_options = {"deprecated": True}
GCS_FILE_FORMAT_UNSPECIFIED = 0
AVRO = 1


class SchemaFileFormat(proto.Enum):
r"""Schema file format."""
r"""Schema file format.
Values:
SCHEMA_FILE_FORMAT_UNSPECIFIED (0):
Unspecified schema file format.
NO_SCHEMA_FILE (1):
Do not attach schema file.
AVRO_SCHEMA_FILE (2):
Avro schema format.
"""
SCHEMA_FILE_FORMAT_UNSPECIFIED = 0
NO_SCHEMA_FILE = 1
AVRO_SCHEMA_FILE = 2
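
In the older v1alpha1 surface these file-format enums live at module level, and GcsFileFormat carries _pb_options = {"deprecated": True}; it still resolves normally at runtime. A quick sketch, assuming the usual top-level re-exports of the generated package:

from google.cloud import datastream_v1alpha1

# Deprecated but still usable; the avro_file_format / json_file_format fields on
# GcsDestinationConfig are the replacement in newer configurations.
print(datastream_v1alpha1.GcsFileFormat.AVRO.value)              # 1
print(datastream_v1alpha1.SchemaFileFormat.NO_SCHEMA_FILE.name)  # "NO_SCHEMA_FILE"
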
@@ -291,7 +307,20 @@ class PrivateConnection(proto.Message):
"""

class State(proto.Enum):
r"""Private Connection state."""
r"""Private Connection state.
Values:
STATE_UNSPECIFIED (0):
CREATING (1):
The private connection is in creation state -
creating resources.
CREATED (2):
The private connection has been created with
all of its resources.
FAILED (3):
The private connection creation has failed.
"""
STATE_UNSPECIFIED = 0
CREATING = 1
CREATED = 2
@@ -919,7 +948,16 @@ class JsonFileFormat(proto.Message):
"""

class JsonCompression(proto.Enum):
r"""Json file compression."""
r"""Json file compression.
Values:
JSON_COMPRESSION_UNSPECIFIED (0):
Unspecified json file compression.
NO_COMPRESSION (1):
Do not compress JSON file.
GZIP (2):
Gzip compression.
"""
JSON_COMPRESSION_UNSPECIFIED = 0
NO_COMPRESSION = 1
GZIP = 2
@@ -1073,7 +1111,33 @@ class Stream(proto.Message):
"""

class State(proto.Enum):
r"""Stream state."""
r"""Stream state.
Values:
STATE_UNSPECIFIED (0):
Unspecified stream state.
CREATED (1):
The stream has been created.
RUNNING (2):
The stream is running.
PAUSED (3):
The stream is paused.
MAINTENANCE (4):
The stream is in maintenance mode.
Updates are rejected on the resource in this
state.
FAILED (5):
The stream is experiencing an error that is
preventing data from being streamed.
FAILED_PERMANENTLY (6):
The stream has experienced a terminal
failure.
STARTING (7):
The stream is starting, but not yet running.
DRAINING (8):
The Stream is no longer reading new events,
but still writing events in the buffer.
"""
STATE_UNSPECIFIED = 0
CREATED = 1
RUNNING = 2
@@ -1259,7 +1323,18 @@ class Validation(proto.Message):
"""

class Status(proto.Enum):
r"""Validation execution status."""
r"""Validation execution status.
Values:
STATUS_UNSPECIFIED (0):
Unspecified status.
NOT_EXECUTED (1):
Validation did not execute.
FAILED (2):
Validation failed.
PASSED (3):
Validation passed.
"""
STATUS_UNSPECIFIED = 0
NOT_EXECUTED = 1
FAILED = 2
@@ -1301,7 +1376,16 @@ class ValidationMessage(proto.Message):
"""

class Level(proto.Enum):
r"""Validation message level."""
r"""Validation message level.
Values:
LEVEL_UNSPECIFIED (0):
Unspecified level.
WARNING (1):
Potentially cause issues with the Stream.
ERROR (2):
Definitely cause issues with the Stream.
"""
LEVEL_UNSPECIFIED = 0
WARNING = 1
ERROR = 2
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-datastream",
"version": "1.4.0"
"version": "0.1.0"
},
"snippets": [
{
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-datastream",
"version": "1.4.0"
"version": "0.1.0"
},
"snippets": [
{