Skip to content

Commit

Permalink
Merge branch 'main' into feat--Made-display_name-optional-for-most-calls
Browse files Browse the repository at this point in the history
  • Loading branch information
sasha-gitg committed Mar 18, 2022
2 parents 57b1241 + 79aeec1 commit 45d8e14
Show file tree
Hide file tree
Showing 82 changed files with 5,372 additions and 148 deletions.
4 changes: 2 additions & 2 deletions .github/CODEOWNERS
Validating CODEOWNERS rules …
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@
# For syntax help see:
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax

# yoshi-python is the default owner
* @googleapis/yoshi-python
# @googleapis/cdpe-cloudai and yoshi-python are the default owners
* @googleapis/cdpe-cloudai @googleapis/yoshi-python

# The AI Platform GAPIC libraries are owned by Cloud AI DPE
/google/cloud/aiplatform_*/** @googleapis/cdpe-cloudai
Expand Down
4 changes: 2 additions & 2 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ Initialize the SDK to store common configurations that you use with the SDK.
staging_bucket='gs://my_staging_bucket',
# custom google.auth.credentials.Credentials
# environment default creds used if not set
# environment default credentials used if not set
credentials=my_credentials,
# customer managed encryption key resource name
Expand Down Expand Up @@ -188,7 +188,7 @@ Please visit `Using a managed dataset in a custom training application`_ for a d

.. _Using a managed dataset in a custom training application: https://cloud.google.com/vertex-ai/docs/training/using-managed-datasets

It must write the model artifact to the environment variable populated by the traing service:
It must write the model artifact to the environment variable populated by the training service:

.. code-block:: Python
Expand Down
6 changes: 6 additions & 0 deletions google/cloud/aiplatform/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,10 @@
Feature,
Featurestore,
)
from google.cloud.aiplatform.matching_engine import (
MatchingEngineIndex,
MatchingEngineIndexEndpoint,
)
from google.cloud.aiplatform.metadata import metadata
from google.cloud.aiplatform.models import Endpoint
from google.cloud.aiplatform.models import Model
Expand Down Expand Up @@ -104,6 +108,8 @@
"EntityType",
"Feature",
"Featurestore",
"MatchingEngineIndex",
"MatchingEngineIndexEndpoint",
"ImageDataset",
"HyperparameterTuningJob",
"Model",
Expand Down
76 changes: 70 additions & 6 deletions google/cloud/aiplatform/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def log_create_complete(
cls (VertexAiResourceNoun):
Vertex AI Resource Noun class that is being created.
resource (proto.Message):
Vertex AI Resourc proto.Message
Vertex AI Resource proto.Message
variable_name (str): Name of variable to use for code snippet
"""
self._logger.info(f"{cls.__name__} created. Resource name: {resource.name}")
Expand All @@ -121,7 +121,7 @@ def log_create_complete_with_getter(
cls (VertexAiResourceNoun):
Vertex AI Resource Noun class that is being created.
resource (proto.Message):
Vertex AI Resourc proto.Message
Vertex AI Resource proto.Message
variable_name (str): Name of variable to use for code snippet
"""
self._logger.info(f"{cls.__name__} created. Resource name: {resource.name}")
Expand Down Expand Up @@ -462,7 +462,7 @@ def __init__(
Args:
project(str): Project of the resource noun.
location(str): The location of the resource noun.
credentials(google.auth.crendentials.Crendentials): Optional custom
credentials(google.auth.credentials.Credentials): Optional custom
credentials to use when accessing interacting with resource noun.
resource_name(str): A fully-qualified resource name or ID.
"""
Expand Down Expand Up @@ -655,6 +655,15 @@ def gca_resource(self) -> proto.Message:
self._assert_gca_resource_is_available()
return self._gca_resource

@property
def _resource_is_available(self) -> bool:
    """Returns True if GCA resource has been created and is available, otherwise False"""
    try:
        self._assert_gca_resource_is_available()
    except RuntimeError:
        # _assert_gca_resource_is_available raises RuntimeError when the
        # resource has not been created or is not currently available.
        return False
    return True

def _assert_gca_resource_is_available(self) -> None:
"""Helper method to raise when property is not accessible.
Expand Down Expand Up @@ -840,7 +849,7 @@ def __init__(
Args:
project (str): Optional. Project of the resource noun.
location (str): Optional. The location of the resource noun.
credentials(google.auth.crendentials.Crendentials):
credentials(google.auth.credentials.Credentials):
Optional. custom credentials to use when accessing interacting with
resource noun.
resource_name(str): A fully-qualified resource name or ID.
Expand Down Expand Up @@ -870,7 +879,7 @@ def _empty_constructor(
Args:
project (str): Optional. Project of the resource noun.
location (str): Optional. The location of the resource noun.
credentials(google.auth.crendentials.Crendentials):
credentials(google.auth.credentials.Credentials):
Optional. custom credentials to use when accessing interacting with
resource noun.
resource_name(str): A fully-qualified resource name or ID.
Expand Down Expand Up @@ -1160,7 +1169,7 @@ def delete(self, sync: bool = True) -> None:
_LOGGER.log_action_completed_against_resource("deleted.", "", self)

def __repr__(self) -> str:
    """Returns the resource representation when the GCA resource has been
    created and is available; otherwise falls back to the future-manager
    representation (e.g. while creation is still running or has failed).
    """
    # Both conditions are needed: _gca_resource may exist as an empty local
    # placeholder before the service call completes, and
    # _resource_is_available confirms it is actually usable.
    if self._gca_resource and self._resource_is_available:
        return VertexAiResourceNoun.__repr__(self)

    return FutureManager.__repr__(self)
Expand Down Expand Up @@ -1227,3 +1236,58 @@ def get_annotation_class(annotation: type) -> type:
return annotation.__args__[0]
else:
return annotation


class DoneMixin(abc.ABC):
    """Abstract mixin declaring a completion check.

    Concrete subclasses implement ``done`` to report whether the job
    they track has finished.
    """

    @abc.abstractmethod
    def done(self) -> bool:
        """Returns True if the tracked job has completed."""
        ...


class StatefulResource(DoneMixin):
    """Extends DoneMixin to check whether a job returning a stateful resource has completed."""

    @property
    @abc.abstractmethod
    def state(self):
        """The current state of the job."""
        pass

    @property
    @classmethod
    @abc.abstractmethod
    def _valid_done_states(cls):
        """A set() containing all job states associated with a completed job."""
        pass

    def done(self) -> bool:
        """Method indicating whether a job has completed.

        Returns:
            True if the job has completed.
        """
        # Membership test already yields a bool; no if/else needed.
        return self.state in self._valid_done_states


class VertexAiStatefulResource(VertexAiResourceNounWithFutureManager, StatefulResource):
    """Extends StatefulResource to include a check for self._gca_resource."""

    def done(self) -> bool:
        """Method indicating whether a job has completed.

        Returns:
            True if the job has completed.
        """
        # Guard clause: without a created GCA resource carrying a
        # server-assigned name, the job cannot be in a done state yet.
        if not (self._gca_resource and self._gca_resource.name):
            return False
        return super().done()
10 changes: 10 additions & 0 deletions google/cloud/aiplatform/compat/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,11 @@
types.job_state = types.job_state_v1beta1
types.machine_resources = types.machine_resources_v1beta1
types.manual_batch_tuning_parameters = types.manual_batch_tuning_parameters_v1beta1
types.matching_engine_deployed_index_ref = (
types.matching_engine_deployed_index_ref_v1beta1
)
types.matching_engine_index = types.matching_engine_index_v1beta1
types.matching_engine_index_endpoint = types.matching_engine_index_endpoint_v1beta1
types.metadata_service = types.metadata_service_v1beta1
types.metadata_store = types.metadata_store_v1beta1
types.model = types.model_v1beta1
Expand Down Expand Up @@ -147,6 +152,11 @@
types.job_state = types.job_state_v1
types.machine_resources = types.machine_resources_v1
types.manual_batch_tuning_parameters = types.manual_batch_tuning_parameters_v1
types.matching_engine_deployed_index_ref = (
types.matching_engine_deployed_index_ref_v1
)
types.matching_engine_index = types.matching_engine_index_v1
types.matching_engine_index_endpoint = types.matching_engine_index_endpoint_v1
types.metadata_service = types.metadata_service_v1
types.metadata_store = types.metadata_store_v1
types.model = types.model_v1
Expand Down
16 changes: 16 additions & 0 deletions google/cloud/aiplatform/compat/services/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,12 @@
from google.cloud.aiplatform_v1beta1.services.featurestore_service import (
client as featurestore_service_client_v1beta1,
)
from google.cloud.aiplatform_v1beta1.services.index_service import (
client as index_service_client_v1beta1,
)
from google.cloud.aiplatform_v1beta1.services.index_endpoint_service import (
client as index_endpoint_service_client_v1beta1,
)
from google.cloud.aiplatform_v1beta1.services.job_service import (
client as job_service_client_v1beta1,
)
Expand Down Expand Up @@ -61,6 +67,12 @@
from google.cloud.aiplatform_v1.services.featurestore_service import (
client as featurestore_service_client_v1,
)
from google.cloud.aiplatform_v1.services.index_service import (
client as index_service_client_v1,
)
from google.cloud.aiplatform_v1.services.index_endpoint_service import (
client as index_endpoint_service_client_v1,
)
from google.cloud.aiplatform_v1.services.job_service import (
client as job_service_client_v1,
)
Expand Down Expand Up @@ -89,6 +101,8 @@
endpoint_service_client_v1,
featurestore_online_serving_service_client_v1,
featurestore_service_client_v1,
index_service_client_v1,
index_endpoint_service_client_v1,
job_service_client_v1,
metadata_service_client_v1,
model_service_client_v1,
Expand All @@ -101,6 +115,8 @@
endpoint_service_client_v1beta1,
featurestore_online_serving_service_client_v1beta1,
featurestore_service_client_v1beta1,
index_service_client_v1beta1,
index_endpoint_service_client_v1beta1,
job_service_client_v1beta1,
model_service_client_v1beta1,
pipeline_service_client_v1beta1,
Expand Down
12 changes: 12 additions & 0 deletions google/cloud/aiplatform/compat/types/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
data_labeling_job as data_labeling_job_v1beta1,
dataset as dataset_v1beta1,
dataset_service as dataset_service_v1beta1,
deployed_index_ref as matching_engine_deployed_index_ref_v1beta1,
deployed_model_ref as deployed_model_ref_v1beta1,
encryption_spec as encryption_spec_v1beta1,
endpoint as endpoint_v1beta1,
Expand All @@ -45,6 +46,8 @@
featurestore_monitoring as featurestore_monitoring_v1beta1,
featurestore_online_service as featurestore_online_service_v1beta1,
featurestore_service as featurestore_service_v1beta1,
index as matching_engine_index_v1beta1,
index_endpoint as matching_engine_index_endpoint_v1beta1,
hyperparameter_tuning_job as hyperparameter_tuning_job_v1beta1,
io as io_v1beta1,
job_service as job_service_v1beta1,
Expand Down Expand Up @@ -86,6 +89,7 @@
data_labeling_job as data_labeling_job_v1,
dataset as dataset_v1,
dataset_service as dataset_service_v1,
deployed_index_ref as matching_engine_deployed_index_ref_v1,
deployed_model_ref as deployed_model_ref_v1,
encryption_spec as encryption_spec_v1,
endpoint as endpoint_v1,
Expand All @@ -103,6 +107,8 @@
featurestore_online_service as featurestore_online_service_v1,
featurestore_service as featurestore_service_v1,
hyperparameter_tuning_job as hyperparameter_tuning_job_v1,
index as matching_engine_index_v1,
index_endpoint as matching_engine_index_endpoint_v1,
io as io_v1,
job_service as job_service_v1,
job_state as job_state_v1,
Expand Down Expand Up @@ -167,6 +173,9 @@
job_state_v1,
machine_resources_v1,
manual_batch_tuning_parameters_v1,
matching_engine_deployed_index_ref_v1,
matching_engine_index_v1,
matching_engine_index_endpoint_v1,
metadata_service_v1,
metadata_store_v1,
model_v1,
Expand Down Expand Up @@ -223,6 +232,9 @@
job_state_v1beta1,
machine_resources_v1beta1,
manual_batch_tuning_parameters_v1beta1,
matching_engine_deployed_index_ref_v1beta1,
matching_engine_index_v1beta1,
matching_engine_index_endpoint_v1beta1,
metadata_service_v1beta1,
metadata_store_v1beta1,
model_v1beta1,
Expand Down
2 changes: 2 additions & 0 deletions google/cloud/aiplatform/constants/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@
"us-east4",
"us-west1",
"us-west2",
"us-west4",
"southamerica-east1",
}

API_BASE_PATH = "aiplatform.googleapis.com"
Expand Down
4 changes: 2 additions & 2 deletions google/cloud/aiplatform/datasets/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
labels specified inside index file refenced by
labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
Expand Down Expand Up @@ -489,7 +489,7 @@ def import_data(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
labels specified inside index file refenced by
labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
sync (bool):
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/datasets/image_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
labels specified inside index file refenced by
labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/datasets/text_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
labels specified inside index file refenced by
labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
Expand Down
2 changes: 1 addition & 1 deletion google/cloud/aiplatform/datasets/video_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def create(
be picked randomly. Two DataItems are considered identical
if their content bytes are identical (e.g. image bytes or
pdf bytes). These labels will be overridden by Annotation
labels specified inside index file refenced by
labels specified inside index file referenced by
``import_schema_uri``,
e.g. jsonl file.
project (str):
Expand Down
6 changes: 3 additions & 3 deletions google/cloud/aiplatform/explain/lit.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,23 +29,23 @@
except ImportError:
raise ImportError(
"LIT is not installed and is required to get Dataset as the return format. "
'Please install the SDK using "pip install python-aiplatform[lit]"'
'Please install the SDK using "pip install google-cloud-aiplatform[lit]"'
)

try:
import tensorflow as tf
except ImportError:
raise ImportError(
"Tensorflow is not installed and is required to load saved model. "
'Please install the SDK using "pip install pip install python-aiplatform[lit]"'
'Please install the SDK using "pip install google-cloud-aiplatform[lit]"'
)

try:
import pandas as pd
except ImportError:
raise ImportError(
"Pandas is not installed and is required to read the dataset. "
'Please install Pandas using "pip install python-aiplatform[lit]"'
'Please install Pandas using "pip install google-cloud-aiplatform[lit]"'
)


Expand Down

0 comments on commit 45d8e14

Please sign in to comment.