Skip to content

Commit

Permalink
chore: unit test cleanup
Browse files Browse the repository at this point in the history
PiperOrigin-RevId: 523424642
  • Loading branch information
vertex-sdk-bot authored and Copybara-Service committed Apr 11, 2023
1 parent e55a177 commit cfb0bbd
Show file tree
Hide file tree
Showing 9 changed files with 472 additions and 184 deletions.
196 changes: 195 additions & 1 deletion tests/unit/aiplatform/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@

from unittest import mock

from google.cloud import aiplatform
from google.cloud.aiplatform.utils import source_utils
import constants as test_constants
from google.cloud.aiplatform.compat.services import (
Expand All @@ -34,16 +35,24 @@
from google.cloud.aiplatform.compat.types import (
context,
endpoint,
endpoint_service,
model,
model_service,
pipeline_job,
pipeline_state,
tensorboard,
tensorboard_service,
dataset,
prediction_service,
training_pipeline,
)


from google.cloud.aiplatform.compat.services import endpoint_service_client
from google.cloud.aiplatform.compat.services import (
dataset_service_client,
endpoint_service_client,
prediction_service_client,
)


# Module-scoped fixtures
Expand Down Expand Up @@ -106,6 +115,25 @@ def get_model_with_version_mock():
yield get_model_mock


@pytest.fixture
def deploy_model_mock():
    """Mocks EndpointServiceClient.deploy_model to return a successful LRO.

    The mocked long-running operation resolves to a DeployModelResponse
    wrapping the canned test model.
    """
    model_constants = test_constants.ModelConstants
    response = endpoint_service.DeployModelResponse(
        deployed_model=endpoint.DeployedModel(
            model=model_constants._TEST_MODEL_RESOURCE_NAME,
            display_name=model_constants._TEST_MODEL_NAME,
        ),
    )
    lro = mock.Mock(operation.Operation)
    lro.result.return_value = response
    with mock.patch.object(
        endpoint_service_client.EndpointServiceClient, "deploy_model"
    ) as patched:
        patched.return_value = lro
        yield patched

# Tensorboard fixtures
@pytest.fixture
def get_tensorboard_mock():
Expand Down Expand Up @@ -219,6 +247,19 @@ def create_endpoint_mock():
yield create_endpoint_mock


@pytest.fixture
def get_endpoint_mock():
    """Mocks EndpointServiceClient.get_endpoint to return a canned Endpoint."""
    canned_endpoint = endpoint.Endpoint(
        display_name=test_constants.EndpointConstants._TEST_DISPLAY_NAME,
        name=test_constants.EndpointConstants._TEST_ENDPOINT_NAME,
        encryption_spec=test_constants.ProjectConstants._TEST_ENCRYPTION_SPEC,
    )
    with mock.patch.object(
        endpoint_service_client.EndpointServiceClient, "get_endpoint"
    ) as patched:
        patched.return_value = canned_endpoint
        yield patched

@pytest.fixture
def get_endpoint_with_models_mock():
with mock.patch.object(
Expand All @@ -233,6 +274,22 @@ def get_endpoint_with_models_mock():
yield get_endpoint_mock


@pytest.fixture
def predict_client_predict_mock():
    """Mocks PredictionServiceClient.predict with a canned PredictResponse.

    The response carries the canned deployed-model/version identifiers and
    the canned prediction rows from the shared endpoint constants.
    """
    endpoint_constants = test_constants.EndpointConstants
    canned_response = prediction_service.PredictResponse(
        deployed_model_id=endpoint_constants._TEST_MODEL_ID,
        model_version_id=endpoint_constants._TEST_VERSION_ID,
        model=endpoint_constants._TEST_MODEL_NAME,
    )
    canned_response.predictions.extend(endpoint_constants._TEST_PREDICTION)
    with mock.patch.object(
        prediction_service_client.PredictionServiceClient, "predict"
    ) as patched:
        patched.return_value = canned_response
        yield patched

# PipelineJob fixtures
def make_pipeline_job(state):
return pipeline_job.PipelineJob(
Expand Down Expand Up @@ -267,3 +324,140 @@ def get_pipeline_job_mock():
]

yield mock_get_pipeline_job


# Dataset mocks
@pytest.fixture
def create_dataset_mock():
    """Mocks DatasetServiceClient.create_dataset to return a successful LRO.

    The mocked long-running operation resolves to the canned text Dataset.
    """
    ds_constants = test_constants.DatasetConstants
    lro = mock.Mock(operation.Operation)
    lro.result.return_value = dataset.Dataset(
        name=ds_constants._TEST_NAME,
        display_name=ds_constants._TEST_DISPLAY_NAME,
        metadata_schema_uri=ds_constants._TEST_METADATA_SCHEMA_URI_TEXT,
        encryption_spec=ds_constants._TEST_ENCRYPTION_SPEC,
    )
    with mock.patch.object(
        dataset_service_client.DatasetServiceClient, "create_dataset"
    ) as patched:
        patched.return_value = lro
        yield patched


@pytest.fixture
def get_dataset_mock():
    """Mocks DatasetServiceClient.get_dataset to return the non-tabular Dataset."""
    ds_constants = test_constants.DatasetConstants
    canned_dataset = dataset.Dataset(
        display_name=ds_constants._TEST_DISPLAY_NAME,
        metadata_schema_uri=ds_constants._TEST_METADATA_SCHEMA_URI_NONTABULAR,
        name=ds_constants._TEST_NAME,
        metadata=ds_constants._TEST_NONTABULAR_DATASET_METADATA,
        encryption_spec=ds_constants._TEST_ENCRYPTION_SPEC,
    )
    with mock.patch.object(
        dataset_service_client.DatasetServiceClient, "get_dataset"
    ) as patched:
        patched.return_value = canned_dataset
        yield patched


@pytest.fixture
def import_data_mock():
    """Mocks DatasetServiceClient.import_data with a generic LRO mock."""
    with mock.patch.object(
        dataset_service_client.DatasetServiceClient, "import_data"
    ) as patched:
        patched.return_value = mock.Mock(operation.Operation)
        yield patched


# TrainingJob mocks
@pytest.fixture
def mock_model_service_get():
    """Mocks ModelServiceClient.get_model to return a deployable Model.

    The canned model advertises DEDICATED_RESOURCES deployment support and
    version id "1".
    """
    canned_model = model.Model(
        name=test_constants.TrainingJobConstants._TEST_MODEL_NAME
    )
    canned_model.supported_deployment_resources_types.append(
        aiplatform.gapic.Model.DeploymentResourcesType.DEDICATED_RESOURCES
    )
    canned_model.version_id = "1"
    with mock.patch.object(
        model_service_client.ModelServiceClient, "get_model"
    ) as patched:
        patched.return_value = canned_model
        yield patched


@pytest.fixture
def mock_pipeline_service_create():
    """Mocks create_training_pipeline to return a SUCCEEDED pipeline."""
    job_constants = test_constants.TrainingJobConstants
    canned_pipeline = training_pipeline.TrainingPipeline(
        name=job_constants._TEST_PIPELINE_RESOURCE_NAME,
        state=pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED,
        model_to_upload=model.Model(name=job_constants._TEST_MODEL_NAME),
    )
    with mock.patch.object(
        pipeline_service_client.PipelineServiceClient, "create_training_pipeline"
    ) as patched:
        patched.return_value = canned_pipeline
        yield patched


def make_training_pipeline(state, add_training_task_metadata=True):
    """Builds a canned TrainingPipeline proto in the given state.

    Args:
        state: The pipeline_state.PipelineState to stamp on the pipeline.
        add_training_task_metadata: When True, attach the backing custom job
            resource name as training task metadata; otherwise metadata is None.

    Returns:
        A training_pipeline.TrainingPipeline built from the shared
        TrainingJobConstants.
    """
    job_constants = test_constants.TrainingJobConstants
    if add_training_task_metadata:
        task_metadata = {
            "backingCustomJob": job_constants._TEST_CUSTOM_JOB_RESOURCE_NAME
        }
    else:
        task_metadata = None
    return training_pipeline.TrainingPipeline(
        name=job_constants._TEST_PIPELINE_RESOURCE_NAME,
        state=state,
        model_to_upload=model.Model(name=job_constants._TEST_MODEL_NAME),
        training_task_inputs={
            "tensorboard": job_constants._TEST_TENSORBOARD_RESOURCE_NAME
        },
        training_task_metadata=task_metadata,
    )


@pytest.fixture
def mock_pipeline_service_get(make_call=make_training_pipeline):
    """Mocks get_training_pipeline polling: two RUNNING calls, then SUCCEEDED.

    The first RUNNING response omits the training task metadata; eight
    subsequent calls return SUCCEEDED pipelines so repeated polling succeeds.
    """
    running = pipeline_state.PipelineState.PIPELINE_STATE_RUNNING
    succeeded = pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED
    responses = [
        make_call(running, add_training_task_metadata=False),
        make_call(running),
    ]
    # Eight identical terminal responses cover tests that poll repeatedly.
    responses.extend(make_call(succeeded) for _ in range(8))
    with mock.patch.object(
        pipeline_service_client.PipelineServiceClient, "get_training_pipeline"
    ) as patched:
        patched.side_effect = responses
        yield patched


@pytest.fixture
def mock_pipeline_service_create_and_get_with_fail():
    """Mocks a pipeline that is created RUNNING but then polls as FAILED.

    Yields:
        A (create_training_pipeline mock, get_training_pipeline mock) pair.
    """
    job_constants = test_constants.TrainingJobConstants
    with mock.patch.object(
        pipeline_service_client.PipelineServiceClient, "create_training_pipeline"
    ) as create_mock, mock.patch.object(
        pipeline_service_client.PipelineServiceClient, "get_training_pipeline"
    ) as get_mock:
        create_mock.return_value = training_pipeline.TrainingPipeline(
            name=job_constants._TEST_PIPELINE_RESOURCE_NAME,
            state=pipeline_state.PipelineState.PIPELINE_STATE_RUNNING,
        )
        get_mock.return_value = training_pipeline.TrainingPipeline(
            name=job_constants._TEST_PIPELINE_RESOURCE_NAME,
            state=pipeline_state.PipelineState.PIPELINE_STATE_FAILED,
        )
        yield create_mock, get_mock
45 changes: 44 additions & 1 deletion tests/unit/aiplatform/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,14 @@

import dataclasses
from datetime import datetime

from unittest import mock
from google.auth import credentials as auth_credentials
from google.protobuf import timestamp_pb2, duration_pb2

from google.cloud.aiplatform.utils import source_utils
from google.cloud.aiplatform import explain
from google.cloud.aiplatform import utils
from google.cloud.aiplatform import schema

from google.cloud.aiplatform.compat.services import (
model_service_client,
Expand Down Expand Up @@ -130,6 +132,20 @@ class TrainingJobConstants:
labels=ProjectConstants._TEST_LABELS,
encryption_spec=ProjectConstants._TEST_ENCRYPTION_SPEC,
)
_TEST_PIPELINE_RESOURCE_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/us-central1/trainingPipelines/{_TEST_ID}"
_TEST_BUCKET_NAME = "test-bucket"
_TEST_TENSORBOARD_RESOURCE_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/tensorboards/{_TEST_ID}"
_TEST_MODEL_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/us-central1/models/{_TEST_ID}"
_TEST_CUSTOM_JOB_RESOURCE_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/customJobs/{_TEST_ID}"
_TEST_CREDENTIALS = mock.Mock(spec=auth_credentials.AnonymousCredentials())
_TEST_SERVING_CONTAINER_PREDICTION_ROUTE = "predict"
_TEST_SERVING_CONTAINER_HEALTH_ROUTE = "metadata"
_TEST_MODEL_DISPLAY_NAME = "model-display-name"
_TEST_TRAINING_FRACTION_SPLIT = 0.6
_TEST_VALIDATION_FRACTION_SPLIT = 0.2
_TEST_TEST_FRACTION_SPLIT = 0.2
_TEST_BOOT_DISK_TYPE_DEFAULT = "pd-ssd"
_TEST_BOOT_DISK_SIZE_GB_DEFAULT = 100


@dataclasses.dataclass(frozen=True)
Expand Down Expand Up @@ -180,6 +196,10 @@ class EndpointConstants:
endpoint.DeployedModel(id=_TEST_ID_3, display_name=_TEST_DISPLAY_NAME_3),
]
_TEST_TRAFFIC_SPLIT = {_TEST_ID: 0, _TEST_ID_2: 100, _TEST_ID_3: 0}
_TEST_MODEL_ID = "1028944691210842416"
_TEST_PREDICTION = [[1.0, 2.0, 3.0], [3.0, 3.0, 1.0]]
_TEST_VERSION_ID = "1"
_TEST_MODEL_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/models/{_TEST_ID}"


@dataclasses.dataclass(frozen=True)
Expand Down Expand Up @@ -237,3 +257,26 @@ class PipelineJobConstants:
_TEST_PIPELINE_JOB_ID = "sample-test-pipeline-202111111"
_TEST_PIPELINE_JOB_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/pipelineJobs/{_TEST_PIPELINE_JOB_ID}"
_TEST_PIPELINE_CREATE_TIME = datetime.now()


@dataclasses.dataclass(frozen=True)
class DatasetConstants:
    """Defines constants used by tests that create Dataset resources."""

    # Numeric resource ID shared by the dataset fixtures.
    _TEST_ID = "1028944691210842416"
    # Fully-qualified dataset resource name built from the shared project constants.
    _TEST_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/datasets/{_TEST_ID}"
    _TEST_DISPLAY_NAME = "my_dataset_1234"
    # CMEK key name; only used to build _TEST_ENCRYPTION_SPEC below.
    _TEST_ENCRYPTION_KEY_NAME = "key_1234"
    _TEST_METADATA_SCHEMA_URI_TEXT = schema.dataset.metadata.text
    _TEST_ENCRYPTION_SPEC = encryption_spec.EncryptionSpec(
        kms_key_name=_TEST_ENCRYPTION_KEY_NAME
    )
    # The image schema serves as the "non-tabular" dataset schema in mocks.
    _TEST_METADATA_SCHEMA_URI_NONTABULAR = schema.dataset.metadata.image
    _TEST_NONTABULAR_DATASET_METADATA = None
    _TEST_IMPORT_SCHEMA_URI = schema.dataset.ioformat.image.single_label_classification
    # Alias of _TEST_IMPORT_SCHEMA_URI for tests that name the image schema explicitly.
    _TEST_IMPORT_SCHEMA_URI_IMAGE = (
        schema.dataset.ioformat.image.single_label_classification
    )
    _TEST_DATA_LABEL_ITEMS = None
    # Empty request metadata tuple passed through to mocked service calls.
    _TEST_REQUEST_METADATA = ()
    _TEST_SOURCE_URI_GCS = "gs://my-bucket/my_index_file.jsonl"
12 changes: 5 additions & 7 deletions tests/unit/aiplatform/test_autologging.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,13 +51,13 @@
tensorboard_time_series as gca_tensorboard_time_series,
)
from google.cloud.aiplatform.metadata import constants
import constants as test_constants

from google.cloud.aiplatform.compat.services import (
tensorboard_service_client,
)

from google.cloud.aiplatform.compat.types import (
encryption_spec as gca_encryption_spec,
tensorboard as gca_tensorboard,
)

Expand All @@ -66,9 +66,9 @@

import numpy as np

_TEST_PROJECT = "test-project"
_TEST_PROJECT = test_constants.ProjectConstants._TEST_PROJECT
_TEST_OTHER_PROJECT = "test-project-1"
_TEST_LOCATION = "us-central1"
_TEST_LOCATION = test_constants.ProjectConstants._TEST_LOCATION
_TEST_PARENT = (
f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/metadataStores/default"
)
Expand Down Expand Up @@ -167,10 +167,8 @@
f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/tensorboards/{_TEST_TB_ID}"
)
_TEST_TB_DISPLAY_NAME = "my_tensorboard_1234"
_TEST_ENCRYPTION_KEY_NAME = "key_1234"
_TEST_ENCRYPTION_SPEC = gca_encryption_spec.EncryptionSpec(
kms_key_name=_TEST_ENCRYPTION_KEY_NAME
)
_TEST_ENCRYPTION_KEY_NAME = test_constants.ProjectConstants._TEST_ENCRYPTION_KEY_NAME
_TEST_ENCRYPTION_SPEC = test_constants.ProjectConstants._TEST_ENCRYPTION_SPEC
_TEST_TB_NAME = (
f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/tensorboards/{_TEST_TB_ID}"
)
Expand Down
Loading

0 comments on commit cfb0bbd

Please sign in to comment.