From cfb0bbdb958b8d5dcb3d8c98c20941571410381b Mon Sep 17 00:00:00 2001 From: A Vertex SDK engineer Date: Tue, 11 Apr 2023 09:34:23 -0700 Subject: [PATCH] chore: unit test cleanup PiperOrigin-RevId: 523424642 --- tests/unit/aiplatform/conftest.py | 196 ++++++++++- tests/unit/aiplatform/constants.py | 45 ++- tests/unit/aiplatform/test_autologging.py | 12 +- tests/unit/aiplatform/test_end_to_end.py | 322 +++++++++++-------- tests/unit/aiplatform/test_endpoints.py | 44 ++- tests/unit/aiplatform/test_initializer.py | 5 +- tests/unit/aiplatform/test_metadata_store.py | 4 +- tests/unit/aiplatform/test_prediction.py | 20 +- tests/unit/aiplatform/test_uploader_main.py | 8 +- 9 files changed, 472 insertions(+), 184 deletions(-) diff --git a/tests/unit/aiplatform/conftest.py b/tests/unit/aiplatform/conftest.py index e731f1bbee..d01a1a4061 100644 --- a/tests/unit/aiplatform/conftest.py +++ b/tests/unit/aiplatform/conftest.py @@ -23,6 +23,7 @@ from unittest import mock +from google.cloud import aiplatform from google.cloud.aiplatform.utils import source_utils import constants as test_constants from google.cloud.aiplatform.compat.services import ( @@ -34,16 +35,24 @@ from google.cloud.aiplatform.compat.types import ( context, endpoint, + endpoint_service, model, model_service, pipeline_job, pipeline_state, tensorboard, tensorboard_service, + dataset, + prediction_service, + training_pipeline, ) -from google.cloud.aiplatform.compat.services import endpoint_service_client +from google.cloud.aiplatform.compat.services import ( + dataset_service_client, + endpoint_service_client, + prediction_service_client, +) # Module-scoped fixtures @@ -106,6 +115,25 @@ def get_model_with_version_mock(): yield get_model_mock +@pytest.fixture +def deploy_model_mock(): + with mock.patch.object( + endpoint_service_client.EndpointServiceClient, "deploy_model" + ) as deploy_model_mock: + deployed_model = endpoint.DeployedModel( + model=test_constants.ModelConstants._TEST_MODEL_RESOURCE_NAME, + display_name=test_constants.ModelConstants._TEST_MODEL_NAME, + ) + deploy_model_lro_mock = mock.Mock(operation.Operation) + deploy_model_lro_mock.result.return_value = ( + endpoint_service.DeployModelResponse( + deployed_model=deployed_model, + ) + ) + deploy_model_mock.return_value = deploy_model_lro_mock + yield deploy_model_mock + + # Tensorboard fixtures @pytest.fixture def get_tensorboard_mock(): @@ -219,6 +247,19 @@ def create_endpoint_mock(): yield create_endpoint_mock +@pytest.fixture +def get_endpoint_mock(): + with mock.patch.object( + endpoint_service_client.EndpointServiceClient, "get_endpoint" + ) as get_endpoint_mock: + get_endpoint_mock.return_value = endpoint.Endpoint( + display_name=test_constants.EndpointConstants._TEST_DISPLAY_NAME, + name=test_constants.EndpointConstants._TEST_ENDPOINT_NAME, + encryption_spec=test_constants.ProjectConstants._TEST_ENCRYPTION_SPEC, + ) + yield get_endpoint_mock + + @pytest.fixture def get_endpoint_with_models_mock(): with mock.patch.object( @@ -233,6 +274,22 @@ def get_endpoint_with_models_mock(): yield get_endpoint_mock +@pytest.fixture +def predict_client_predict_mock(): + with mock.patch.object( + prediction_service_client.PredictionServiceClient, "predict" + ) as predict_mock: + predict_mock.return_value = prediction_service.PredictResponse( + deployed_model_id=test_constants.EndpointConstants._TEST_MODEL_ID, + model_version_id=test_constants.EndpointConstants._TEST_VERSION_ID, + model=test_constants.EndpointConstants._TEST_MODEL_NAME, + ) + 
predict_mock.return_value.predictions.extend( + test_constants.EndpointConstants._TEST_PREDICTION + ) + yield predict_mock + + # PipelineJob fixtures def make_pipeline_job(state): return pipeline_job.PipelineJob( @@ -267,3 +324,140 @@ def get_pipeline_job_mock(): ] yield mock_get_pipeline_job + + +# Dataset mocks +@pytest.fixture +def create_dataset_mock(): + with mock.patch.object( + dataset_service_client.DatasetServiceClient, "create_dataset" + ) as create_dataset_mock: + create_dataset_lro_mock = mock.Mock(operation.Operation) + create_dataset_lro_mock.result.return_value = dataset.Dataset( + name=test_constants.DatasetConstants._TEST_NAME, + display_name=test_constants.DatasetConstants._TEST_DISPLAY_NAME, + metadata_schema_uri=test_constants.DatasetConstants._TEST_METADATA_SCHEMA_URI_TEXT, + encryption_spec=test_constants.DatasetConstants._TEST_ENCRYPTION_SPEC, + ) + create_dataset_mock.return_value = create_dataset_lro_mock + yield create_dataset_mock + + +@pytest.fixture +def get_dataset_mock(): + with mock.patch.object( + dataset_service_client.DatasetServiceClient, "get_dataset" + ) as get_dataset_mock: + get_dataset_mock.return_value = dataset.Dataset( + display_name=test_constants.DatasetConstants._TEST_DISPLAY_NAME, + metadata_schema_uri=test_constants.DatasetConstants._TEST_METADATA_SCHEMA_URI_NONTABULAR, + name=test_constants.DatasetConstants._TEST_NAME, + metadata=test_constants.DatasetConstants._TEST_NONTABULAR_DATASET_METADATA, + encryption_spec=test_constants.DatasetConstants._TEST_ENCRYPTION_SPEC, + ) + yield get_dataset_mock + + +@pytest.fixture +def import_data_mock(): + with mock.patch.object( + dataset_service_client.DatasetServiceClient, "import_data" + ) as import_data_mock: + import_data_mock.return_value = mock.Mock(operation.Operation) + yield import_data_mock + + +# TrainingJob mocks +@pytest.fixture +def mock_model_service_get(): + with mock.patch.object( + model_service_client.ModelServiceClient, "get_model" + ) as mock_get_model: + mock_get_model.return_value = model.Model( + name=test_constants.TrainingJobConstants._TEST_MODEL_NAME + ) + mock_get_model.return_value.supported_deployment_resources_types.append( + aiplatform.gapic.Model.DeploymentResourcesType.DEDICATED_RESOURCES + ) + mock_get_model.return_value.version_id = "1" + yield mock_get_model + + +@pytest.fixture +def mock_pipeline_service_create(): + with mock.patch.object( + pipeline_service_client.PipelineServiceClient, "create_training_pipeline" + ) as mock_create_training_pipeline: + mock_create_training_pipeline.return_value = training_pipeline.TrainingPipeline( + name=test_constants.TrainingJobConstants._TEST_PIPELINE_RESOURCE_NAME, + state=pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED, + model_to_upload=model.Model( + name=test_constants.TrainingJobConstants._TEST_MODEL_NAME + ), + ) + yield mock_create_training_pipeline + + +def make_training_pipeline(state, add_training_task_metadata=True): + return training_pipeline.TrainingPipeline( + name=test_constants.TrainingJobConstants._TEST_PIPELINE_RESOURCE_NAME, + state=state, + model_to_upload=model.Model( + name=test_constants.TrainingJobConstants._TEST_MODEL_NAME + ), + training_task_inputs={ + "tensorboard": test_constants.TrainingJobConstants._TEST_TENSORBOARD_RESOURCE_NAME + }, + training_task_metadata={ + "backingCustomJob": test_constants.TrainingJobConstants._TEST_CUSTOM_JOB_RESOURCE_NAME + } + if add_training_task_metadata + else None, + ) + + +@pytest.fixture +def mock_pipeline_service_get(make_call=make_training_pipeline): + 
with mock.patch.object( + pipeline_service_client.PipelineServiceClient, "get_training_pipeline" + ) as mock_get_training_pipeline: + mock_get_training_pipeline.side_effect = [ + make_call( + pipeline_state.PipelineState.PIPELINE_STATE_RUNNING, + add_training_task_metadata=False, + ), + make_call( + pipeline_state.PipelineState.PIPELINE_STATE_RUNNING, + ), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + make_call(pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED), + ] + + yield mock_get_training_pipeline + + +@pytest.fixture +def mock_pipeline_service_create_and_get_with_fail(): + with mock.patch.object( + pipeline_service_client.PipelineServiceClient, "create_training_pipeline" + ) as mock_create_training_pipeline: + mock_create_training_pipeline.return_value = training_pipeline.TrainingPipeline( + name=test_constants.TrainingJobConstants._TEST_PIPELINE_RESOURCE_NAME, + state=pipeline_state.PipelineState.PIPELINE_STATE_RUNNING, + ) + + with mock.patch.object( + pipeline_service_client.PipelineServiceClient, "get_training_pipeline" + ) as mock_get_training_pipeline: + mock_get_training_pipeline.return_value = training_pipeline.TrainingPipeline( + name=test_constants.TrainingJobConstants._TEST_PIPELINE_RESOURCE_NAME, + state=pipeline_state.PipelineState.PIPELINE_STATE_FAILED, + ) + + yield mock_create_training_pipeline, mock_get_training_pipeline diff --git a/tests/unit/aiplatform/constants.py b/tests/unit/aiplatform/constants.py index 2c6a6b495f..18cfd0c956 100644 --- a/tests/unit/aiplatform/constants.py +++ b/tests/unit/aiplatform/constants.py @@ -18,12 +18,14 @@ import dataclasses from datetime import datetime - +from unittest import mock +from google.auth import credentials as auth_credentials from google.protobuf import timestamp_pb2, duration_pb2 from google.cloud.aiplatform.utils import source_utils from google.cloud.aiplatform import explain from google.cloud.aiplatform import utils +from google.cloud.aiplatform import schema from google.cloud.aiplatform.compat.services import ( model_service_client, @@ -130,6 +132,20 @@ class TrainingJobConstants: labels=ProjectConstants._TEST_LABELS, encryption_spec=ProjectConstants._TEST_ENCRYPTION_SPEC, ) + _TEST_PIPELINE_RESOURCE_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/us-central1/trainingPipelines/{_TEST_ID}" + _TEST_BUCKET_NAME = "test-bucket" + _TEST_TENSORBOARD_RESOURCE_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/tensorboards/{_TEST_ID}" + _TEST_MODEL_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/us-central1/models/{_TEST_ID}" + _TEST_CUSTOM_JOB_RESOURCE_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/customJobs/{_TEST_ID}" + _TEST_CREDENTIALS = mock.Mock(spec=auth_credentials.AnonymousCredentials()) + _TEST_SERVING_CONTAINER_PREDICTION_ROUTE = "predict" + _TEST_SERVING_CONTAINER_HEALTH_ROUTE = "metadata" + _TEST_MODEL_DISPLAY_NAME = "model-display-name" + _TEST_TRAINING_FRACTION_SPLIT = 0.6 + _TEST_VALIDATION_FRACTION_SPLIT = 0.2 + _TEST_TEST_FRACTION_SPLIT = 0.2 + 
_TEST_BOOT_DISK_TYPE_DEFAULT = "pd-ssd" + _TEST_BOOT_DISK_SIZE_GB_DEFAULT = 100 @dataclasses.dataclass(frozen=True) @@ -180,6 +196,10 @@ class EndpointConstants: endpoint.DeployedModel(id=_TEST_ID_3, display_name=_TEST_DISPLAY_NAME_3), ] _TEST_TRAFFIC_SPLIT = {_TEST_ID: 0, _TEST_ID_2: 100, _TEST_ID_3: 0} + _TEST_MODEL_ID = "1028944691210842416" + _TEST_PREDICTION = [[1.0, 2.0, 3.0], [3.0, 3.0, 1.0]] + _TEST_VERSION_ID = "1" + _TEST_MODEL_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/models/{_TEST_ID}" @dataclasses.dataclass(frozen=True) @@ -237,3 +257,26 @@ class PipelineJobConstants: _TEST_PIPELINE_JOB_ID = "sample-test-pipeline-202111111" _TEST_PIPELINE_JOB_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/pipelineJobs/{_TEST_PIPELINE_JOB_ID}" _TEST_PIPELINE_CREATE_TIME = datetime.now() + + +@dataclasses.dataclass(frozen=True) +class DatasetConstants: + """Defines constants used by tests that create Dataset resources.""" + + _TEST_ID = "1028944691210842416" + _TEST_NAME = f"projects/{ProjectConstants._TEST_PROJECT}/locations/{ProjectConstants._TEST_LOCATION}/datasets/{_TEST_ID}" + _TEST_DISPLAY_NAME = "my_dataset_1234" + _TEST_ENCRYPTION_KEY_NAME = "key_1234" + _TEST_METADATA_SCHEMA_URI_TEXT = schema.dataset.metadata.text + _TEST_ENCRYPTION_SPEC = encryption_spec.EncryptionSpec( + kms_key_name=_TEST_ENCRYPTION_KEY_NAME + ) + _TEST_METADATA_SCHEMA_URI_NONTABULAR = schema.dataset.metadata.image + _TEST_NONTABULAR_DATASET_METADATA = None + _TEST_IMPORT_SCHEMA_URI = schema.dataset.ioformat.image.single_label_classification + _TEST_IMPORT_SCHEMA_URI_IMAGE = ( + schema.dataset.ioformat.image.single_label_classification + ) + _TEST_DATA_LABEL_ITEMS = None + _TEST_REQUEST_METADATA = () + _TEST_SOURCE_URI_GCS = "gs://my-bucket/my_index_file.jsonl" diff --git a/tests/unit/aiplatform/test_autologging.py b/tests/unit/aiplatform/test_autologging.py index a408c12eb1..4705c95169 100644 --- a/tests/unit/aiplatform/test_autologging.py +++ b/tests/unit/aiplatform/test_autologging.py @@ -51,13 +51,13 @@ tensorboard_time_series as gca_tensorboard_time_series, ) from google.cloud.aiplatform.metadata import constants +import constants as test_constants from google.cloud.aiplatform.compat.services import ( tensorboard_service_client, ) from google.cloud.aiplatform.compat.types import ( - encryption_spec as gca_encryption_spec, tensorboard as gca_tensorboard, ) @@ -66,9 +66,9 @@ import numpy as np -_TEST_PROJECT = "test-project" +_TEST_PROJECT = test_constants.ProjectConstants._TEST_PROJECT _TEST_OTHER_PROJECT = "test-project-1" -_TEST_LOCATION = "us-central1" +_TEST_LOCATION = test_constants.ProjectConstants._TEST_LOCATION _TEST_PARENT = ( f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/metadataStores/default" ) @@ -167,10 +167,8 @@ f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/tensorboards/{_TEST_TB_ID}" ) _TEST_TB_DISPLAY_NAME = "my_tensorboard_1234" -_TEST_ENCRYPTION_KEY_NAME = "key_1234" -_TEST_ENCRYPTION_SPEC = gca_encryption_spec.EncryptionSpec( - kms_key_name=_TEST_ENCRYPTION_KEY_NAME -) +_TEST_ENCRYPTION_KEY_NAME = test_constants.ProjectConstants._TEST_ENCRYPTION_KEY_NAME +_TEST_ENCRYPTION_SPEC = test_constants.ProjectConstants._TEST_ENCRYPTION_SPEC _TEST_TB_NAME = ( f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/tensorboards/{_TEST_TB_ID}" ) diff --git a/tests/unit/aiplatform/test_end_to_end.py b/tests/unit/aiplatform/test_end_to_end.py index 0b9d5c0345..7a5ccef347 100644 --- 
a/tests/unit/aiplatform/test_end_to_end.py +++ b/tests/unit/aiplatform/test_end_to_end.py @@ -27,43 +27,80 @@ from google.cloud.aiplatform.compat.types import ( dataset as gca_dataset, - encryption_spec as gca_encryption_spec, io as gca_io, model as gca_model, pipeline_state as gca_pipeline_state, training_pipeline as gca_training_pipeline, ) -import test_datasets -from test_datasets import create_dataset_mock # noqa: F401 -from test_datasets import get_dataset_mock # noqa: F401 -from test_datasets import import_data_mock # noqa: F401 - -import test_endpoints -from test_endpoints import create_endpoint_mock # noqa: F401 -from test_endpoints import get_endpoint_mock # noqa: F401 -from test_endpoints import predict_client_predict_mock # noqa: F401 - -from test_models import deploy_model_mock # noqa: F401 - -import test_training_jobs -from test_training_jobs import make_training_pipeline -from test_training_jobs import mock_model_service_get # noqa: F401 -from test_training_jobs import mock_pipeline_service_create # noqa: F401 -from test_training_jobs import mock_pipeline_service_get # noqa: F401 -from test_training_jobs import ( # noqa: F401 - mock_pipeline_service_create_and_get_with_fail, -) -from test_training_jobs import mock_python_package_to_gcs # noqa: F401 +import constants as test_constants from google.protobuf import json_format from google.protobuf import struct_pb2 -# dataset_encryption -_TEST_ENCRYPTION_KEY_NAME = "key_1234" -_TEST_ENCRYPTION_SPEC = gca_encryption_spec.EncryptionSpec( - kms_key_name=_TEST_ENCRYPTION_KEY_NAME + +# Training job test variables +_TEST_CREDENTIALS = test_constants.TrainingJobConstants._TEST_CREDENTIALS +_TEST_JOB_DISPLAY_NAME = "test-display-name" +_TEST_SERVING_CONTAINER_IMAGE = ( + test_constants.TrainingJobConstants._TEST_TRAINING_CONTAINER_IMAGE +) +_TEST_SERVING_CONTAINER_PREDICTION_ROUTE = ( + test_constants.TrainingJobConstants._TEST_SERVING_CONTAINER_PREDICTION_ROUTE +) +_TEST_SERVING_CONTAINER_HEALTH_ROUTE = ( + test_constants.TrainingJobConstants._TEST_SERVING_CONTAINER_HEALTH_ROUTE +) +_TEST_BASE_OUTPUT_DIR = "gs://test-base-output-dir" +_TEST_MACHINE_TYPE = test_constants.TrainingJobConstants._TEST_MACHINE_TYPE +_TEST_ACCELERATOR_TYPE = test_constants.TrainingJobConstants._TEST_ACCELERATOR_TYPE +_TEST_MODEL_DISPLAY_NAME = test_constants.TrainingJobConstants._TEST_MODEL_DISPLAY_NAME +_TEST_TRAINING_FRACTION_SPLIT = ( + test_constants.TrainingJobConstants._TEST_TRAINING_FRACTION_SPLIT +) +_TEST_VALIDATION_FRACTION_SPLIT = ( + test_constants.TrainingJobConstants._TEST_VALIDATION_FRACTION_SPLIT +) +_TEST_TEST_FRACTION_SPLIT = ( + test_constants.TrainingJobConstants._TEST_TEST_FRACTION_SPLIT +) +_TEST_BOOT_DISK_TYPE_DEFAULT = ( + test_constants.TrainingJobConstants._TEST_BOOT_DISK_TYPE_DEFAULT +) +_TEST_BOOT_DISK_SIZE_GB_DEFAULT = ( + test_constants.TrainingJobConstants._TEST_BOOT_DISK_SIZE_GB_DEFAULT +) + + +# Dataset test variables +_TEST_DATA_LABEL_ITEMS = test_constants.DatasetConstants._TEST_DATA_LABEL_ITEMS +_TEST_IMPORT_SCHEMA_URI = test_constants.DatasetConstants._TEST_IMPORT_SCHEMA_URI +_TEST_IMPORT_SCHEMA_URI_IMAGE = ( + test_constants.DatasetConstants._TEST_IMPORT_SCHEMA_URI_IMAGE ) +_TEST_REQUEST_METADATA = test_constants.DatasetConstants._TEST_REQUEST_METADATA +_TEST_NAME = test_constants.DatasetConstants._TEST_NAME +_TEST_SOURCE_URI_GCS = test_constants.DatasetConstants._TEST_SOURCE_URI_GCS +_TEST_ENCRYPTION_KEY_NAME = test_constants.ProjectConstants._TEST_ENCRYPTION_KEY_NAME +_TEST_ENCRYPTION_SPEC = 
test_constants.ProjectConstants._TEST_ENCRYPTION_SPEC + + +def make_training_pipeline(state, add_training_task_metadata=True): + return gca_training_pipeline.TrainingPipeline( + name=test_constants.TrainingJobConstants._TEST_PIPELINE_RESOURCE_NAME, + state=state, + model_to_upload=gca_model.Model( + name=test_constants.TrainingJobConstants._TEST_MODEL_NAME + ), + training_task_inputs={ + "tensorboard": test_constants.TrainingJobConstants._TEST_TENSORBOARD_RESOURCE_NAME + }, + training_task_metadata={ + "backingCustomJob": test_constants.TrainingJobConstants._TEST_CUSTOM_JOB_RESOURCE_NAME + } + if add_training_task_metadata + else None, + ) @pytest.mark.usefixtures("google_auth_mock") @@ -95,53 +132,53 @@ def test_dataset_create_to_model_predict( ): aiplatform.init( - project=test_datasets._TEST_PROJECT, - staging_bucket=test_training_jobs._TEST_BUCKET_NAME, - credentials=test_training_jobs._TEST_CREDENTIALS, + project=test_constants.ProjectConstants._TEST_PROJECT, + staging_bucket=test_constants.TrainingJobConstants._TEST_BUCKET_NAME, + credentials=_TEST_CREDENTIALS, ) my_dataset = aiplatform.ImageDataset.create( - display_name=test_datasets._TEST_DISPLAY_NAME, + display_name=test_constants.DatasetConstants._TEST_DISPLAY_NAME, encryption_spec_key_name=_TEST_ENCRYPTION_KEY_NAME, sync=sync, create_request_timeout=None, ) my_dataset.import_data( - gcs_source=test_datasets._TEST_SOURCE_URI_GCS, - import_schema_uri=test_datasets._TEST_IMPORT_SCHEMA_URI, - data_item_labels=test_datasets._TEST_DATA_LABEL_ITEMS, + gcs_source=_TEST_SOURCE_URI_GCS, + import_schema_uri=_TEST_IMPORT_SCHEMA_URI, + data_item_labels=_TEST_DATA_LABEL_ITEMS, sync=sync, import_request_timeout=None, ) job = aiplatform.CustomTrainingJob( - display_name=test_training_jobs._TEST_DISPLAY_NAME, - script_path=test_training_jobs._TEST_LOCAL_SCRIPT_FILE_NAME, - container_uri=test_training_jobs._TEST_TRAINING_CONTAINER_IMAGE, - model_serving_container_image_uri=test_training_jobs._TEST_SERVING_CONTAINER_IMAGE, - model_serving_container_predict_route=test_training_jobs._TEST_SERVING_CONTAINER_PREDICTION_ROUTE, - model_serving_container_health_route=test_training_jobs._TEST_SERVING_CONTAINER_HEALTH_ROUTE, + display_name=_TEST_JOB_DISPLAY_NAME, + script_path=test_constants.TrainingJobConstants._TEST_LOCAL_SCRIPT_FILE_NAME, + container_uri=test_constants.TrainingJobConstants._TEST_TRAINING_CONTAINER_IMAGE, + model_serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE, + model_serving_container_predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE, + model_serving_container_health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE, ) model_from_job = job.run( dataset=my_dataset, - base_output_dir=test_training_jobs._TEST_BASE_OUTPUT_DIR, - args=test_training_jobs._TEST_RUN_ARGS, + base_output_dir=_TEST_BASE_OUTPUT_DIR, + args=test_constants.TrainingJobConstants._TEST_RUN_ARGS, replica_count=1, - machine_type=test_training_jobs._TEST_MACHINE_TYPE, - accelerator_type=test_training_jobs._TEST_ACCELERATOR_TYPE, - accelerator_count=test_training_jobs._TEST_ACCELERATOR_COUNT, - model_display_name=test_training_jobs._TEST_MODEL_DISPLAY_NAME, - training_fraction_split=test_training_jobs._TEST_TRAINING_FRACTION_SPLIT, - validation_fraction_split=test_training_jobs._TEST_VALIDATION_FRACTION_SPLIT, - test_fraction_split=test_training_jobs._TEST_TEST_FRACTION_SPLIT, + machine_type=_TEST_MACHINE_TYPE, + accelerator_type=test_constants.TrainingJobConstants._TEST_ACCELERATOR_TYPE, + 
accelerator_count=test_constants.TrainingJobConstants._TEST_ACCELERATOR_COUNT, + model_display_name=_TEST_MODEL_DISPLAY_NAME, + training_fraction_split=_TEST_TRAINING_FRACTION_SPLIT, + validation_fraction_split=_TEST_VALIDATION_FRACTION_SPLIT, + test_fraction_split=_TEST_TEST_FRACTION_SPLIT, sync=sync, create_request_timeout=None, ) created_endpoint = models.Endpoint.create( - display_name=test_endpoints._TEST_DISPLAY_NAME, + display_name=test_constants.EndpointConstants._TEST_DISPLAY_NAME, encryption_spec_key_name=_TEST_ENCRYPTION_KEY_NAME, sync=sync, create_request_timeout=None, @@ -172,90 +209,92 @@ def test_dataset_create_to_model_predict( ) true_prediction = models.Prediction( - predictions=test_endpoints._TEST_PREDICTION, - deployed_model_id=test_endpoints._TEST_ID, + predictions=test_constants.EndpointConstants._TEST_PREDICTION, + deployed_model_id=test_constants.EndpointConstants._TEST_ID, model_resource_name=model_from_job.resource_name, model_version_id=model_from_job.version_id, ) assert true_prediction == test_prediction predict_client_predict_mock.assert_called_once_with( - endpoint=test_endpoints._TEST_ENDPOINT_NAME, + endpoint=test_constants.EndpointConstants._TEST_ENDPOINT_NAME, instances=[[1.0, 2.0, 3.0], [1.0, 3.0, 4.0]], parameters={"param": 3.0}, timeout=None, ) expected_dataset = gca_dataset.Dataset( - display_name=test_datasets._TEST_DISPLAY_NAME, - metadata_schema_uri=test_datasets._TEST_METADATA_SCHEMA_URI_NONTABULAR, - metadata=test_datasets._TEST_NONTABULAR_DATASET_METADATA, + display_name=test_constants.DatasetConstants._TEST_DISPLAY_NAME, + metadata_schema_uri=test_constants.DatasetConstants._TEST_METADATA_SCHEMA_URI_NONTABULAR, + metadata=test_constants.DatasetConstants._TEST_NONTABULAR_DATASET_METADATA, encryption_spec=_TEST_ENCRYPTION_SPEC, ) expected_import_config = gca_dataset.ImportDataConfig( - gcs_source=gca_io.GcsSource(uris=[test_datasets._TEST_SOURCE_URI_GCS]), - import_schema_uri=test_datasets._TEST_IMPORT_SCHEMA_URI, - data_item_labels=test_datasets._TEST_DATA_LABEL_ITEMS, + gcs_source=gca_io.GcsSource(uris=[_TEST_SOURCE_URI_GCS]), + import_schema_uri=_TEST_IMPORT_SCHEMA_URI, + data_item_labels=_TEST_DATA_LABEL_ITEMS, ) create_dataset_mock.assert_called_once_with( - parent=test_datasets._TEST_PARENT, + parent=test_constants.ProjectConstants._TEST_PARENT, dataset=expected_dataset, - metadata=test_datasets._TEST_REQUEST_METADATA, + metadata=_TEST_REQUEST_METADATA, timeout=None, ) import_data_mock.assert_called_once_with( - name=test_datasets._TEST_NAME, + name=_TEST_NAME, import_configs=[expected_import_config], timeout=None, ) - expected_dataset.name = test_datasets._TEST_NAME + expected_dataset.name = _TEST_NAME assert my_dataset._gca_resource == expected_dataset mock_python_package_to_gcs.assert_called_once_with( - gcs_staging_dir=test_training_jobs._TEST_BUCKET_NAME, - project=test_training_jobs._TEST_PROJECT, + gcs_staging_dir=test_constants.TrainingJobConstants._TEST_BUCKET_NAME, + project=test_constants.ProjectConstants._TEST_PROJECT, credentials=initializer.global_config.credentials, ) - true_args = test_training_jobs._TEST_RUN_ARGS + true_args = test_constants.TrainingJobConstants._TEST_RUN_ARGS true_worker_pool_spec = { - "replica_count": test_training_jobs._TEST_REPLICA_COUNT, + "replica_count": test_constants.TrainingJobConstants._TEST_REPLICA_COUNT, "machine_spec": { - "machine_type": test_training_jobs._TEST_MACHINE_TYPE, - "accelerator_type": test_training_jobs._TEST_ACCELERATOR_TYPE, - "accelerator_count": 
test_training_jobs._TEST_ACCELERATOR_COUNT, + "machine_type": _TEST_MACHINE_TYPE, + "accelerator_type": _TEST_ACCELERATOR_TYPE, + "accelerator_count": test_constants.TrainingJobConstants._TEST_ACCELERATOR_COUNT, }, "disk_spec": { - "boot_disk_type": test_training_jobs._TEST_BOOT_DISK_TYPE_DEFAULT, - "boot_disk_size_gb": test_training_jobs._TEST_BOOT_DISK_SIZE_GB_DEFAULT, + "boot_disk_type": _TEST_BOOT_DISK_TYPE_DEFAULT, + "boot_disk_size_gb": _TEST_BOOT_DISK_SIZE_GB_DEFAULT, }, "python_package_spec": { - "executor_image_uri": test_training_jobs._TEST_TRAINING_CONTAINER_IMAGE, - "python_module": test_training_jobs._TEST_MODULE_NAME, - "package_uris": [test_training_jobs._TEST_OUTPUT_PYTHON_PACKAGE_PATH], + "executor_image_uri": test_constants.TrainingJobConstants._TEST_TRAINING_CONTAINER_IMAGE, + "python_module": test_constants.TrainingJobConstants._TEST_MODULE_NAME, + "package_uris": [ + test_constants.TrainingJobConstants._TEST_OUTPUT_PYTHON_PACKAGE_PATH + ], "args": true_args, }, } true_fraction_split = gca_training_pipeline.FractionSplit( - training_fraction=test_training_jobs._TEST_TRAINING_FRACTION_SPLIT, - validation_fraction=test_training_jobs._TEST_VALIDATION_FRACTION_SPLIT, - test_fraction=test_training_jobs._TEST_TEST_FRACTION_SPLIT, + training_fraction=_TEST_TRAINING_FRACTION_SPLIT, + validation_fraction=_TEST_VALIDATION_FRACTION_SPLIT, + test_fraction=_TEST_TEST_FRACTION_SPLIT, ) true_container_spec = gca_model.ModelContainerSpec( - image_uri=test_training_jobs._TEST_SERVING_CONTAINER_IMAGE, - predict_route=test_training_jobs._TEST_SERVING_CONTAINER_PREDICTION_ROUTE, - health_route=test_training_jobs._TEST_SERVING_CONTAINER_HEALTH_ROUTE, + image_uri=_TEST_SERVING_CONTAINER_IMAGE, + predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE, + health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE, ) true_managed_model = gca_model.Model( - display_name=test_training_jobs._TEST_MODEL_DISPLAY_NAME, + display_name=_TEST_MODEL_DISPLAY_NAME, container_spec=true_container_spec, version_aliases=["default"], ) @@ -264,18 +303,18 @@ def test_dataset_create_to_model_predict( fraction_split=true_fraction_split, dataset_id=my_dataset.name, gcs_destination=gca_io.GcsDestination( - output_uri_prefix=test_training_jobs._TEST_BASE_OUTPUT_DIR + output_uri_prefix=_TEST_BASE_OUTPUT_DIR ), ) true_training_pipeline = gca_training_pipeline.TrainingPipeline( - display_name=test_training_jobs._TEST_DISPLAY_NAME, + display_name=_TEST_JOB_DISPLAY_NAME, training_task_definition=schema.training_job.definition.custom_task, training_task_inputs=json_format.ParseDict( { "worker_pool_specs": [true_worker_pool_spec], "base_output_directory": { - "output_uri_prefix": test_training_jobs._TEST_BASE_OUTPUT_DIR + "output_uri_prefix": _TEST_BASE_OUTPUT_DIR }, }, struct_pb2.Value(), @@ -295,7 +334,8 @@ def test_dataset_create_to_model_predict( ) mock_model_service_get.assert_called_once_with( - name=test_training_jobs._TEST_MODEL_NAME, retry=base._DEFAULT_RETRY + name=test_constants.TrainingJobConstants._TEST_MODEL_NAME, + retry=base._DEFAULT_RETRY, ) assert model_from_job._gca_resource is mock_model_service_get.return_value @@ -324,53 +364,53 @@ def test_dataset_create_to_model_predict_with_pipeline_fail( sync = False aiplatform.init( - project=test_datasets._TEST_PROJECT, - staging_bucket=test_training_jobs._TEST_BUCKET_NAME, - credentials=test_training_jobs._TEST_CREDENTIALS, + project=test_constants.ProjectConstants._TEST_PROJECT, + staging_bucket=test_constants.TrainingJobConstants._TEST_BUCKET_NAME, + 
credentials=_TEST_CREDENTIALS, encryption_spec_key_name=_TEST_ENCRYPTION_KEY_NAME, ) my_dataset = aiplatform.ImageDataset.create( - display_name=test_datasets._TEST_DISPLAY_NAME, + display_name=test_constants.DatasetConstants._TEST_DISPLAY_NAME, sync=sync, create_request_timeout=None, ) my_dataset.import_data( - gcs_source=test_datasets._TEST_SOURCE_URI_GCS, - import_schema_uri=test_datasets._TEST_IMPORT_SCHEMA_URI, - data_item_labels=test_datasets._TEST_DATA_LABEL_ITEMS, + gcs_source=_TEST_SOURCE_URI_GCS, + import_schema_uri=_TEST_IMPORT_SCHEMA_URI, + data_item_labels=_TEST_DATA_LABEL_ITEMS, sync=sync, import_request_timeout=None, ) job = aiplatform.CustomTrainingJob( - display_name=test_training_jobs._TEST_DISPLAY_NAME, - script_path=test_training_jobs._TEST_LOCAL_SCRIPT_FILE_NAME, - container_uri=test_training_jobs._TEST_TRAINING_CONTAINER_IMAGE, - model_serving_container_image_uri=test_training_jobs._TEST_SERVING_CONTAINER_IMAGE, - model_serving_container_predict_route=test_training_jobs._TEST_SERVING_CONTAINER_PREDICTION_ROUTE, - model_serving_container_health_route=test_training_jobs._TEST_SERVING_CONTAINER_HEALTH_ROUTE, + display_name=_TEST_JOB_DISPLAY_NAME, + script_path=test_constants.TrainingJobConstants._TEST_LOCAL_SCRIPT_FILE_NAME, + container_uri=test_constants.TrainingJobConstants._TEST_TRAINING_CONTAINER_IMAGE, + model_serving_container_image_uri=_TEST_SERVING_CONTAINER_IMAGE, + model_serving_container_predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE, + model_serving_container_health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE, ) created_endpoint = models.Endpoint.create( - display_name=test_endpoints._TEST_DISPLAY_NAME, + display_name=test_constants.EndpointConstants._TEST_DISPLAY_NAME, sync=sync, create_request_timeout=None, ) model_from_job = job.run( dataset=my_dataset, - base_output_dir=test_training_jobs._TEST_BASE_OUTPUT_DIR, - args=test_training_jobs._TEST_RUN_ARGS, + base_output_dir=_TEST_BASE_OUTPUT_DIR, + args=test_constants.TrainingJobConstants._TEST_RUN_ARGS, replica_count=1, - machine_type=test_training_jobs._TEST_MACHINE_TYPE, - accelerator_type=test_training_jobs._TEST_ACCELERATOR_TYPE, - accelerator_count=test_training_jobs._TEST_ACCELERATOR_COUNT, - model_display_name=test_training_jobs._TEST_MODEL_DISPLAY_NAME, - training_fraction_split=test_training_jobs._TEST_TRAINING_FRACTION_SPLIT, - validation_fraction_split=test_training_jobs._TEST_VALIDATION_FRACTION_SPLIT, - test_fraction_split=test_training_jobs._TEST_TEST_FRACTION_SPLIT, + machine_type=test_constants.TrainingJobConstants._TEST_MACHINE_TYPE, + accelerator_type=test_constants.TrainingJobConstants._TEST_ACCELERATOR_TYPE, + accelerator_count=test_constants.TrainingJobConstants._TEST_ACCELERATOR_COUNT, + model_display_name=_TEST_MODEL_DISPLAY_NAME, + training_fraction_split=_TEST_TRAINING_FRACTION_SPLIT, + validation_fraction_split=_TEST_VALIDATION_FRACTION_SPLIT, + test_fraction_split=_TEST_TEST_FRACTION_SPLIT, sync=sync, create_request_timeout=None, ) @@ -385,75 +425,77 @@ def test_dataset_create_to_model_predict_with_pipeline_fail( created_endpoint.wait() expected_dataset = gca_dataset.Dataset( - display_name=test_datasets._TEST_DISPLAY_NAME, - metadata_schema_uri=test_datasets._TEST_METADATA_SCHEMA_URI_NONTABULAR, - metadata=test_datasets._TEST_NONTABULAR_DATASET_METADATA, + display_name=test_constants.DatasetConstants._TEST_DISPLAY_NAME, + metadata_schema_uri=test_constants.DatasetConstants._TEST_METADATA_SCHEMA_URI_NONTABULAR, + 
metadata=test_constants.DatasetConstants._TEST_NONTABULAR_DATASET_METADATA, encryption_spec=_TEST_ENCRYPTION_SPEC, ) expected_import_config = gca_dataset.ImportDataConfig( - gcs_source=gca_io.GcsSource(uris=[test_datasets._TEST_SOURCE_URI_GCS]), - import_schema_uri=test_datasets._TEST_IMPORT_SCHEMA_URI, - data_item_labels=test_datasets._TEST_DATA_LABEL_ITEMS, + gcs_source=gca_io.GcsSource(uris=[_TEST_SOURCE_URI_GCS]), + import_schema_uri=_TEST_IMPORT_SCHEMA_URI, + data_item_labels=_TEST_DATA_LABEL_ITEMS, ) create_dataset_mock.assert_called_once_with( - parent=test_datasets._TEST_PARENT, + parent=test_constants.ProjectConstants._TEST_PARENT, dataset=expected_dataset, - metadata=test_datasets._TEST_REQUEST_METADATA, + metadata=_TEST_REQUEST_METADATA, timeout=None, ) import_data_mock.assert_called_once_with( - name=test_datasets._TEST_NAME, + name=_TEST_NAME, import_configs=[expected_import_config], timeout=None, ) - expected_dataset.name = test_datasets._TEST_NAME + expected_dataset.name = _TEST_NAME assert my_dataset._gca_resource == expected_dataset mock_python_package_to_gcs.assert_called_once_with( - gcs_staging_dir=test_training_jobs._TEST_BUCKET_NAME, - project=test_training_jobs._TEST_PROJECT, + gcs_staging_dir=test_constants.TrainingJobConstants._TEST_BUCKET_NAME, + project=test_constants.ProjectConstants._TEST_PROJECT, credentials=initializer.global_config.credentials, ) - true_args = test_training_jobs._TEST_RUN_ARGS + true_args = test_constants.TrainingJobConstants._TEST_RUN_ARGS true_worker_pool_spec = { - "replica_count": test_training_jobs._TEST_REPLICA_COUNT, + "replica_count": test_constants.TrainingJobConstants._TEST_REPLICA_COUNT, "machine_spec": { - "machine_type": test_training_jobs._TEST_MACHINE_TYPE, - "accelerator_type": test_training_jobs._TEST_ACCELERATOR_TYPE, - "accelerator_count": test_training_jobs._TEST_ACCELERATOR_COUNT, + "machine_type": test_constants.TrainingJobConstants._TEST_MACHINE_TYPE, + "accelerator_type": test_constants.TrainingJobConstants._TEST_ACCELERATOR_TYPE, + "accelerator_count": test_constants.TrainingJobConstants._TEST_ACCELERATOR_COUNT, }, "disk_spec": { - "boot_disk_type": test_training_jobs._TEST_BOOT_DISK_TYPE_DEFAULT, - "boot_disk_size_gb": test_training_jobs._TEST_BOOT_DISK_SIZE_GB_DEFAULT, + "boot_disk_type": _TEST_BOOT_DISK_TYPE_DEFAULT, + "boot_disk_size_gb": _TEST_BOOT_DISK_SIZE_GB_DEFAULT, }, "python_package_spec": { - "executor_image_uri": test_training_jobs._TEST_TRAINING_CONTAINER_IMAGE, - "python_module": test_training_jobs._TEST_MODULE_NAME, - "package_uris": [test_training_jobs._TEST_OUTPUT_PYTHON_PACKAGE_PATH], + "executor_image_uri": test_constants.TrainingJobConstants._TEST_TRAINING_CONTAINER_IMAGE, + "python_module": test_constants.TrainingJobConstants._TEST_MODULE_NAME, + "package_uris": [ + test_constants.TrainingJobConstants._TEST_OUTPUT_PYTHON_PACKAGE_PATH + ], "args": true_args, }, } true_fraction_split = gca_training_pipeline.FractionSplit( - training_fraction=test_training_jobs._TEST_TRAINING_FRACTION_SPLIT, - validation_fraction=test_training_jobs._TEST_VALIDATION_FRACTION_SPLIT, - test_fraction=test_training_jobs._TEST_TEST_FRACTION_SPLIT, + training_fraction=_TEST_TRAINING_FRACTION_SPLIT, + validation_fraction=_TEST_VALIDATION_FRACTION_SPLIT, + test_fraction=_TEST_TEST_FRACTION_SPLIT, ) true_container_spec = gca_model.ModelContainerSpec( - image_uri=test_training_jobs._TEST_SERVING_CONTAINER_IMAGE, - predict_route=test_training_jobs._TEST_SERVING_CONTAINER_PREDICTION_ROUTE, - 
health_route=test_training_jobs._TEST_SERVING_CONTAINER_HEALTH_ROUTE, + image_uri=_TEST_SERVING_CONTAINER_IMAGE, + predict_route=_TEST_SERVING_CONTAINER_PREDICTION_ROUTE, + health_route=_TEST_SERVING_CONTAINER_HEALTH_ROUTE, ) true_managed_model = gca_model.Model( - display_name=test_training_jobs._TEST_MODEL_DISPLAY_NAME, + display_name=_TEST_MODEL_DISPLAY_NAME, container_spec=true_container_spec, encryption_spec=_TEST_ENCRYPTION_SPEC, version_aliases=["default"], @@ -463,18 +505,18 @@ def test_dataset_create_to_model_predict_with_pipeline_fail( fraction_split=true_fraction_split, dataset_id=my_dataset.name, gcs_destination=gca_io.GcsDestination( - output_uri_prefix=test_training_jobs._TEST_BASE_OUTPUT_DIR + output_uri_prefix=_TEST_BASE_OUTPUT_DIR ), ) true_training_pipeline = gca_training_pipeline.TrainingPipeline( - display_name=test_training_jobs._TEST_DISPLAY_NAME, + display_name=_TEST_JOB_DISPLAY_NAME, training_task_definition=schema.training_job.definition.custom_task, training_task_inputs=json_format.ParseDict( { "worker_pool_specs": [true_worker_pool_spec], "base_output_directory": { - "output_uri_prefix": test_training_jobs._TEST_BASE_OUTPUT_DIR + "output_uri_prefix": _TEST_BASE_OUTPUT_DIR }, }, struct_pb2.Value(), diff --git a/tests/unit/aiplatform/test_endpoints.py b/tests/unit/aiplatform/test_endpoints.py index 90d6da8455..ff365439d8 100644 --- a/tests/unit/aiplatform/test_endpoints.py +++ b/tests/unit/aiplatform/test_endpoints.py @@ -52,50 +52,44 @@ io as gca_io, ) +import constants as test_constants -_TEST_PROJECT = "test-project" + +_TEST_PROJECT = test_constants.ProjectConstants._TEST_PROJECT _TEST_PROJECT_2 = "test-project-2" -_TEST_LOCATION = "us-central1" +_TEST_LOCATION = test_constants.ProjectConstants._TEST_LOCATION _TEST_LOCATION_2 = "europe-west4" -_TEST_DISPLAY_NAME = "test-display-name" -_TEST_DISPLAY_NAME_2 = "test-display-name-2" -_TEST_DISPLAY_NAME_3 = "test-display-name-3" -_TEST_ID = "1028944691210842416" -_TEST_ID_2 = "4366591682456584192" -_TEST_ID_3 = "5820582938582924817" +_TEST_DISPLAY_NAME = test_constants.EndpointConstants._TEST_DISPLAY_NAME +_TEST_DISPLAY_NAME_2 = test_constants.EndpointConstants._TEST_DISPLAY_NAME_2 +_TEST_DISPLAY_NAME_3 = test_constants.EndpointConstants._TEST_DISPLAY_NAME_3 +_TEST_ID = test_constants.EndpointConstants._TEST_ID +_TEST_ID_2 = test_constants.EndpointConstants._TEST_ID_2 +_TEST_ID_3 = test_constants.EndpointConstants._TEST_ID_3 _TEST_DESCRIPTION = "test-description" _TEST_REQUEST_METADATA = () _TEST_TIMEOUT = None -_TEST_ENDPOINT_NAME = ( - f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/endpoints/{_TEST_ID}" -) +_TEST_ENDPOINT_NAME = test_constants.EndpointConstants._TEST_ENDPOINT_NAME _TEST_ENDPOINT_NAME_ALT_LOCATION = ( f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION_2}/endpoints/{_TEST_ID}" ) -_TEST_PARENT = f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}" -_TEST_MODEL_NAME = ( - f"projects/{_TEST_PROJECT}/locations/{_TEST_LOCATION}/models/{_TEST_ID}" -) +_TEST_PARENT = test_constants.ProjectConstants._TEST_PARENT +_TEST_MODEL_NAME = test_constants.EndpointConstants._TEST_MODEL_NAME -_TEST_VERSION_ID = "1" +_TEST_VERSION_ID = test_constants.EndpointConstants._TEST_VERSION_ID _TEST_NETWORK = f"projects/{_TEST_PROJECT}/global/networks/{_TEST_ID}" -_TEST_MODEL_ID = "1028944691210842416" -_TEST_PREDICTION = [[1.0, 2.0, 3.0], [3.0, 3.0, 1.0]] +_TEST_MODEL_ID = test_constants.EndpointConstants._TEST_MODEL_ID +_TEST_PREDICTION = test_constants.EndpointConstants._TEST_PREDICTION _TEST_INSTANCES = [[1.0, 
2.0, 3.0], [1.0, 3.0, 4.0]] _TEST_CREDENTIALS = mock.Mock(spec=auth_credentials.AnonymousCredentials()) -_TEST_SERVICE_ACCOUNT = "vinnys@my-project.iam.gserviceaccount.com" +_TEST_SERVICE_ACCOUNT = test_constants.ProjectConstants._TEST_SERVICE_ACCOUNT -_TEST_DEPLOYED_MODELS = [ - gca_endpoint.DeployedModel(id=_TEST_ID, display_name=_TEST_DISPLAY_NAME), - gca_endpoint.DeployedModel(id=_TEST_ID_2, display_name=_TEST_DISPLAY_NAME_2), - gca_endpoint.DeployedModel(id=_TEST_ID_3, display_name=_TEST_DISPLAY_NAME_3), -] +_TEST_DEPLOYED_MODELS = test_constants.EndpointConstants._TEST_DEPLOYED_MODELS -_TEST_TRAFFIC_SPLIT = {_TEST_ID: 0, _TEST_ID_2: 100, _TEST_ID_3: 0} +_TEST_TRAFFIC_SPLIT = test_constants.EndpointConstants._TEST_TRAFFIC_SPLIT _TEST_LONG_TRAFFIC_SPLIT = { "m1": 40, diff --git a/tests/unit/aiplatform/test_initializer.py b/tests/unit/aiplatform/test_initializer.py index 1d053060d0..f8b833d7d9 100644 --- a/tests/unit/aiplatform/test_initializer.py +++ b/tests/unit/aiplatform/test_initializer.py @@ -33,10 +33,11 @@ from google.cloud.aiplatform.compat.services import ( model_service_client, ) +import constants as test_constants -_TEST_PROJECT = "test-project" +_TEST_PROJECT = test_constants.ProjectConstants._TEST_PROJECT _TEST_PROJECT_2 = "test-project-2" -_TEST_LOCATION = "us-central1" +_TEST_LOCATION = test_constants.ProjectConstants._TEST_LOCATION _TEST_LOCATION_2 = "europe-west4" _TEST_INVALID_LOCATION = "test-invalid-location" _TEST_EXPERIMENT = "test-experiment" diff --git a/tests/unit/aiplatform/test_metadata_store.py b/tests/unit/aiplatform/test_metadata_store.py index 78d1618f4a..c3585b519e 100644 --- a/tests/unit/aiplatform/test_metadata_store.py +++ b/tests/unit/aiplatform/test_metadata_store.py @@ -28,7 +28,9 @@ from google.cloud.aiplatform.metadata import metadata_store from google.cloud.aiplatform_v1 import MetadataServiceClient from google.cloud.aiplatform_v1 import MetadataStore as GapicMetadataStore -from google.cloud.aiplatform.compat.types import encryption_spec as gca_encryption_spec +from google.cloud.aiplatform.compat.types import ( + encryption_spec as gca_encryption_spec, +) from google.cloud.aiplatform.compat.types import metadata_service # project diff --git a/tests/unit/aiplatform/test_prediction.py b/tests/unit/aiplatform/test_prediction.py index 23b33bc96a..0eced3daa6 100644 --- a/tests/unit/aiplatform/test_prediction.py +++ b/tests/unit/aiplatform/test_prediction.py @@ -57,14 +57,24 @@ from google.cloud.aiplatform.prediction import LocalEndpoint from google.cloud.aiplatform.prediction import handler_utils from google.cloud.aiplatform.prediction import local_endpoint -from google.cloud.aiplatform.prediction import model_server as model_server_module +from google.cloud.aiplatform.prediction import ( + model_server as model_server_module, +) from google.cloud.aiplatform.prediction.handler import Handler from google.cloud.aiplatform.prediction.handler import PredictionHandler from google.cloud.aiplatform.prediction.model_server import CprModelServer -from google.cloud.aiplatform.prediction.local_model import _DEFAULT_HANDLER_CLASS -from google.cloud.aiplatform.prediction.local_model import _DEFAULT_HANDLER_MODULE -from google.cloud.aiplatform.prediction.local_model import _DEFAULT_PYTHON_MODULE -from google.cloud.aiplatform.prediction.local_model import _DEFAULT_SDK_REQUIREMENTS +from google.cloud.aiplatform.prediction.local_model import ( + _DEFAULT_HANDLER_CLASS, +) +from google.cloud.aiplatform.prediction.local_model import ( + _DEFAULT_HANDLER_MODULE, +) 
+from google.cloud.aiplatform.prediction.local_model import ( + _DEFAULT_PYTHON_MODULE, +) +from google.cloud.aiplatform.prediction.local_model import ( + _DEFAULT_SDK_REQUIREMENTS, +) from google.cloud.aiplatform.prediction.predictor import Predictor from google.cloud.aiplatform.prediction.serializer import DefaultSerializer from google.cloud.aiplatform.utils import prediction_utils diff --git a/tests/unit/aiplatform/test_uploader_main.py b/tests/unit/aiplatform/test_uploader_main.py index 417c865c27..70a7e17ea5 100644 --- a/tests/unit/aiplatform/test_uploader_main.py +++ b/tests/unit/aiplatform/test_uploader_main.py @@ -23,8 +23,12 @@ from google.cloud import aiplatform from google.cloud.aiplatform import initializer from google.cloud.aiplatform.tensorboard import uploader_main -from google.cloud.aiplatform.compat.types import job_state as gca_job_state_compat -from google.cloud.aiplatform.compat.types import custom_job as gca_custom_job_compat +from google.cloud.aiplatform.compat.types import ( + job_state as gca_job_state_compat, +) +from google.cloud.aiplatform.compat.types import ( + custom_job as gca_custom_job_compat, +) from google.cloud.aiplatform.compat.services import ( job_service_client, )
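
Reviewer note, not part of the patch itself: the sketch below illustrates how a test module might consume the shared fixtures and constants this change centralizes in conftest.py and constants.py. It is a minimal, hypothetical example. The test class and method names are invented for illustration only; the fixture names (google_auth_mock, create_endpoint_mock, get_endpoint_mock) and the constants module are the ones defined in the files above, and pytest injects the conftest.py fixtures by name.

import pytest

from google.cloud import aiplatform
import constants as test_constants


@pytest.mark.usefixtures("google_auth_mock", "get_endpoint_mock")
class TestEndpointCreate:  # hypothetical test class, for illustration
    def test_create_endpoint(self, create_endpoint_mock):
        # conftest.py patches EndpointServiceClient.create_endpoint, so no
        # real API call is made; the mock LRO's result() returns the canned
        # Endpoint proto built from the shared test constants.
        aiplatform.init(
            project=test_constants.ProjectConstants._TEST_PROJECT,
            location=test_constants.ProjectConstants._TEST_LOCATION,
        )
        my_endpoint = aiplatform.Endpoint.create(
            display_name=test_constants.EndpointConstants._TEST_DISPLAY_NAME,
            sync=True,
        )
        create_endpoint_mock.assert_called_once()

Because the fixtures live in conftest.py, a module written this way needs no per-file mock setup or cross-module fixture imports of the kind this patch removes from test_end_to_end.py.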