Skip to content

Commit

Permalink
chore: temporarily disable tabular batch prediction job test
Browse files Browse the repository at this point in the history
PiperOrigin-RevId: 523231672
  • Loading branch information
sararob authored and copybara-github committed Apr 10, 2023
1 parent 94a63b8 commit e55a177
Showing 1 changed file with 17 additions and 16 deletions.
33 changes: 17 additions & 16 deletions tests/system/aiplatform/test_e2e_tabular.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@

from google.cloud import aiplatform
from google.cloud.aiplatform.compat.types import (
job_state as gca_job_state,
# job_state as gca_job_state,
pipeline_state as gca_pipeline_state,
)
from tests.system.aiplatform import e2e_base
Expand Down Expand Up @@ -135,22 +135,23 @@ def test_end_to_end_tabular(self, shared_state):
automl_endpoint = automl_model.deploy(machine_type="n1-standard-4", sync=False)
shared_state["resources"].extend([automl_endpoint, custom_endpoint])

custom_batch_prediction_job = custom_model.batch_predict(
job_display_name=self._make_display_name("automl-housing-model"),
instances_format="csv",
machine_type="n1-standard-4",
gcs_source=dataset_gcs_source,
gcs_destination_prefix=f'gs://{shared_state["staging_bucket_name"]}/bp_results/',
sync=False,
)
# TODO(b/275569167) Uncomment this after timeout issue is resolved
# custom_batch_prediction_job = custom_model.batch_predict(
# job_display_name=self._make_display_name("automl-housing-model"),
# instances_format="csv",
# machine_type="n1-standard-4",
# gcs_source=dataset_gcs_source,
# gcs_destination_prefix=f'gs://{shared_state["staging_bucket_name"]}/bp_results/',
# sync=False,
# )

shared_state["resources"].append(custom_batch_prediction_job)
# shared_state["resources"].append(custom_batch_prediction_job)

in_progress_done_check = custom_job.done()
custom_job.wait_for_resource_creation()

automl_job.wait_for_resource_creation()
custom_batch_prediction_job.wait_for_resource_creation()
# custom_batch_prediction_job.wait_for_resource_creation()

# Send online prediction with same instance to both deployed models
# This sample is taken from an observation where median_house_value = 94600
Expand All @@ -170,7 +171,7 @@ def test_end_to_end_tabular(self, shared_state):

custom_prediction = custom_endpoint.predict([_INSTANCE], timeout=180.0)

custom_batch_prediction_job.wait()
# custom_batch_prediction_job.wait()

automl_endpoint.wait()
automl_prediction = automl_endpoint.predict(
Expand All @@ -193,10 +194,10 @@ def test_end_to_end_tabular(self, shared_state):
automl_job.state
== gca_pipeline_state.PipelineState.PIPELINE_STATE_SUCCEEDED
)
assert (
custom_batch_prediction_job.state
== gca_job_state.JobState.JOB_STATE_SUCCEEDED
)
# assert (
# custom_batch_prediction_job.state
# == gca_job_state.JobState.JOB_STATE_SUCCEEDED
# )

# Ensure a single prediction was returned
assert len(custom_prediction.predictions) == 1
Expand Down

0 comments on commit e55a177

Please sign in to comment.