chore: cleanup on dataset system tests (#1214)
sararob committed May 10, 2022
1 parent 50bdb01 commit 243c6f6
Showing 1 changed file with 15 additions and 11 deletions.
26 changes: 15 additions & 11 deletions tests/system/aiplatform/test_dataset.py
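The recurring change in this diff replaces ad-hoc f-string display names (built inline with uuid.uuid4()) with the shared _make_display_name helper inherited from the e2e_base system-test base class. A minimal sketch of what such a helper presumably looks like; the real implementation lives in tests/system/aiplatform/e2e_base.py and is not part of this diff, so the prefix and separator below are assumptions:

import uuid


class TestEndToEnd:
    """Hypothetical excerpt of the shared system-test base class."""

    _temp_prefix = "temp_vertex_sdk_e2e"  # assumed prefix; the real value may differ

    def _make_display_name(self, key: str) -> str:
        # Unique, self-describing resource name: prefix, test key, random suffix.
        return f"{self._temp_prefix}-{key}-{uuid.uuid4()}"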
@@ -131,7 +131,7 @@ def setup_method(self):

     @pytest.fixture()
     def storage_client(self):
-        yield storage.Client(project=e2e_base._PROJECT)
+        yield storage.Client(project=_TEST_PROJECT)

     @pytest.fixture()
     def staging_bucket(self, storage_client):
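The switch from e2e_base._PROJECT to _TEST_PROJECT suggests a module-level alias defined near the top of test_dataset.py, outside this diff; presumably something like:

from tests.system.aiplatform import e2e_base  # import path assumed

_TEST_PROJECT = e2e_base._PROJECT  # one place to change the test project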
@@ -174,7 +174,7 @@ def test_get_new_dataset_and_import(self, dataset_gapic_client):

         try:
             text_dataset = aiplatform.TextDataset.create(
-                display_name=f"temp_sdk_integration_test_create_text_dataset_{uuid.uuid4()}",
+                display_name=self._make_display_name(key="get_new_dataset_and_import"),
             )

             my_dataset = aiplatform.TextDataset(dataset_name=text_dataset.name)
@@ -189,7 +189,6 @@ def test_get_new_dataset_and_import(self, dataset_gapic_client):
             my_dataset.import_data(
                 gcs_source=_TEST_TEXT_ENTITY_EXTRACTION_GCS_SOURCE,
                 import_schema_uri=_TEST_TEXT_ENTITY_IMPORT_SCHEMA,
-                import_request_timeout=600.0,
             )

             data_items_post_import = dataset_gapic_client.list_data_items(
@@ -198,8 +197,7 @@ def test_get_new_dataset_and_import(self, dataset_gapic_client):

             assert len(list(data_items_post_import)) == 469
         finally:
-            if text_dataset is not None:
-                text_dataset.delete()
+            text_dataset.delete()

     @vpcsc_config.skip_if_inside_vpcsc
     def test_create_and_import_image_dataset(self, dataset_gapic_client):
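The three hunks above cover the full lifecycle exercised by test_get_new_dataset_and_import: create a text dataset, re-fetch it by resource name, import entity-extraction data, verify the item count, and delete unconditionally in the finally block. A self-contained sketch of that flow, with placeholder project, bucket, and display-name values; the schema constant is assumed from the SDK's aiplatform.schema helpers rather than taken from this diff:

from google.cloud import aiplatform

aiplatform.init(project="my-project", location="us-central1")  # placeholder project

text_dataset = aiplatform.TextDataset.create(
    display_name="temp_sdk_integration-get_new_dataset_and_import-<uuid>",  # placeholder
)
try:
    # Re-fetch the dataset by resource name, as the test does.
    my_dataset = aiplatform.TextDataset(dataset_name=text_dataset.name)
    my_dataset.import_data(
        gcs_source="gs://my-bucket/text_entity_extraction.jsonl",  # placeholder source
        import_schema_uri=aiplatform.schema.dataset.ioformat.text.extraction,
    )
finally:
    text_dataset.delete()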
@@ -208,7 +206,9 @@ def test_create_and_import_image_dataset(self, dataset_gapic_client):

         try:
             img_dataset = aiplatform.ImageDataset.create(
-                display_name=f"temp_sdk_integration_create_and_import_dataset_{uuid.uuid4()}",
+                display_name=self._make_display_name(
+                    key="create_and_import_image_dataset"
+                ),
                 gcs_source=_TEST_IMAGE_OBJECT_DETECTION_GCS_SOURCE,
                 import_schema_uri=_TEST_IMAGE_OBJ_DET_IMPORT_SCHEMA,
                 create_request_timeout=None,
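Unlike the text test, test_create_and_import_image_dataset creates and imports in a single call: ImageDataset.create accepts gcs_source and import_schema_uri directly. A hedged sketch with placeholder values; again, the schema constant is an assumption based on the SDK's aiplatform.schema helpers:

from google.cloud import aiplatform

img_dataset = aiplatform.ImageDataset.create(
    display_name="temp_sdk_integration-create_and_import_image_dataset-<uuid>",  # placeholder
    gcs_source="gs://my-bucket/image_object_detection.jsonl",  # placeholder source
    import_schema_uri=aiplatform.schema.dataset.ioformat.image.bounding_box,
    create_request_timeout=None,  # None keeps the default request timeout
)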
@@ -230,7 +230,7 @@ def test_create_tabular_dataset(self):

         try:
             tabular_dataset = aiplatform.TabularDataset.create(
-                display_name=f"temp_sdk_integration_create_and_import_dataset_{uuid.uuid4()}",
+                display_name=self._make_display_name(key="create_tabular_dataset"),
                 gcs_source=[_TEST_TABULAR_CLASSIFICATION_GCS_SOURCE],
                 create_request_timeout=None,
             )
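Tabular datasets need no separate import step or schema URI; create simply points the dataset at the source files, and gcs_source takes a list, so several CSVs can back one dataset. A minimal sketch with a placeholder bucket:

from google.cloud import aiplatform

tabular_dataset = aiplatform.TabularDataset.create(
    display_name="temp_sdk_integration-create_tabular_dataset-<uuid>",  # placeholder
    gcs_source=["gs://my-bucket/tabular_classification.csv"],  # placeholder CSV list
    create_request_timeout=None,
)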
@@ -250,13 +250,15 @@ def test_create_tabular_dataset(self):
             tabular_dataset.delete()

     def test_create_tabular_dataset_from_dataframe(self, bigquery_dataset):
-        bq_staging_table = f"bq://{e2e_base._PROJECT}.{bigquery_dataset.dataset_id}.test_table{uuid.uuid4()}"
+        bq_staging_table = f"bq://{_TEST_PROJECT}.{bigquery_dataset.dataset_id}.test_table{uuid.uuid4()}"

         try:
             tabular_dataset = aiplatform.TabularDataset.create_from_dataframe(
                 df_source=_TEST_DATAFRAME,
                 staging_path=bq_staging_table,
-                display_name=f"temp_sdk_integration_create_and_import_dataset_from_dataframe{uuid.uuid4()}",
+                display_name=self._make_display_name(
+                    key="create_and_import_dataset_from_dataframe"
+                ),
             )

             """Use the Dataset.create_from_dataframe() method to create a new tabular dataset.
Expand All @@ -281,12 +283,14 @@ def test_create_tabular_dataset_from_dataframe_with_provided_schema(
created and references the BQ source."""

try:
bq_staging_table = f"bq://{e2e_base._PROJECT}.{bigquery_dataset.dataset_id}.test_table{uuid.uuid4()}"
bq_staging_table = f"bq://{_TEST_PROJECT}.{bigquery_dataset.dataset_id}.test_table{uuid.uuid4()}"

tabular_dataset = aiplatform.TabularDataset.create_from_dataframe(
df_source=_TEST_DATAFRAME,
staging_path=bq_staging_table,
display_name=f"temp_sdk_integration_create_and_import_dataset_from_dataframe{uuid.uuid4()}",
display_name=self._make_display_name(
key="create_and_import_dataset_from_dataframe"
),
bq_schema=_TEST_DATAFRAME_BQ_SCHEMA,
)

Expand Down
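Both dataframe tests stage a pandas DataFrame through a BigQuery table named by the bq:// staging path; the second test additionally pins the staging table's schema via bq_schema instead of letting it be inferred. A standalone sketch with placeholder project and dataset names; the repo's _TEST_DATAFRAME and _TEST_DATAFRAME_BQ_SCHEMA are defined outside this diff, so the values below are assumptions:

import uuid

import pandas as pd
from google.cloud import aiplatform, bigquery

df = pd.DataFrame({"feature_1": [1.0, 2.0], "target": [0, 1]})  # placeholder data

staging_path = f"bq://my-project.my_bq_dataset.test_table{uuid.uuid4()}"  # placeholder

tabular_dataset = aiplatform.TabularDataset.create_from_dataframe(
    df_source=df,
    staging_path=staging_path,
    display_name="temp_sdk_integration-dataset_from_dataframe-<uuid>",  # placeholder
    # Optional: explicit schema for the staging table, mirroring the second test.
    bq_schema=[
        bigquery.SchemaField("feature_1", "FLOAT64"),
        bigquery.SchemaField("target", "INTEGER"),
    ],
)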
