
Commit

feat!: move to microgen (#61)
See UPGRADING.md
busunkim96 authored and dandhlee committed Nov 17, 2022
1 parent 0f97486 commit 6aa69df
Showing 87 changed files with 331 additions and 336 deletions.
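The diff follows the pattern described in UPGRADING.md for the 2.x microgenerator client: message classes move from `automl.types.*` to the top-level `automl` namespace, enums hang off their owning message (e.g. `automl.Model.DeploymentState`), path helpers such as `client.location_path()` give way to explicitly formatted resource names, timestamps are exposed as datetime-like objects instead of `seconds`/`nanos` fields, and service methods take keyword arguments or an explicit request object. A minimal before/after sketch of the pattern, using placeholder project and display-name values:

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Before (1.x): project_location = client.location_path("your-project-id", "us-central1")
    # After (2.x): resource names are plain formatted strings.
    project_location = "projects/your-project-id/locations/us-central1"

    # Before (1.x): metadata = automl.types.TextExtractionDatasetMetadata()
    # After (2.x): message classes live on the top-level module.
    metadata = automl.TextExtractionDatasetMetadata()
    dataset = automl.Dataset(
        display_name="my_dataset", text_extraction_dataset_metadata=metadata
    )

    # Before (1.x): client.create_dataset(project_location, dataset)
    # After (2.x): methods take keyword arguments (or a request object).
    created = client.create_dataset(parent=project_location, dataset=dataset)
    print(created.name)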
18 changes: 12 additions & 6 deletions automl/beta/batch_predict.py
@@ -27,21 +27,27 @@ def batch_predict(
     prediction_client = automl.PredictionServiceClient()
 
     # Get the full path of the model.
-    model_full_id = prediction_client.model_path(
+    model_full_id = automl.AutoMlClient.model_path(
         project_id, "us-central1", model_id
     )
 
-    gcs_source = automl.types.GcsSource(input_uris=[input_uri])
+    gcs_source = automl.GcsSource(input_uris=[input_uri])
 
-    input_config = automl.types.BatchPredictInputConfig(gcs_source=gcs_source)
-    gcs_destination = automl.types.GcsDestination(output_uri_prefix=output_uri)
-    output_config = automl.types.BatchPredictOutputConfig(
+    input_config = automl.BatchPredictInputConfig(gcs_source=gcs_source)
+    gcs_destination = automl.GcsDestination(output_uri_prefix=output_uri)
+    output_config = automl.BatchPredictOutputConfig(
         gcs_destination=gcs_destination
     )
     params = {}
 
+    request = automl.BatchPredictRequest(
+        name=model_full_id,
+        input_config=input_config,
+        output_config=output_config,
+        params=params
+    )
     response = prediction_client.batch_predict(
-        model_full_id, input_config, output_config, params=params
+        request=request
     )
 
     print("Waiting for operation to complete...")
2 changes: 1 addition & 1 deletion automl/beta/cancel_operation.py
@@ -31,7 +31,7 @@ def sample_cancel_operation(project, operation_id):
 
     client = automl_v1beta1.AutoMlClient()
 
-    operations_client = client.transport._operations_client
+    operations_client = client._transport.operations_client
 
     # project = '[Google Cloud Project ID]'
     # operation_id = '[Operation ID]'
2 changes: 1 addition & 1 deletion automl/beta/delete_dataset.py
@@ -24,7 +24,7 @@ def delete_dataset(project_id="YOUR_PROJECT_ID", dataset_id="YOUR_DATASET_ID"):
     dataset_full_id = client.dataset_path(
         project_id, "us-central1", dataset_id
     )
-    response = client.delete_dataset(dataset_full_id)
+    response = client.delete_dataset(name=dataset_full_id)
 
     print("Dataset deleted. {}".format(response.result()))
     # [END automl_delete_dataset_beta]
8 changes: 4 additions & 4 deletions automl/beta/delete_dataset_test.py
@@ -27,13 +27,13 @@
 @pytest.fixture(scope="function")
 def dataset_id():
     client = automl.AutoMlClient()
-    project_location = client.location_path(PROJECT_ID, "us-central1")
+    project_location = f"projects/{PROJECT_ID}/locations/us-central1"
     display_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32]
-    metadata = automl.types.TextExtractionDatasetMetadata()
-    dataset = automl.types.Dataset(
+    metadata = automl.TextExtractionDatasetMetadata()
+    dataset = automl.Dataset(
         display_name=display_name, text_extraction_dataset_metadata=metadata
     )
-    response = client.create_dataset(project_location, dataset)
+    response = client.create_dataset(parent=project_location, dataset=dataset)
     dataset_id = response.name.split("/")[-1]
 
     yield dataset_id
2 changes: 1 addition & 1 deletion automl/beta/delete_model.py
@@ -25,7 +25,7 @@ def delete_model(project_id, model_id):
     client = automl.AutoMlClient()
     # Get the full path of the model.
     model_full_id = client.model_path(project_id, "us-central1", model_id)
-    response = client.delete_model(model_full_id)
+    response = client.delete_model(name=model_full_id)
 
     print("Model deleted. {}".format(response.result()))
     # [END automl_delete_model_beta]
8 changes: 3 additions & 5 deletions automl/beta/get_model.py
@@ -25,10 +25,10 @@ def get_model(project_id, model_id):
     client = automl.AutoMlClient()
     # Get the full path of the model.
     model_full_id = client.model_path(project_id, "us-central1", model_id)
-    model = client.get_model(model_full_id)
+    model = client.get_model(name=model_full_id)
 
     # Retrieve deployment state.
-    if model.deployment_state == automl.enums.Model.DeploymentState.DEPLOYED:
+    if model.deployment_state == automl.Model.DeploymentState.DEPLOYED:
         deployment_state = "deployed"
     else:
         deployment_state = "undeployed"
@@ -37,8 +37,6 @@ def get_model(project_id, model_id):
     print("Model name: {}".format(model.name))
     print("Model id: {}".format(model.name.split("/")[-1]))
     print("Model display name: {}".format(model.display_name))
-    print("Model create time:")
-    print("\tseconds: {}".format(model.create_time.seconds))
-    print("\tnanos: {}".format(model.create_time.nanos))
+    print("Model create time: {}".format(model.create_time))
     print("Model deployment state: {}".format(deployment_state))
     # [END automl_get_model_beta]
11 changes: 4 additions & 7 deletions automl/beta/get_model_evaluation.py
@@ -26,18 +26,15 @@ def get_model_evaluation(project_id, model_id, model_evaluation_id):
 
     client = automl.AutoMlClient()
     # Get the full path of the model evaluation.
-    model_evaluation_full_id = client.model_evaluation_path(
-        project_id, "us-central1", model_id, model_evaluation_id
-    )
+    model_path = client.model_path(project_id, "us-central1", model_id)
+    model_evaluation_full_id = f"{model_path}/modelEvaluations/{model_evaluation_id}"
 
     # Get complete detail of the model evaluation.
-    response = client.get_model_evaluation(model_evaluation_full_id)
+    response = client.get_model_evaluation(name=model_evaluation_full_id)
 
     print("Model evaluation name: {}".format(response.name))
     print("Model annotation spec id: {}".format(response.annotation_spec_id))
-    print("Create Time:")
-    print("\tseconds: {}".format(response.create_time.seconds))
-    print("\tnanos: {}".format(response.create_time.nanos / 1e9))
+    print("Create Time: {}".format(response.create_time))
     print(
         "Evaluation example count: {}".format(response.evaluated_example_count)
     )
9 changes: 6 additions & 3 deletions automl/beta/get_model_evaluation_test.py
@@ -27,9 +27,12 @@
 def model_evaluation_id():
     client = automl.AutoMlClient()
     model_full_id = client.model_path(PROJECT_ID, "us-central1", MODEL_ID)
-    generator = client.list_model_evaluations(model_full_id, "").pages
-    page = next(generator)
-    evaluation = page.next()
+    request = automl.ListModelEvaluationsRequest(
+        parent=model_full_id,
+        filter=""
+    )
+    evaluations = client.list_model_evaluations(request=request)
+    evaluation = next(iter(evaluations))
     model_evaluation_id = evaluation.name.split(
         "{}/modelEvaluations/".format(MODEL_ID)
     )[1].split("\n")[0]
2 changes: 1 addition & 1 deletion automl/beta/get_operation_status.py
@@ -24,7 +24,7 @@ def get_operation_status(
     client = automl.AutoMlClient()
 
     # Get the latest state of a long-running operation.
-    response = client.transport._operations_client.get_operation(
+    response = client._transport.operations_client.get_operation(
         operation_full_id
     )
 
4 changes: 2 additions & 2 deletions automl/beta/get_operation_status_test.py
@@ -25,8 +25,8 @@
 @pytest.fixture(scope="function")
 def operation_id():
     client = automl.AutoMlClient()
-    project_location = client.location_path(PROJECT_ID, "us-central1")
-    generator = client.transport._operations_client.list_operations(
+    project_location = f"projects/{PROJECT_ID}/locations/us-central1"
+    generator = client._transport.operations_client.list_operations(
         project_location, filter_=""
     ).pages
     page = next(generator)
6 changes: 3 additions & 3 deletions automl/beta/import_dataset.py
@@ -30,10 +30,10 @@ def import_dataset(
     )
     # Get the multiple Google Cloud Storage URIs
     input_uris = path.split(",")
-    gcs_source = automl.types.GcsSource(input_uris=input_uris)
-    input_config = automl.types.InputConfig(gcs_source=gcs_source)
+    gcs_source = automl.GcsSource(input_uris=input_uris)
+    input_config = automl.InputConfig(gcs_source=gcs_source)
     # Import data from the input URI
-    response = client.import_data(dataset_full_id, input_config)
+    response = client.import_data(name=dataset_full_id, input_config=input_config)
 
     print("Processing import...")
     print("Data imported. {}".format(response.result()))
9 changes: 4 additions & 5 deletions automl/beta/list_datasets.py
@@ -22,19 +22,18 @@ def list_datasets(project_id="YOUR_PROJECT_ID"):
     """List datasets."""
     client = automl.AutoMlClient()
     # A resource that represents Google Cloud Platform location.
-    project_location = client.location_path(project_id, "us-central1")
+    project_location = f"projects/{project_id}/locations/us-central1"
 
     # List all the datasets available in the region.
-    response = client.list_datasets(project_location, "")
+    request = automl.ListDatasetsRequest(parent=project_location, filter="")
+    response = client.list_datasets(request=request)
 
     print("List of datasets:")
     for dataset in response:
         print("Dataset name: {}".format(dataset.name))
         print("Dataset id: {}".format(dataset.name.split("/")[-1]))
         print("Dataset display name: {}".format(dataset.display_name))
-        print("Dataset create time:")
-        print("\tseconds: {}".format(dataset.create_time.seconds))
-        print("\tnanos: {}".format(dataset.create_time.nanos))
+        print("Dataset create time: {}".format(dataset.create_time))
         # [END automl_video_object_tracking_list_datasets_beta]
 
         print(
11 changes: 5 additions & 6 deletions automl/beta/list_models.py
@@ -23,15 +23,16 @@ def list_models(project_id):
 
     client = automl.AutoMlClient()
     # A resource that represents Google Cloud Platform location.
-    project_location = client.location_path(project_id, "us-central1")
-    response = client.list_models(project_location, "")
+    project_location = f"projects/{project_id}/locations/us-central1"
+    request = automl.ListModelsRequest(parent=project_location, filter="")
+    response = client.list_models(request=request)
 
     print("List of models:")
     for model in response:
         # Display the model information.
         if (
             model.deployment_state
-            == automl.enums.Model.DeploymentState.DEPLOYED
+            == automl.Model.DeploymentState.DEPLOYED
         ):
             deployment_state = "deployed"
         else:
@@ -40,8 +41,6 @@ def list_models(project_id):
         print("Model name: {}".format(model.name))
         print("Model id: {}".format(model.name.split("/")[-1]))
        print("Model display name: {}".format(model.display_name))
-        print("Model create time:")
-        print("\tseconds: {}".format(model.create_time.seconds))
-        print("\tnanos: {}".format(model.create_time.nanos))
+        print("Model create time: {}".format(model.create_time))
         print("Model deployment state: {}".format(deployment_state))
     # [END automl_list_models_beta]
9 changes: 7 additions & 2 deletions automl/beta/set_endpoint.py
@@ -27,13 +27,18 @@ def set_endpoint(project_id):
 
     # A resource that represents Google Cloud Platform location.
     # project_id = 'YOUR_PROJECT_ID'
-    project_location = client.location_path(project_id, 'eu')
+    project_location = f"projects/{project_id}/locations/eu"
    # [END automl_set_endpoint]
 
     # List all the datasets available
     # Note: Create a dataset in `eu`, before calling `list_datasets`.
+    request = automl.ListDatasetsRequest(
+        parent=project_location,
+        filter=""
+    )
     response = client.list_datasets(
-        project_location, filter_='')
+        request=request
+    )
 
     for dataset in response:
         print(dataset)
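The hunk above begins after the AutoMl client has already been constructed, so the part that actually selects the `eu` endpoint is not shown. A minimal sketch of how that is typically done, assuming the standard `client_options` mechanism and the `eu-automl.googleapis.com:443` regional endpoint (neither appears in this hunk; the project ID is a placeholder):

    from google.cloud import automl_v1beta1 as automl

    # Assumed: route requests to the EU regional endpoint.
    client_options = {"api_endpoint": "eu-automl.googleapis.com:443"}
    client = automl.AutoMlClient(client_options=client_options)

    project_id = "your-project-id"  # placeholder value
    project_location = f"projects/{project_id}/locations/eu"

    # List datasets in the EU region with the microgen request object.
    request = automl.ListDatasetsRequest(parent=project_location, filter="")
    for dataset in client.list_datasets(request=request):
        print(dataset.display_name)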
8 changes: 4 additions & 4 deletions automl/beta/video_classification_create_dataset.py
@@ -25,15 +25,15 @@ def create_dataset(
     client = automl.AutoMlClient()
 
     # A resource that represents Google Cloud Platform location.
-    project_location = client.location_path(project_id, "us-central1")
-    metadata = automl.types.VideoClassificationDatasetMetadata()
-    dataset = automl.types.Dataset(
+    project_location = f"projects/{project_id}/locations/us-central1"
+    metadata = automl.VideoClassificationDatasetMetadata()
+    dataset = automl.Dataset(
         display_name=display_name,
         video_classification_dataset_metadata=metadata,
     )
 
     # Create a dataset with the dataset metadata in the region.
-    created_dataset = client.create_dataset(project_location, dataset)
+    created_dataset = client.create_dataset(parent=project_location, dataset=dataset)
 
     # Display the dataset information
     print("Dataset name: {}".format(created_dataset.name))
2 changes: 1 addition & 1 deletion automl/beta/video_classification_create_dataset_test.py
@@ -34,7 +34,7 @@ def teardown():
     dataset_full_id = client.dataset_path(
         PROJECT_ID, "us-central1", DATASET_ID
     )
-    response = client.delete_dataset(dataset_full_id)
+    response = client.delete_dataset(name=dataset_full_id)
     response.result()
 
 
9 changes: 5 additions & 4 deletions automl/beta/video_classification_create_model.py
@@ -25,18 +25,19 @@ def create_model(
     client = automl.AutoMlClient()
 
     # A resource that represents Google Cloud Platform location.
-    project_location = client.location_path(project_id, "us-central1")
+    project_location = f"projects/{project_id}/locations/us-central1"
     # Leave model unset to use the default base model provided by Google
-    metadata = automl.types.VideoClassificationModelMetadata()
-    model = automl.types.Model(
+    metadata = automl.VideoClassificationModelMetadata()
+    model = automl.Model(
         display_name=display_name,
         dataset_id=dataset_id,
         video_classification_model_metadata=metadata,
     )
 
     # Create a model with the model metadata in the region.
-    response = client.create_model(project_location, model)
+    response = client.create_model(parent=project_location, model=model)
+
     print("Training operation name: {}".format(response.operation.name))
     print("Training started...")
     # [END automl_video_classification_create_model_beta]
     return response
3 changes: 2 additions & 1 deletion automl/beta/video_classification_create_model_test.py
@@ -31,14 +31,15 @@ def teardown():
 
     # Cancel the training operation
     client = automl.AutoMlClient()
-    client.transport._operations_client.cancel_operation(OPERATION_ID)
+    client._transport.operations_client.cancel_operation(OPERATION_ID)
 
 
 def test_video_classification_create_model(capsys):
     model_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32]
     video_classification_create_model.create_model(
         PROJECT_ID, DATASET_ID, model_name
     )
+
     out, _ = capsys.readouterr()
     assert "Training started" in out
 
8 changes: 4 additions & 4 deletions automl/beta/video_object_tracking_create_dataset.py
@@ -23,15 +23,15 @@ def create_dataset(
     client = automl.AutoMlClient()
 
     # A resource that represents Google Cloud Platform location.
-    project_location = client.location_path(project_id, "us-central1")
-    metadata = automl.types.VideoObjectTrackingDatasetMetadata()
-    dataset = automl.types.Dataset(
+    project_location = f"projects/{project_id}/locations/us-central1"
+    metadata = automl.VideoObjectTrackingDatasetMetadata()
+    dataset = automl.Dataset(
         display_name=display_name,
         video_object_tracking_dataset_metadata=metadata,
     )
 
     # Create a dataset with the dataset metadata in the region.
-    created_dataset = client.create_dataset(project_location, dataset)
+    created_dataset = client.create_dataset(parent=project_location, dataset=dataset)
     # Display the dataset information
     print("Dataset name: {}".format(created_dataset.name))
     print("Dataset id: {}".format(created_dataset.name.split("/")[-1]))
2 changes: 1 addition & 1 deletion automl/beta/video_object_tracking_create_dataset_test.py
@@ -33,7 +33,7 @@ def teardown():
     dataset_full_id = client.dataset_path(
         PROJECT_ID, "us-central1", DATASET_ID
     )
-    response = client.delete_dataset(dataset_full_id)
+    response = client.delete_dataset(name=dataset_full_id)
     response.result()
 
 
8 changes: 4 additions & 4 deletions automl/beta/video_object_tracking_create_model.py
@@ -25,17 +25,17 @@ def create_model(
     client = automl.AutoMlClient()
 
     # A resource that represents Google Cloud Platform loacation.
-    project_location = client.location_path(project_id, "us-central1")
+    project_location = f"projects/{project_id}/locations/us-central1"
     # Leave model unset to use the default base model provided by Google
-    metadata = automl.types.VideoObjectTrackingModelMetadata()
-    model = automl.types.Model(
+    metadata = automl.VideoObjectTrackingModelMetadata()
+    model = automl.Model(
         display_name=display_name,
         dataset_id=dataset_id,
         video_object_tracking_model_metadata=metadata,
     )
 
     # Create a model with the model metadata in the region.
-    response = client.create_model(project_location, model)
+    response = client.create_model(parent=project_location, model=model)
 
     print("Training operation name: {}".format(response.operation.name))
     print("Training started...")
2 changes: 1 addition & 1 deletion automl/beta/video_object_tracking_create_model_test.py
@@ -31,7 +31,7 @@ def teardown():
 
     # Cancel the training operation
     client = automl.AutoMlClient()
-    client.transport._operations_client.cancel_operation(OPERATION_ID)
+    client._transport.operations_client.cancel_operation(OPERATION_ID)
 
 
 def test_video_classification_create_model(capsys):