/
test_model_upload.py
84 lines (66 loc) · 2.96 KB
/
test_model_upload.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
import pytest
from google.cloud import aiplatform
from google.cloud import storage
from tests.system.aiplatform import e2e_base
# GCS URI of a pre-trained Iris XGBoost model artifact (CI sample data)
# that the test below downloads locally and re-uploads to Vertex AI.
_XGBOOST_MODEL_URI = "gs://cloud-samples-data-us-central1/vertex-ai/google-cloud-aiplatform-ci-artifacts/models/iris_xgboost/model.bst"
@pytest.mark.usefixtures("tear_down_resources")
class TestModelUploadAndUpdate(e2e_base.TestEndToEnd):
    """End-to-end test: upload, deploy, and update an XGBoost model."""

    # Prefix used by e2e_base to name (and later clean up) test resources.
    _temp_prefix = "temp_vertex_sdk_e2e_model_upload_test"

    def test_upload_and_deploy_xgboost_model(self, shared_state):
        """Upload an XGBoost model from a local file and deploy it.

        Also verifies that ``Model.update`` changes display name, description
        and labels, that a second deployment to the same endpoint works, and
        that traffic can be split between the two deployed models.

        Args:
            shared_state: pytest fixture dict; resources appended to
                ``shared_state["resources"]`` are torn down by the
                ``tear_down_resources`` fixture.
        """
        aiplatform.init(
            project=e2e_base._PROJECT,
            location=e2e_base._LOCATION,
        )

        storage_client = storage.Client(project=e2e_base._PROJECT)
        model_blob = storage.Blob.from_string(
            uri=_XGBOOST_MODEL_URI, client=storage_client
        )
        # tempfile.mktemp() is deprecated and race-prone (the name is
        # reserved without creating the file). Create the file securely and
        # reuse its path; keep the ".my_model.xgb" suffix from the original
        # so the uploaded artifact name is unchanged.
        with tempfile.NamedTemporaryFile(
            suffix=".my_model.xgb", delete=False
        ) as tmp_file:
            model_path = tmp_file.name
        model_blob.download_to_filename(filename=model_path)

        model = aiplatform.Model.upload_xgboost_model_file(
            model_file_path=model_path,
        )
        shared_state["resources"] = [model]

        staging_bucket = storage.Blob.from_string(
            uri=model.uri, client=storage_client
        ).bucket
        # Checking that the bucket is auto-generated
        assert "-vertex-staging-" in staging_bucket.name

        # Currently we need to explicitly specify machine type.
        # See https://github.com/googleapis/python-aiplatform/issues/773
        endpoint = model.deploy(machine_type="n1-standard-2")
        shared_state["resources"].append(endpoint)

        # test model update
        model = model.update(
            display_name="new_name",
            description="new_description",
            labels={"my_label": "updated"},
        )
        assert model.display_name == "new_name"
        assert model.description == "new_description"
        assert model.labels == {"my_label": "updated"}

        # Deploy the updated model a second time to the same endpoint, then
        # split traffic 50/50 between the two deployed models.
        assert len(endpoint.list_models()) == 1
        endpoint.deploy(model, traffic_percentage=100)
        assert len(endpoint.list_models()) == 2
        traffic_split = {
            deployed_model.id: 50 for deployed_model in endpoint.list_models()
        }
        endpoint.update(traffic_split=traffic_split)
        assert endpoint.traffic_split == traffic_split