Skip to content

Commit

Permalink
feat(v1beta1): add `service_account` to `BatchPredictionJob` in `batch_prediction_job.proto` (#1084)
Browse files Browse the repository at this point in the history

* feat: add `service_account` to `BatchPredictionJob` in aiplatform `v1beta1` `batch_prediction_job.proto`

PiperOrigin-RevId: 434935416

Source-Link: googleapis/googleapis@dfdd5ee

Source-Link: googleapis/googleapis-gen@2774684
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjc3NDY4NGMzYjhiMjI2Mzk5MDk2ODhkMjk3ZDRmYTU5MWZkZTllYiJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
  • Loading branch information
gcf-owl-bot[bot] and gcf-owl-bot[bot] committed Mar 16, 2022
1 parent 38f3711 commit b7a5177
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 31 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -183,16 +183,19 @@ def parse_annotated_dataset_path(path: str) -> Dict[str, str]:
return m.groupdict() if m else {}

@staticmethod
def dataset_path(project: str, dataset: str,) -> str:
def dataset_path(project: str, location: str, dataset: str,) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
return m.groupdict() if m else {}

@staticmethod
Expand All @@ -212,19 +215,16 @@ def parse_dataset_path(path: str) -> Dict[str, str]:
return m.groupdict() if m else {}

@staticmethod
def dataset_path(project: str, location: str, dataset: str,) -> str:
def dataset_path(project: str, dataset: str,) -> str:
"""Returns a fully-qualified dataset string."""
return "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
return "projects/{project}/datasets/{dataset}".format(
project=project, dataset=dataset,
)

@staticmethod
def parse_dataset_path(path: str) -> Dict[str, str]:
"""Parses a dataset path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$",
path,
)
m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
return m.groupdict() if m else {}

@staticmethod
Expand Down
11 changes: 11 additions & 0 deletions google/cloud/aiplatform_v1beta1/types/batch_prediction_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,16 @@ class BatchPredictionJob(proto.Message):
DEDICATED_RESOURCES this config may be provided (and the job
will use these resources), if the Model doesn't support
AUTOMATIC_RESOURCES, this config must be provided.
service_account (str):
The service account that the DeployedModel's container runs
as. If not specified, a system generated one will be used,
which has minimal permissions and the custom container, if
used, may not have enough permission to access other GCP
resources.
Users deploying the Model must have the
``iam.serviceAccounts.actAs`` permission on this service
account.
manual_batch_tuning_parameters (google.cloud.aiplatform_v1beta1.types.ManualBatchTuningParameters):
Immutable. Parameters configuring the batch behavior.
Currently only applicable when
Expand Down Expand Up @@ -381,6 +391,7 @@ class OutputInfo(proto.Message):
dedicated_resources = proto.Field(
proto.MESSAGE, number=7, message=machine_resources.BatchDedicatedResources,
)
service_account = proto.Field(proto.STRING, number=29,)
manual_batch_tuning_parameters = proto.Field(
proto.MESSAGE,
number=8,
Expand Down
8 changes: 8 additions & 0 deletions tests/unit/gapic/aiplatform_v1beta1/test_job_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -4494,6 +4494,7 @@ def test_create_batch_prediction_job(request_type, transport: str = "grpc"):
name="name_value",
display_name="display_name_value",
model="model_value",
service_account="service_account_value",
generate_explanation=True,
state=job_state.JobState.JOB_STATE_QUEUED,
)
Expand All @@ -4509,6 +4510,7 @@ def test_create_batch_prediction_job(request_type, transport: str = "grpc"):
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.model == "model_value"
assert response.service_account == "service_account_value"
assert response.generate_explanation is True
assert response.state == job_state.JobState.JOB_STATE_QUEUED

Expand Down Expand Up @@ -4553,6 +4555,7 @@ async def test_create_batch_prediction_job_async(
name="name_value",
display_name="display_name_value",
model="model_value",
service_account="service_account_value",
generate_explanation=True,
state=job_state.JobState.JOB_STATE_QUEUED,
)
Expand All @@ -4569,6 +4572,7 @@ async def test_create_batch_prediction_job_async(
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.model == "model_value"
assert response.service_account == "service_account_value"
assert response.generate_explanation is True
assert response.state == job_state.JobState.JOB_STATE_QUEUED

Expand Down Expand Up @@ -4750,6 +4754,7 @@ def test_get_batch_prediction_job(request_type, transport: str = "grpc"):
name="name_value",
display_name="display_name_value",
model="model_value",
service_account="service_account_value",
generate_explanation=True,
state=job_state.JobState.JOB_STATE_QUEUED,
)
Expand All @@ -4765,6 +4770,7 @@ def test_get_batch_prediction_job(request_type, transport: str = "grpc"):
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.model == "model_value"
assert response.service_account == "service_account_value"
assert response.generate_explanation is True
assert response.state == job_state.JobState.JOB_STATE_QUEUED

Expand Down Expand Up @@ -4809,6 +4815,7 @@ async def test_get_batch_prediction_job_async(
name="name_value",
display_name="display_name_value",
model="model_value",
service_account="service_account_value",
generate_explanation=True,
state=job_state.JobState.JOB_STATE_QUEUED,
)
Expand All @@ -4825,6 +4832,7 @@ async def test_get_batch_prediction_job_async(
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.model == "model_value"
assert response.service_account == "service_account_value"
assert response.generate_explanation is True
assert response.state == job_state.JobState.JOB_STATE_QUEUED

Expand Down
40 changes: 20 additions & 20 deletions tests/unit/gapic/aiplatform_v1beta1/test_migration_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -1795,18 +1795,20 @@ def test_parse_annotated_dataset_path():

def test_dataset_path():
    """Verify dataset_path builds the location-qualified resource name."""
    project = "cuttlefish"
    location = "mussel"
    dataset = "winkle"
    expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
        project=project, location=location, dataset=dataset,
    )
    actual = MigrationServiceClient.dataset_path(project, location, dataset)
    assert expected == actual


def test_parse_dataset_path():
expected = {
"project": "winkle",
"dataset": "nautilus",
"project": "nautilus",
"location": "scallop",
"dataset": "abalone",
}
path = MigrationServiceClient.dataset_path(**expected)

Expand All @@ -1816,9 +1818,9 @@ def test_parse_dataset_path():


def test_dataset_path():
project = "scallop"
location = "abalone"
dataset = "squid"
project = "squid"
location = "clam"
dataset = "whelk"
expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(
project=project, location=location, dataset=dataset,
)
Expand All @@ -1828,9 +1830,9 @@ def test_dataset_path():

def test_parse_dataset_path():
expected = {
"project": "clam",
"location": "whelk",
"dataset": "octopus",
"project": "octopus",
"location": "oyster",
"dataset": "nudibranch",
}
path = MigrationServiceClient.dataset_path(**expected)

Expand All @@ -1840,20 +1842,18 @@ def test_parse_dataset_path():


def test_dataset_path():
    """Verify dataset_path builds the legacy (non-location) resource name."""
    project = "cuttlefish"
    dataset = "mussel"
    expected = "projects/{project}/datasets/{dataset}".format(
        project=project, dataset=dataset,
    )
    actual = MigrationServiceClient.dataset_path(project, dataset)
    assert expected == actual


def test_parse_dataset_path():
expected = {
"project": "mussel",
"location": "winkle",
"project": "winkle",
"dataset": "nautilus",
}
path = MigrationServiceClient.dataset_path(**expected)
Expand Down

0 comments on commit b7a5177

Please sign in to comment.