feat: add TPU_V4_POD to AcceleratorType in aiplatform v1beta1 accelerator_type.proto

feat: add split to ExportDataConfig in aiplatform v1beta1 dataset.proto
feat: add evaluated_annotation.proto to aiplatform v1beta1
feat: add cpu_utilization_target to Featurestore.OnlineServingConfig.Scaling in aiplatform v1beta1 featurestore.proto
feat: add large_model_reference to Model in aiplatform v1beta1 model.proto
feat: add slice_spec to ModelEvaluationSlice in aiplatform v1beta1 model_evaluation_slice.proto
feat: add BatchImportEvaluatedAnnotations rpc to aiplatform v1beta1 model_service.proto
docs: deprecated enable_restricted_image_training in NasJob in aiplatform v1beta1 nas_job.proto

PiperOrigin-RevId: 513669538
Google APIs authored and Copybara-Service committed Mar 3, 2023
1 parent 9716b12 commit 01293cf
Showing 26 changed files with 495 additions and 32 deletions.
1 change: 1 addition & 0 deletions google/cloud/aiplatform/v1beta1/BUILD.bazel
@@ -42,6 +42,7 @@ proto_library(
"endpoint_service.proto",
"entity_type.proto",
"env_var.proto",
"evaluated_annotation.proto",
"event.proto",
"execution.proto",
"explanation.proto",
4 changes: 4 additions & 0 deletions google/cloud/aiplatform/v1beta1/accelerator_type.proto
@@ -25,6 +25,7 @@ option php_namespace = "Google\\Cloud\\AIPlatform\\V1beta1";
option ruby_package = "Google::Cloud::AIPlatform::V1beta1";

// Represents a hardware accelerator type.
// NEXT ID: 11.
enum AcceleratorType {
// Unspecified accelerator type, which means no accelerator.
ACCELERATOR_TYPE_UNSPECIFIED = 0;
@@ -55,4 +56,7 @@ enum AcceleratorType {

// TPU v3.
TPU_V3 = 7;

// TPU v4.
TPU_V4_POD = 10;
}
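For illustration, a minimal sketch of referencing the new enum value from a regenerated v1beta1 Python client (google-cloud-aiplatform); the `cloud-tpu` machine type and accelerator count below are assumptions, not part of this commit:

from google.cloud import aiplatform_v1beta1

# Assumes a client release generated from these protos.
machine_spec = aiplatform_v1beta1.MachineSpec(
    machine_type="cloud-tpu",  # assumed TPU machine type; actual v4 machine types may differ
    accelerator_type=aiplatform_v1beta1.AcceleratorType.TPU_V4_POD,
    accelerator_count=32,      # illustrative count; real topologies vary
)
print(machine_spec.accelerator_type.name)  # "TPU_V4_POD"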
5 changes: 5 additions & 0 deletions google/cloud/aiplatform/v1beta1/aiplatform_v1beta1.yaml
@@ -238,6 +238,7 @@ http:
- post: '/ui/{name=projects/*/locations/*/studies/*/trials/*/operations/*}:cancel'
- post: '/ui/{name=projects/*/locations/*/trainingPipelines/*/operations/*}:cancel'
- post: '/ui/{name=projects/*/locations/*/pipelineJobs/*/operations/*}:cancel'
- post: '/ui/{name=projects/*/locations/*/schedules/*/operations/*}:cancel'
- post: '/ui/{name=projects/*/locations/*/specialistPools/*/operations/*}:cancel'
- post: '/ui/{name=projects/*/locations/*/tensorboards/*/operations/*}:cancel'
- post: '/ui/{name=projects/*/locations/*/tensorboards/*/experiments/*/operations/*}:cancel'
@@ -302,6 +303,7 @@ http:
- delete: '/ui/{name=projects/*/locations/*/studies/*/trials/*/operations/*}'
- delete: '/ui/{name=projects/*/locations/*/trainingPipelines/*/operations/*}'
- delete: '/ui/{name=projects/*/locations/*/pipelineJobs/*/operations/*}'
- delete: '/ui/{name=projects/*/locations/*/schedules/*/operations/*}'
- delete: '/ui/{name=projects/*/locations/*/specialistPools/*/operations/*}'
- delete: '/ui/{name=projects/*/locations/*/tensorboards/*/operations/*}'
- delete: '/ui/{name=projects/*/locations/*/tensorboards/*/experiments/*/operations/*}'
@@ -367,6 +369,7 @@ http:
- get: '/ui/{name=projects/*/locations/*/studies/*/trials/*/operations/*}'
- get: '/ui/{name=projects/*/locations/*/trainingPipelines/*/operations/*}'
- get: '/ui/{name=projects/*/locations/*/pipelineJobs/*/operations/*}'
- get: '/ui/{name=projects/*/locations/*/schedules/*/operations/*}'
- get: '/ui/{name=projects/*/locations/*/specialistPools/*/operations/*}'
- get: '/ui/{name=projects/*/locations/*/tensorboards/*/operations/*}'
- get: '/ui/{name=projects/*/locations/*/tensorboards/*/experiments/*/operations/*}'
@@ -431,6 +434,7 @@ http:
- get: '/ui/{name=projects/*/locations/*/studies/*/trials/*}/operations'
- get: '/ui/{name=projects/*/locations/*/trainingPipelines/*}/operations'
- get: '/ui/{name=projects/*/locations/*/pipelineJobs/*}/operations'
- get: '/ui/{name=projects/*/locations/*/schedules/*}/operations'
- get: '/ui/{name=projects/*/locations/*/specialistPools/*}/operations'
- get: '/ui/{name=projects/*/locations/*/tensorboards/*}/operations'
- get: '/ui/{name=projects/*/locations/*/tensorboards/*/experiments/*}/operations'
@@ -495,6 +499,7 @@ http:
- post: '/ui/{name=projects/*/locations/*/studies/*/trials/*/operations/*}:wait'
- post: '/ui/{name=projects/*/locations/*/trainingPipelines/*/operations/*}:wait'
- post: '/ui/{name=projects/*/locations/*/pipelineJobs/*/operations/*}:wait'
- post: '/ui/{name=projects/*/locations/*/schedules/*/operations/*}:wait'
- post: '/ui/{name=projects/*/locations/*/specialistPools/*/operations/*}:wait'
- post: '/ui/{name=projects/*/locations/*/tensorboards/*/operations/*}:wait'
- post: '/ui/{name=projects/*/locations/*/tensorboards/*/experiments/*/operations/*}:wait'
6 changes: 5 additions & 1 deletion google/cloud/aiplatform/v1beta1/batch_prediction_job.proto
@@ -278,7 +278,11 @@ message BatchPredictionJob {
// Exactly one of model and unmanaged_container_model must be set.
//
// The model resource name may contain version id or version alias to specify
// the version, if no version is specified, the default version will be used.
// the version.
// Example: `projects/{project}/locations/{location}/models/{model}@2`
// or
// `projects/{project}/locations/{location}/models/{model}@golden`.
// If no version is specified, the default version will be used.
string model = 3 [(google.api.resource_reference) = {
type: "aiplatform.googleapis.com/Model"
}];
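A short sketch of what the documented version suffix looks like in practice, assuming the generated v1beta1 Python client; the project, location, model ID, and alias are placeholders:

from google.cloud import aiplatform_v1beta1

job = aiplatform_v1beta1.BatchPredictionJob(
    display_name="demo-batch-job",
    # "@golden" pins a version alias; "@2" would pin version id 2.
    model="projects/my-project/locations/us-central1/models/1234567890@golden",
)
# Omitting the "@..." suffix makes the job use the model's default version.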
24 changes: 24 additions & 0 deletions google/cloud/aiplatform/v1beta1/dataset.proto
@@ -170,9 +170,33 @@ message ExportDataConfig {
GcsDestination gcs_destination = 1;
}

// Instructions on how the exported data should be split between the
// training, validation, and test sets.
oneof split {
// Split based on fractions defining the size of each set.
ExportFractionSplit fraction_split = 5;
}

// A filter on Annotations of the Dataset. Only Annotations on to-be-exported
// DataItems (specified by [data_items_filter][]) that match this filter will
// be exported. The filter syntax is the same as in
// [ListAnnotations][google.cloud.aiplatform.v1beta1.DatasetService.ListAnnotations].
string annotations_filter = 2;
}

// Assigns the input data to training, validation, and test sets as per the
// given fractions. Any of `training_fraction`, `validation_fraction`, and
// `test_fraction` may optionally be provided; if provided, they must sum to
// at most 1. If the provided fractions sum to less than 1, the remainder is
// assigned to sets as decided by Vertex AI. If none of the fractions are set,
// by default roughly 80% of data is used for training, 10% for validation,
// and 10% for test.
message ExportFractionSplit {
// The fraction of the input data that is to be used to train the Model.
double training_fraction = 1;

// The fraction of the input data that is to be used to validate the Model.
double validation_fraction = 2;

// The fraction of the input data that is to be used to evaluate the Model.
double test_fraction = 3;
}
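A minimal sketch of an 80/10/10 export using the new `fraction_split` field, assuming the generated v1beta1 Python client; the bucket path is a placeholder:

from google.cloud import aiplatform_v1beta1

export_config = aiplatform_v1beta1.ExportDataConfig(
    gcs_destination=aiplatform_v1beta1.GcsDestination(
        output_uri_prefix="gs://my-bucket/exports/",
    ),
    fraction_split=aiplatform_v1beta1.ExportFractionSplit(
        training_fraction=0.8,
        validation_fraction=0.1,
        test_fraction=0.1,  # fractions may sum to at most 1
    ),
)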
1 change: 1 addition & 0 deletions google/cloud/aiplatform/v1beta1/dataset_service.proto
@@ -27,6 +27,7 @@ import "google/cloud/aiplatform/v1beta1/dataset.proto";
import "google/cloud/aiplatform/v1beta1/operation.proto";
import "google/cloud/aiplatform/v1beta1/saved_query.proto";
import "google/longrunning/operations.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/field_mask.proto";

option csharp_namespace = "Google.Cloud.AIPlatform.V1Beta1";
@@ -25,6 +25,7 @@ import "google/cloud/aiplatform/v1beta1/deployment_resource_pool.proto";
import "google/cloud/aiplatform/v1beta1/endpoint.proto";
import "google/cloud/aiplatform/v1beta1/operation.proto";
import "google/longrunning/operations.proto";
import "google/protobuf/empty.proto";

option csharp_namespace = "Google.Cloud.AIPlatform.V1Beta1";
option go_package = "cloud.google.com/go/aiplatform/apiv1beta1/aiplatformpb;aiplatformpb";
6 changes: 5 additions & 1 deletion google/cloud/aiplatform/v1beta1/endpoint.proto
@@ -176,7 +176,11 @@ message DeployedModel {
// Endpoint.
//
// The resource name may contain version id or version alias to specify the
// version, if no version is specified, the default version will be deployed.
// version.
// Example: `projects/{project}/locations/{location}/models/{model}@2`
// or
// `projects/{project}/locations/{location}/models/{model}@golden`.
// If no version is specified, the default version will be deployed.
string model = 2 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
1 change: 1 addition & 0 deletions google/cloud/aiplatform/v1beta1/endpoint_service.proto
@@ -23,6 +23,7 @@ import "google/api/resource.proto";
import "google/cloud/aiplatform/v1beta1/endpoint.proto";
import "google/cloud/aiplatform/v1beta1/operation.proto";
import "google/longrunning/operations.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/field_mask.proto";

option csharp_namespace = "Google.Cloud.AIPlatform.V1Beta1";
185 changes: 185 additions & 0 deletions google/cloud/aiplatform/v1beta1/evaluated_annotation.proto
@@ -0,0 +1,185 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.aiplatform.v1beta1;

import "google/api/field_behavior.proto";
import "google/cloud/aiplatform/v1beta1/explanation.proto";
import "google/protobuf/struct.proto";

option csharp_namespace = "Google.Cloud.AIPlatform.V1Beta1";
option go_package = "cloud.google.com/go/aiplatform/apiv1beta1/aiplatformpb;aiplatformpb";
option java_multiple_files = true;
option java_outer_classname = "EvaluatedAnnotationProto";
option java_package = "com.google.cloud.aiplatform.v1beta1";
option php_namespace = "Google\\Cloud\\AIPlatform\\V1beta1";
option ruby_package = "Google::Cloud::AIPlatform::V1beta1";

// True positive, false positive, or false negative.
//
// EvaluatedAnnotation is only available under a ModelEvaluationSlice with a
// slice of the `annotationSpec` dimension.
message EvaluatedAnnotation {
// Describes the type of the EvaluatedAnnotation. The type is determined by
// how the Model's predictions match the ground truth Annotations.
enum EvaluatedAnnotationType {
// Invalid value.
EVALUATED_ANNOTATION_TYPE_UNSPECIFIED = 0;

// The EvaluatedAnnotation is a true positive. It has a prediction created
// by the Model and a ground truth Annotation which the prediction matches.
TRUE_POSITIVE = 1;

// The EvaluatedAnnotation is a false positive. It has a prediction created by
// the Model which does not match any ground truth annotation.
FALSE_POSITIVE = 2;

// The EvaluatedAnnotation is a false negative. It has a ground truth
// annotation which is not matched by any of the predictions created by the
// Model.
FALSE_NEGATIVE = 3;
}

// Output only. Type of the EvaluatedAnnotation.
EvaluatedAnnotationType type = 1 [(google.api.field_behavior) = OUTPUT_ONLY];

// Output only. The model predicted annotations.
//
// For true positive, there is one and only one prediction, which matches the
// only one ground truth annotation in
// [ground_truths][google.cloud.aiplatform.v1beta1.EvaluatedAnnotation.ground_truths].
//
// For false positive, there is one and only one prediction, which doesn't
// match any ground truth annotation of the corresponding
// [data_item_view_id][EvaluatedAnnotation.data_item_view_id].
//
// For false negative, there are zero or more predictions which are similar to
// the only ground truth annotation in
// [ground_truths][google.cloud.aiplatform.v1beta1.EvaluatedAnnotation.ground_truths]
// but not enough for a match.
//
// The schema of the prediction is stored in
// [ModelEvaluation.annotation_schema_uri][]
repeated google.protobuf.Value predictions = 2
[(google.api.field_behavior) = OUTPUT_ONLY];

// Output only. The ground truth Annotations, i.e. the Annotations that exist
// in the test data the Model is evaluated on.
//
// For true positive, there is one and only one ground truth annotation, which
// matches the only prediction in
// [predictions][google.cloud.aiplatform.v1beta1.EvaluatedAnnotation.predictions].
//
// For false positive, there are zero or more ground truth annotations that
// are similar to the only prediction in
// [predictions][google.cloud.aiplatform.v1beta1.EvaluatedAnnotation.predictions],
// but not enough for a match.
//
// For false negative, there is one and only one ground truth annotation,
// which doesn't match any predictions created by the model.
//
// The schema of the ground truth is stored in
// [ModelEvaluation.annotation_schema_uri][]
repeated google.protobuf.Value ground_truths = 3
[(google.api.field_behavior) = OUTPUT_ONLY];

// Output only. The payload of the data item that this EvaluatedAnnotation
// was produced for.
google.protobuf.Value data_item_payload = 5
[(google.api.field_behavior) = OUTPUT_ONLY];

// Output only. ID of the EvaluatedDataItemView under the same ancestor
// ModelEvaluation. The EvaluatedDataItemView consists of all ground truths
// and predictions on
// [data_item_payload][google.cloud.aiplatform.v1beta1.EvaluatedAnnotation.data_item_payload].
//
// Can be passed in
// [GetEvaluatedDataItemView's][ModelService.GetEvaluatedDataItemView][]
// [id][GetEvaluatedDataItemViewRequest.id].
string evaluated_data_item_view_id = 6
[(google.api.field_behavior) = OUTPUT_ONLY];

// Explanations of
// [predictions][google.cloud.aiplatform.v1beta1.EvaluatedAnnotation.predictions].
// Each element of the explanations indicates the explanation for one
// explanation method.
//
// The attributions list in the
// [EvaluatedAnnotationExplanation.explanation][google.cloud.aiplatform.v1beta1.EvaluatedAnnotationExplanation.explanation]
// object corresponds to the
// [predictions][google.cloud.aiplatform.v1beta1.EvaluatedAnnotation.predictions]
// list. For example, the second element in the attributions list explains the
// second element in the predictions list.
repeated EvaluatedAnnotationExplanation explanations = 8;

// Annotations of model error analysis results.
repeated ErrorAnalysisAnnotation error_analysis_annotations = 9;
}

// Explanation result of the prediction produced by the Model.
message EvaluatedAnnotationExplanation {
// Explanation type.
//
// For AutoML Image Classification models, possible values are:
//
// * `image-integrated-gradients`
// * `image-xrai`
string explanation_type = 1;

// Explanation attribution response details.
Explanation explanation = 2;
}

// Model error analysis for each annotation.
message ErrorAnalysisAnnotation {
// Attributed items for a given annotation, typically representing neighbors
// from the training sets constrained by the query type.
message AttributedItem {
// The unique ID for each annotation. Used by the frontend to allocate the
// annotation in the database.
string annotation_resource_name = 1;

// The distance of this item to the annotation.
double distance = 2;
}

// The query type used for finding the attributed items.
enum QueryType {
// Unspecified query type for model error analysis.
QUERY_TYPE_UNSPECIFIED = 0;

// Query similar samples across all classes in the dataset.
ALL_SIMILAR = 1;

// Query similar samples from the same class of the input sample.
SAME_CLASS_SIMILAR = 2;

// Query dissimilar samples from the same class of the input sample.
SAME_CLASS_DISSIMILAR = 3;
}

// Attributed items for a given annotation, typically representing neighbors
// from the training sets constrained by the query type.
repeated AttributedItem attributed_items = 1;

// The query type used for finding the attributed items.
QueryType query_type = 2;

// The outlier score of this annotated item. Usually defined as the min of all
// distances from attributed items.
double outlier_score = 3;

// The threshold used to determine if this annotation is an outlier or not.
double outlier_threshold = 4;
}
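A minimal sketch of populating the new error-analysis messages, assuming the generated v1beta1 Python client exposes them under these names; the resource name and scores are placeholders:

from google.cloud import aiplatform_v1beta1

error_analysis = aiplatform_v1beta1.ErrorAnalysisAnnotation(
    query_type=aiplatform_v1beta1.ErrorAnalysisAnnotation.QueryType.SAME_CLASS_SIMILAR,
    attributed_items=[
        aiplatform_v1beta1.ErrorAnalysisAnnotation.AttributedItem(
            annotation_resource_name="projects/123/locations/us-central1/datasets/456/annotations/789",
            distance=0.42,
        ),
    ],
    outlier_score=0.9,
    outlier_threshold=0.8,
)
# Presumably an annotation counts as an outlier once its score crosses the threshold.
is_outlier = error_analysis.outlier_score >= error_analysis.outlier_threshold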
9 changes: 9 additions & 0 deletions google/cloud/aiplatform/v1beta1/featurestore.proto
@@ -52,6 +52,15 @@ message Featurestore {
// The maximum number of nodes to scale up to. Must be greater than
// min_node_count, and less than or equal to 10 times 'min_node_count'.
int32 max_node_count = 2;

// Optional. The CPU utilization that the Autoscaler should try to achieve.
// This number is on a scale from 0 (no utilization) to 100 (total
// utilization), and is limited between 10 and 80. When a cluster's CPU
// utilization exceeds the target that you have set, Bigtable immediately
// adds nodes to the cluster. When CPU utilization is substantially lower
// than the target, Bigtable removes nodes. If not set or set to 0, it
// defaults to 50.
int32 cpu_utilization_target = 3 [(google.api.field_behavior) = OPTIONAL];
}

// The number of nodes for the online store. The number of nodes doesn't
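A minimal sketch of the new autoscaling target in an online serving config, assuming the generated v1beta1 Python client; node counts are placeholders:

from google.cloud import aiplatform_v1beta1

scaling = aiplatform_v1beta1.Featurestore.OnlineServingConfig.Scaling(
    min_node_count=1,
    max_node_count=5,           # must exceed min_node_count and be <= 10 * min_node_count
    cpu_utilization_target=60,  # percent; valid range 10-80, 0/unset defaults to 50
)
online_config = aiplatform_v1beta1.Featurestore.OnlineServingConfig(scaling=scaling)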
21 changes: 12 additions & 9 deletions google/cloud/aiplatform/v1beta1/featurestore_monitoring.proto
@@ -48,17 +48,16 @@ message FeaturestoreMonitoringConfig {

// Configuration of the snapshot analysis based monitoring pipeline running
// interval. The value is rolled up to full day.
// If both
// [monitoring_interval_days][google.cloud.aiplatform.v1beta1.FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval_days]
// and the deprecated `monitoring_interval` field
// are set when creating/updating EntityTypes/Features,
// [monitoring_interval_days][google.cloud.aiplatform.v1beta1.FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval_days]
// will be used.
google.protobuf.Duration monitoring_interval = 2 [deprecated = true];

// Configuration of the snapshot analysis based monitoring pipeline
// running interval. The value indicates number of days.
// If both
// [FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval_days][google.cloud.aiplatform.v1beta1.FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval_days]
// and
// [FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval][google.cloud.aiplatform.v1beta1.FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval]
// are set when creating/updating EntityTypes/Features,
// [FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval_days][google.cloud.aiplatform.v1beta1.FeaturestoreMonitoringConfig.SnapshotAnalysis.monitoring_interval_days]
// will be used.
int32 monitoring_interval_days = 3;

// Customized export features time window for snapshot analysis. Unit is one
@@ -69,7 +68,9 @@

// Configuration of the Featurestore's ImportFeature Analysis Based
// Monitoring. This type of analysis generates statistics for values of each
// Feature imported by every [ImportFeatureValues][] operation.
// Feature imported by every
// [ImportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ImportFeatureValues]
// operation.
message ImportFeaturesAnalysis {
// The state defines whether to enable ImportFeature analysis.
enum State {
Expand All @@ -96,7 +97,9 @@ message FeaturestoreMonitoringConfig {
}

// Defines the baseline to do anomaly detection for feature values imported
// by each [ImportFeatureValues][] operation.
// by each
// [ImportFeatureValues][google.cloud.aiplatform.v1beta1.FeaturestoreService.ImportFeatureValues]
// operation.
enum Baseline {
// Should not be used.
BASELINE_UNSPECIFIED = 0;
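A minimal sketch of configuring snapshot analysis with the day-granularity field rather than the deprecated Duration-based field, assuming the generated v1beta1 Python client:

from google.cloud import aiplatform_v1beta1

monitoring = aiplatform_v1beta1.FeaturestoreMonitoringConfig(
    snapshot_analysis=aiplatform_v1beta1.FeaturestoreMonitoringConfig.SnapshotAnalysis(
        # Preferred over the deprecated `monitoring_interval`; takes precedence
        # if both are set.
        monitoring_interval_days=1,
    ),
)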