From 9691f513420651ce2d303f3e056f88054702b2c3 Mon Sep 17 00:00:00 2001
From: Google APIs
Date: Thu, 3 Nov 2022 13:47:20 -0700
Subject: [PATCH] feat: add annotation_labels to ImportDataConfig in aiplatform
 v1 dataset.proto

feat: add start_time to BatchReadFeatureValuesRequest in aiplatform v1
featurestore_service.proto
feat: add metadata_artifact to Model in aiplatform v1 model.proto
feat: add failed_main_jobs and failed_pre_caching_check_jobs to ContainerDetail
in aiplatform v1 pipeline_job.proto
feat: add persist_ml_use_assignment to InputDataConfig in aiplatform v1
training_pipeline.proto

PiperOrigin-RevId: 485963171
---
 google/cloud/aiplatform/v1/artifact.proto          |  1 +
 google/cloud/aiplatform/v1/context.proto           |  1 +
 google/cloud/aiplatform/v1/dataset.proto           |  9 +++++++++
 google/cloud/aiplatform/v1/execution.proto         |  1 +
 google/cloud/aiplatform/v1/featurestore.proto      |  6 ++++--
 .../aiplatform/v1/featurestore_service.proto       |  7 ++++++-
 google/cloud/aiplatform/v1/index_endpoint.proto    |  2 +-
 google/cloud/aiplatform/v1/metadata_service.proto  | 13 +++++++++----
 google/cloud/aiplatform/v1/model.proto             |  9 +++++++--
 google/cloud/aiplatform/v1/pipeline_job.proto      | 15 +++++++++++++--
 .../cloud/aiplatform/v1/tensorboard_service.proto  |  8 ++++----
 .../cloud/aiplatform/v1/training_pipeline.proto    |  3 +++
 .../aiplatform/v1/user_action_reference.proto      |  4 ++--
 13 files changed, 61 insertions(+), 18 deletions(-)

diff --git a/google/cloud/aiplatform/v1/artifact.proto b/google/cloud/aiplatform/v1/artifact.proto
index d7ef997ad685b..be52fcc63d009 100644
--- a/google/cloud/aiplatform/v1/artifact.proto
+++ b/google/cloud/aiplatform/v1/artifact.proto
@@ -101,6 +101,7 @@ message Artifact {
   string schema_version = 15;
 
   // Properties of the Artifact.
+  // Top level metadata keys' leading and trailing spaces will be trimmed.
   // The size of this field should not exceed 200KB.
   google.protobuf.Struct metadata = 16;
 
diff --git a/google/cloud/aiplatform/v1/context.proto b/google/cloud/aiplatform/v1/context.proto
index b602a55044e65..1055ea93d5a86 100644
--- a/google/cloud/aiplatform/v1/context.proto
+++ b/google/cloud/aiplatform/v1/context.proto
@@ -86,6 +86,7 @@ message Context {
   string schema_version = 14;
 
   // Properties of the Context.
+  // Top level metadata keys' leading and trailing spaces will be trimmed.
   // The size of this field should not exceed 200KB.
   google.protobuf.Struct metadata = 15;
 
diff --git a/google/cloud/aiplatform/v1/dataset.proto b/google/cloud/aiplatform/v1/dataset.proto
index 6c686986a2c7e..c2771f8a997d8 100644
--- a/google/cloud/aiplatform/v1/dataset.proto
+++ b/google/cloud/aiplatform/v1/dataset.proto
@@ -112,6 +112,15 @@ message ImportDataConfig {
   // file referenced by [import_schema_uri][google.cloud.aiplatform.v1.ImportDataConfig.import_schema_uri], e.g. jsonl file.
   map<string, string> data_item_labels = 2;
 
+  // Labels that will be applied to newly imported Annotations. If two
+  // Annotations are identical, one of them will be deduped. Two Annotations are
+  // considered identical if their [payload][google.cloud.aiplatform.v1.Annotation.payload],
+  // [payload_schema_uri][google.cloud.aiplatform.v1.Annotation.payload_schema_uri] and all of their
+  // [labels][google.cloud.aiplatform.v1.Annotation.labels] are the same.
+  // These labels will be overridden by Annotation labels specified inside the
+  // index file referenced by [import_schema_uri][google.cloud.aiplatform.v1.ImportDataConfig.import_schema_uri], e.g. jsonl file.
+  map<string, string> annotation_labels = 3;
+
   // Required. Points to a YAML file stored on Google Cloud Storage describing the import
   // format. Validation will be done against the schema. The schema is defined
   // as an [OpenAPI 3.0.2 Schema
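
A minimal usage sketch for the new `annotation_labels` field above, assuming a google-cloud-aiplatform Python client generated from this revision of dataset.proto; the project, dataset, bucket, schema URI, and label values below are placeholders:

    from google.cloud import aiplatform_v1

    import_config = aiplatform_v1.ImportDataConfig(
        gcs_source=aiplatform_v1.GcsSource(uris=["gs://my-bucket/import.jsonl"]),
        import_schema_uri="gs://my-bucket/schema/import_format.yaml",
        # New in this change: labels applied to every newly imported Annotation.
        annotation_labels={"source": "batch-2022-11"},
    )
    request = aiplatform_v1.ImportDataRequest(
        name="projects/my-project/locations/us-central1/datasets/123",
        import_configs=[import_config],
    )
    # client = aiplatform_v1.DatasetServiceClient()
    # operation = client.import_data(request=request)
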
diff --git a/google/cloud/aiplatform/v1/execution.proto b/google/cloud/aiplatform/v1/execution.proto
index a2b7c2b905202..0d9dcea5a3785 100644
--- a/google/cloud/aiplatform/v1/execution.proto
+++ b/google/cloud/aiplatform/v1/execution.proto
@@ -107,6 +107,7 @@ message Execution {
   string schema_version = 14;
 
   // Properties of the Execution.
+  // Top level metadata keys' leading and trailing spaces will be trimmed.
   // The size of this field should not exceed 200KB.
   google.protobuf.Struct metadata = 15;
 
diff --git a/google/cloud/aiplatform/v1/featurestore.proto b/google/cloud/aiplatform/v1/featurestore.proto
index 4c8a72f59f89f..46b19f60ec3c9 100644
--- a/google/cloud/aiplatform/v1/featurestore.proto
+++ b/google/cloud/aiplatform/v1/featurestore.proto
@@ -116,8 +116,10 @@ message Featurestore {
   // and are immutable.
   map<string, string> labels = 6 [(google.api.field_behavior) = OPTIONAL];
 
-  // Optional. Config for online storage resources. If unset, the featurestore will
-  // not have an online store and cannot be used for online serving.
+  // Optional. Config for online storage resources. This field must not be set together
+  // with `OnlineStoreReplicationConfig`. If both this field and
+  // `OnlineStoreReplicationConfig` are unset, the featurestore will not have an
+  // online store and cannot be used for online serving.
   OnlineServingConfig online_serving_config = 7 [(google.api.field_behavior) = OPTIONAL];
 
   // Output only. State of the featurestore.
diff --git a/google/cloud/aiplatform/v1/featurestore_service.proto b/google/cloud/aiplatform/v1/featurestore_service.proto
index 516092ee15970..96d8400eaf18a 100644
--- a/google/cloud/aiplatform/v1/featurestore_service.proto
+++ b/google/cloud/aiplatform/v1/featurestore_service.proto
@@ -614,6 +614,11 @@ message BatchReadFeatureValuesRequest {
   // specifying entity IDs in the EntityType in
   // [BatchReadFeatureValuesRequest.request][] .
   repeated EntityTypeSpec entity_type_specs = 7 [(google.api.field_behavior) = REQUIRED];
+
+  // Optional. Excludes Feature values with a feature generation timestamp before this
+  // timestamp. If not set, the oldest values kept in the Feature Store are retrieved.
+  // The timestamp, if present, must not have higher than millisecond precision.
+  google.protobuf.Timestamp start_time = 11 [(google.api.field_behavior) = OPTIONAL];
 }
 
 // Request message for [FeaturestoreService.ExportFeatureValues][google.cloud.aiplatform.v1.FeaturestoreService.ExportFeatureValues].
@@ -898,7 +903,7 @@ message CreateFeatureRequest {
   // Required. The ID to use for the Feature, which will become the final component of
   // the Feature's resource name.
   //
-  // This value may be up to 60 characters, and valid characters are
+  // This value may be up to 128 characters, and valid characters are
   // `[a-z0-9_]`. The first character cannot be a number.
   //
   // The value must be unique within an EntityType.
diff --git a/google/cloud/aiplatform/v1/index_endpoint.proto b/google/cloud/aiplatform/v1/index_endpoint.proto
index 9f1e9941bdea6..a5e36557e7724 100644
--- a/google/cloud/aiplatform/v1/index_endpoint.proto
+++ b/google/cloud/aiplatform/v1/index_endpoint.proto
@@ -85,7 +85,7 @@ message IndexEndpoint {
   // are mutually exclusive.
   //
   // [Format](https://cloud.google.com/compute/docs/reference/rest/v1/networks/insert):
-  // projects/{project}/global/networks/{network}.
+  // `projects/{project}/global/networks/{network}`.
   // Where {project} is a project number, as in '12345', and {network} is
   // network name.
   string network = 9 [(google.api.field_behavior) = OPTIONAL];
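
An illustrative sketch of the new `start_time` field on BatchReadFeatureValuesRequest, assuming a google-cloud-aiplatform Python client generated from this revision; all resource names, URIs, and IDs below are placeholders, and the timestamp is truncated to the millisecond precision the field comment requires:

    from google.cloud import aiplatform_v1
    from google.protobuf import timestamp_pb2

    # Only read feature values generated on or after this time.
    start = timestamp_pb2.Timestamp()
    start.FromJsonString("2022-11-01T00:00:00.500Z")
    start.nanos -= start.nanos % 1_000_000  # keep at most millisecond precision

    request = aiplatform_v1.BatchReadFeatureValuesRequest(
        featurestore="projects/my-project/locations/us-central1/featurestores/my_store",
        csv_read_instances=aiplatform_v1.CsvSource(
            gcs_source=aiplatform_v1.GcsSource(uris=["gs://my-bucket/read_instances.csv"])
        ),
        destination=aiplatform_v1.FeatureValueDestination(
            bigquery_destination=aiplatform_v1.BigQueryDestination(
                output_uri="bq://my-project.my_dataset.feature_export"
            )
        ),
        entity_type_specs=[
            aiplatform_v1.BatchReadFeatureValuesRequest.EntityTypeSpec(
                entity_type_id="users",
                feature_selector=aiplatform_v1.FeatureSelector(
                    id_matcher=aiplatform_v1.IdMatcher(ids=["age", "country"])
                ),
            )
        ],
        start_time=start,  # new optional field in this change
    )
    # client = aiplatform_v1.FeaturestoreServiceClient()
    # operation = client.batch_read_feature_values(request=request)
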
diff --git a/google/cloud/aiplatform/v1/metadata_service.proto b/google/cloud/aiplatform/v1/metadata_service.proto
index e1dd4fcc077d2..683f8ebd92384 100644
--- a/google/cloud/aiplatform/v1/metadata_service.proto
+++ b/google/cloud/aiplatform/v1/metadata_service.proto
@@ -556,7 +556,8 @@ message ListArtifactsRequest {
   //   `in_context("projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context-id>")`
   //
   // Each of the above supported filter types can be combined together using
-  // logical operators (`AND` & `OR`).
+  // logical operators (`AND` & `OR`). Maximum nested expression depth allowed
+  // is 5.
   //
   // For example: `display_name = "test" AND metadata.field1.bool_value = true`.
   string filter = 4;
@@ -750,7 +751,8 @@ message ListContextsRequest {
   //   ```
   //
   // Each of the above supported filters can be combined together using
-  // logical operators (`AND` & `OR`).
+  // logical operators (`AND` & `OR`). Maximum nested expression depth allowed
+  // is 5.
   //
   // For example: `display_name = "test" AND metadata.field1.bool_value = true`.
   string filter = 4;
@@ -1048,7 +1050,9 @@ message ListExecutionsRequest {
   //   `in_context("projects/<project_number>/locations/<location>/metadataStores/<metadatastore_name>/contexts/<context-id>")`
   //
   // Each of the above supported filters can be combined together using
-  // logical operators (`AND` & `OR`).
+  // logical operators (`AND` & `OR`). Maximum nested expression depth allowed
+  // is 5.
+  //
   // For example: `display_name = "test" AND metadata.field1.bool_value = true`.
   string filter = 4;
@@ -1310,7 +1314,8 @@ message QueryArtifactLineageSubgraphRequest {
   //   For example: `metadata.field_1.number_value = 10.0`
   //
   // Each of the above supported filter types can be combined together using
-  // logical operators (`AND` & `OR`).
+  // logical operators (`AND` & `OR`). Maximum nested expression depth allowed
+  // is 5.
   //
   // For example: `display_name = "test" AND metadata.field1.bool_value = true`.
   string filter = 3;
diff --git a/google/cloud/aiplatform/v1/model.proto b/google/cloud/aiplatform/v1/model.proto
index f34fad6ed281f..960a987db21bf 100644
--- a/google/cloud/aiplatform/v1/model.proto
+++ b/google/cloud/aiplatform/v1/model.proto
@@ -118,9 +118,9 @@ message Model {
   // User provided version aliases so that a model version can be referenced via
   // alias (i.e.
-  // projects/{project}/locations/{location}/models/{model_id}@{version_alias}
+  // `projects/{project}/locations/{location}/models/{model_id}@{version_alias}`
   // instead of auto-generated version id (i.e.
-  // projects/{project}/locations/{location}/models/{model_id}@{version_id}).
+  // `projects/{project}/locations/{location}/models/{model_id}@{version_id}`).
   // The format is [a-z][a-zA-Z0-9-]{0,126}[a-z0-9] to distinguish from
   // version_id. A default version alias will be created for the first version
   // of the model, and there must be exactly one default version alias for a
@@ -332,6 +332,11 @@ message Model {
   // Output only. Source of a model. It can either be automl training pipeline, custom
   // training pipeline, BigQuery ML, or existing Vertex AI Model.
   ModelSourceInfo model_source_info = 38 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // Output only. The resource name of the Artifact that was created in the MetadataStore
+  // when creating the Model. The Artifact resource name pattern is
+  // `projects/{project}/locations/{location}/metadataStores/{metadata_store}/artifacts/{artifact}`.
+  string metadata_artifact = 44 [(google.api.field_behavior) = OUTPUT_ONLY];
 }
 
 // Contains the schemata used in Model's predictions and explanations via
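
A short sketch of reading the new output-only `metadata_artifact` field, assuming a google-cloud-aiplatform Python client generated from this revision; the model resource name below is a placeholder:

    from google.cloud import aiplatform_v1

    model_client = aiplatform_v1.ModelServiceClient()
    metadata_client = aiplatform_v1.MetadataServiceClient()

    model = model_client.get_model(
        name="projects/my-project/locations/us-central1/models/456"
    )
    # New output-only field: the MetadataStore Artifact created for this Model,
    # in the projects/.../metadataStores/.../artifacts/... pattern described above.
    if model.metadata_artifact:
        artifact = metadata_client.get_artifact(name=model.metadata_artifact)
        print(artifact.display_name, artifact.schema_title)
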
diff --git a/google/cloud/aiplatform/v1/pipeline_job.proto b/google/cloud/aiplatform/v1/pipeline_job.proto
index 656a385b9aa7b..ca3415bfa431d 100644
--- a/google/cloud/aiplatform/v1/pipeline_job.proto
+++ b/google/cloud/aiplatform/v1/pipeline_job.proto
@@ -54,8 +54,8 @@ message PipelineJob {
   message InputArtifact {
     oneof kind {
       // Artifact resource id from MLMD. Which is the last portion of an
-      // artifact resource
-      // name(projects/{project}/locations/{location}/metadataStores/default/artifacts/{artifact_id}).
+      // artifact resource name:
+      // `projects/{project}/locations/{location}/metadataStores/default/artifacts/{artifact_id}`.
       // The artifact must stay within the same project, location and default
       // metadatastore as the pipeline.
       string artifact_id = 1;
@@ -337,6 +337,17 @@ message PipelineTaskExecutorDetail {
         type: "aiplatform.googleapis.com/CustomJob"
       }
     ];
+
+    // Output only. The names of the previously failed [CustomJob][google.cloud.aiplatform.v1.CustomJob] for the main container
+    // executions. The list includes all attempts in chronological order.
+    repeated string failed_main_jobs = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+    // Output only. The names of the previously failed [CustomJob][google.cloud.aiplatform.v1.CustomJob] for the
+    // pre-caching-check container executions. This job will be available if the
+    // [PipelineJob.pipeline_spec][google.cloud.aiplatform.v1.PipelineJob.pipeline_spec] specifies the `pre_caching_check` hook in
+    // the lifecycle events.
+    // The list includes all attempts in chronological order.
+    repeated string failed_pre_caching_check_jobs = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
   }
 
   // The detailed info for a custom job executor.
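
A minimal sketch of inspecting the new `failed_main_jobs` and `failed_pre_caching_check_jobs` fields, assuming a google-cloud-aiplatform Python client generated from this revision; the pipeline job name below is a placeholder:

    from google.cloud import aiplatform_v1

    client = aiplatform_v1.PipelineServiceClient()
    job = client.get_pipeline_job(
        name="projects/my-project/locations/us-central1/pipelineJobs/my-run"
    )
    for task in job.job_detail.task_details:
        detail = task.executor_detail.container_detail
        # New output-only fields: earlier failed CustomJob attempts, listed in
        # chronological order.
        for failed in detail.failed_main_jobs:
            print(f"task {task.task_name}: failed main job {failed}")
        for failed in detail.failed_pre_caching_check_jobs:
            print(f"task {task.task_name}: failed pre-caching-check job {failed}")
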
diff --git a/google/cloud/aiplatform/v1/tensorboard_service.proto b/google/cloud/aiplatform/v1/tensorboard_service.proto
index ed81dd1f28776..ec59778bd1a1e 100644
--- a/google/cloud/aiplatform/v1/tensorboard_service.proto
+++ b/google/cloud/aiplatform/v1/tensorboard_service.proto
@@ -467,7 +467,7 @@ message GetTensorboardExperimentRequest {
 message ListTensorboardExperimentsRequest {
   // Required. The resource name of the Tensorboard to list TensorboardExperiments.
   // Format:
-  // 'projects/{project}/locations/{location}/tensorboards/{tensorboard}'
+  // `projects/{project}/locations/{location}/tensorboards/{tensorboard}`
   string parent = 1 [
     (google.api.field_behavior) = REQUIRED,
     (google.api.resource_reference) = {
@@ -605,7 +605,7 @@ message GetTensorboardRunRequest {
 message ReadTensorboardBlobDataRequest {
   // Required. The resource name of the TensorboardTimeSeries to list Blobs.
   // Format:
-  // 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}'
+  // `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
   string time_series = 1 [
     (google.api.field_behavior) = REQUIRED,
     (google.api.resource_reference) = {
@@ -627,7 +627,7 @@ message ReadTensorboardBlobDataResponse {
 message ListTensorboardRunsRequest {
   // Required. The resource name of the TensorboardExperiment to list TensorboardRuns.
   // Format:
-  // 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}'
+  // `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}`
   string parent = 1 [
     (google.api.field_behavior) = REQUIRED,
     (google.api.resource_reference) = {
@@ -767,7 +767,7 @@ message GetTensorboardTimeSeriesRequest {
 message ListTensorboardTimeSeriesRequest {
   // Required. The resource name of the TensorboardRun to list TensorboardTimeSeries.
   // Format:
-  // 'projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}'
+  // `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}`
   string parent = 1 [
     (google.api.field_behavior) = REQUIRED,
     (google.api.resource_reference) = {
diff --git a/google/cloud/aiplatform/v1/training_pipeline.proto b/google/cloud/aiplatform/v1/training_pipeline.proto
index a0de8cf400fd9..ef5f35398e12b 100644
--- a/google/cloud/aiplatform/v1/training_pipeline.proto
+++ b/google/cloud/aiplatform/v1/training_pipeline.proto
@@ -290,6 +290,9 @@ message InputDataConfig {
   // Only one of [saved_query_id][google.cloud.aiplatform.v1.InputDataConfig.saved_query_id] and [annotation_schema_uri][google.cloud.aiplatform.v1.InputDataConfig.annotation_schema_uri] should be
   // specified as both of them represent the same thing: problem type.
   string saved_query_id = 7;
+
+  // Whether to persist the ML use assignment to data item system labels.
+  bool persist_ml_use_assignment = 11;
 }
 
 // Assigns the input data to training, validation, and test sets as per the
diff --git a/google/cloud/aiplatform/v1/user_action_reference.proto b/google/cloud/aiplatform/v1/user_action_reference.proto
index bf4c9f933fc05..102dac6d47fc6 100644
--- a/google/cloud/aiplatform/v1/user_action_reference.proto
+++ b/google/cloud/aiplatform/v1/user_action_reference.proto
@@ -31,13 +31,13 @@ message UserActionReference {
     // For API calls that return a long running operation.
     // Resource name of the long running operation.
     // Format:
-    // 'projects/{project}/locations/{location}/operations/{operation}'
+    // `projects/{project}/locations/{location}/operations/{operation}`
     string operation = 1;
 
     // For API calls that start a LabelingJob.
     // Resource name of the LabelingJob.
     // Format:
-    // 'projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}'
+    // `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
    string data_labeling_job = 2;
  }
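
A minimal sketch of the new `persist_ml_use_assignment` flag on InputDataConfig, assuming a google-cloud-aiplatform Python client generated from this revision; the dataset ID, split fractions, and display name are placeholders, and the training task definition and model settings of the TrainingPipeline are omitted for brevity:

    from google.cloud import aiplatform_v1

    input_config = aiplatform_v1.InputDataConfig(
        dataset_id="123",
        fraction_split=aiplatform_v1.FractionSplit(
            training_fraction=0.8, validation_fraction=0.1, test_fraction=0.1
        ),
        # New flag: record each DataItem's ML use (training/validation/test)
        # as a system label on the DataItem.
        persist_ml_use_assignment=True,
    )
    pipeline = aiplatform_v1.TrainingPipeline(
        display_name="my-training-pipeline",
        input_data_config=input_config,
    )
    # client = aiplatform_v1.PipelineServiceClient()
    # client.create_training_pipeline(
    #     parent="projects/my-project/locations/us-central1",
    #     training_pipeline=pipeline,
    # )
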