Skip to content

Commit

Permalink
feat(dataproc): update the API
Browse files Browse the repository at this point in the history
#### dataproc:v1

The following keys were added:
- schemas.DiagnoseClusterRequest.properties.tarballAccess.description
- schemas.DiagnoseClusterRequest.properties.tarballAccess.enum
- schemas.DiagnoseClusterRequest.properties.tarballAccess.enumDescriptions
- schemas.DiagnoseClusterRequest.properties.tarballAccess.type
- schemas.EncryptionConfig.properties.kmsKey.description
- schemas.EncryptionConfig.properties.kmsKey.type
- schemas.GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig.description
- schemas.GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig.id
- schemas.GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig.properties.kmsKey.description
- schemas.GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig.properties.kmsKey.type
- schemas.GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig.type
- schemas.ListJobsResponse.properties.unreachable.description
- schemas.ListJobsResponse.properties.unreachable.items.type
- schemas.ListJobsResponse.properties.unreachable.readOnly
- schemas.ListJobsResponse.properties.unreachable.type
- schemas.ListWorkflowTemplatesResponse.properties.unreachable.description
- schemas.ListWorkflowTemplatesResponse.properties.unreachable.items.type
- schemas.ListWorkflowTemplatesResponse.properties.unreachable.readOnly
- schemas.ListWorkflowTemplatesResponse.properties.unreachable.type
- schemas.UsageMetrics.properties.acceleratorType.description
- schemas.UsageMetrics.properties.acceleratorType.type
- schemas.UsageMetrics.properties.milliAcceleratorSeconds.description
- schemas.UsageMetrics.properties.milliAcceleratorSeconds.format
- schemas.UsageMetrics.properties.milliAcceleratorSeconds.type
- schemas.UsageSnapshot.properties.acceleratorType.description
- schemas.UsageSnapshot.properties.acceleratorType.type
- schemas.UsageSnapshot.properties.milliAccelerator.description
- schemas.UsageSnapshot.properties.milliAccelerator.format
- schemas.UsageSnapshot.properties.milliAccelerator.type
- schemas.WorkflowTemplate.properties.encryptionConfig.$ref
- schemas.WorkflowTemplate.properties.encryptionConfig.description

The following keys were changed:
- schemas.NodeGroupOperationMetadata.properties.operationType.enum
- schemas.NodeGroupOperationMetadata.properties.operationType.enumDescriptions
  • Loading branch information
yoshi-automation authored and sofisl committed Nov 27, 2023
1 parent e44128d commit 9ab8a14
Show file tree
Hide file tree
Showing 2 changed files with 117 additions and 3 deletions.
75 changes: 72 additions & 3 deletions discovery/dataproc-v1.json
Original file line number Diff line number Diff line change
Expand Up @@ -3006,7 +3006,7 @@
}
}
},
"revision": "20231012",
"revision": "20231109",
"rootUrl": "https://dataproc.googleapis.com/",
"schemas": {
"AcceleratorConfig": {
Expand Down Expand Up @@ -3757,6 +3757,20 @@
},
"type": "array"
},
"tarballAccess": {
"description": "Optional. The access type to the diagnostic tarball. If not specified, falls back to the default access of the bucket.",
"enum": [
"TARBALL_ACCESS_UNSPECIFIED",
"GOOGLE_CLOUD_SUPPORT",
"GOOGLE_DATAPROC_DIAGNOSE"
],
"enumDescriptions": [
"Tarball Access unspecified. Falls back to default access of the bucket",
"Google Cloud Support group has read access to the diagnostic tarball",
"Google Cloud Dataproc Diagnose service account has read access to the diagnostic tarball"
],
"type": "string"
},
"tarballGcsDir": {
"description": "Optional. The output Cloud Storage directory for the diagnostic tarball. If not specified, a task-specific directory in the cluster's staging bucket will be used.",
"type": "string"
Expand Down Expand Up @@ -3843,6 +3857,10 @@
"gcePdKmsKeyName": {
"description": "Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.",
"type": "string"
},
"kmsKey": {
"description": "Optional. The Cloud KMS key name to use for encrypting customer core content in spanner and cluster PD disk for all instances in the cluster.",
"type": "string"
}
},
"type": "object"
Expand Down Expand Up @@ -4252,6 +4270,17 @@
},
"type": "object"
},
"GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig": {
"description": "Encryption settings for encrypting customer core content. NEXT ID: 2",
"id": "GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig",
"properties": {
"kmsKey": {
"description": "Optional. The Cloud KMS key name to use for encrypting customer core content.",
"type": "string"
}
},
"type": "object"
},
"HadoopJob": {
"description": "A Dataproc job for running Apache Hadoop MapReduce (https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) jobs on Apache Hadoop YARN (https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).",
"id": "HadoopJob",
Expand Down Expand Up @@ -5102,6 +5131,14 @@
"nextPageToken": {
"description": "Optional. This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListJobsRequest.",
"type": "string"
},
"unreachable": {
"description": "Output only. List of jobs that could not be included in the response. Attempting to get one of these resources may indicate why it was not included in the list response.",
"items": {
"type": "string"
},
"readOnly": true,
"type": "array"
}
},
"type": "object"
Expand Down Expand Up @@ -5178,6 +5215,14 @@
},
"readOnly": true,
"type": "array"
},
"unreachable": {
"description": "Output only. List of workflow templates that could not be included in the response. Attempting to get one of these resources may indicate why it was not included in the list response.",
"items": {
"type": "string"
},
"readOnly": true,
"type": "array"
}
},
"type": "object"
Expand Down Expand Up @@ -5411,14 +5456,16 @@
"CREATE",
"UPDATE",
"DELETE",
"RESIZE"
"RESIZE",
"REPAIR"
],
"enumDescriptions": [
"Node group operation type is unknown.",
"Create node group operation type.",
"Update node group operation type.",
"Delete node group operation type.",
"Resize node group operation type."
"Resize node group operation type.",
"Repair node group operation type."
],
"type": "string"
},
Expand Down Expand Up @@ -6915,6 +6962,15 @@
"description": "Usage metrics represent approximate total resources consumed by a workload.",
"id": "UsageMetrics",
"properties": {
"acceleratorType": {
"description": "Optional. Accelerator type being used, if any",
"type": "string"
},
"milliAcceleratorSeconds": {
"description": "Optional. Accelerator usage in (milliAccelerator x seconds) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).",
"format": "int64",
"type": "string"
},
"milliDcuSeconds": {
"description": "Optional. DCU (Dataproc Compute Units) usage in (milliDCU x seconds) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).",
"format": "int64",
Expand All @@ -6932,6 +6988,15 @@
"description": "The usage snapshot represents the resources consumed by a workload at a specified time.",
"id": "UsageSnapshot",
"properties": {
"acceleratorType": {
"description": "Optional. Accelerator type being used, if any",
"type": "string"
},
"milliAccelerator": {
"description": "Optional. Milli (one-thousandth) accelerator. (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing))",
"format": "int64",
"type": "string"
},
"milliDcu": {
"description": "Optional. Milli (one-thousandth) Dataproc Compute Units (DCUs) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).",
"format": "int64",
Expand Down Expand Up @@ -7171,6 +7236,10 @@
"format": "google-duration",
"type": "string"
},
"encryptionConfig": {
"$ref": "GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig",
"description": "Optional. Encryption settings for the encrypting customer core content."
},
"id": {
"type": "string"
},
Expand Down
45 changes: 45 additions & 0 deletions src/apis/dataproc/v1.ts
Original file line number Diff line number Diff line change
Expand Up @@ -645,6 +645,10 @@ export namespace dataproc_v1 {
* Optional. Specifies a list of jobs on which diagnosis is to be performed. Format: projects/{project\}/regions/{region\}/jobs/{job\}
*/
jobs?: string[] | null;
/**
* Optional. The access type to the diagnostic tarball. If not specified, falls back to the default access of the bucket.
*/
tarballAccess?: string | null;
/**
* Optional. The output Cloud Storage directory for the diagnostic tarball. If not specified, a task-specific directory in the cluster's staging bucket will be used.
*/
Expand Down Expand Up @@ -713,6 +717,10 @@ export namespace dataproc_v1 {
* Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster.
*/
gcePdKmsKeyName?: string | null;
/**
* Optional. The Cloud KMS key name to use for encrypting customer core content in spanner and cluster PD disk for all instances in the cluster.
*/
kmsKey?: string | null;
}
/**
* Endpoint config for this cluster
Expand Down Expand Up @@ -1020,6 +1028,15 @@ export namespace dataproc_v1 {
*/
roles?: string[] | null;
}
  /**
   * Encryption settings for encrypting customer core content. NEXT ID: 2
   */
  export interface Schema$GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig {
    /**
     * Optional. The Cloud KMS key resource name to use for encrypting customer core content.
     */
    kmsKey?: string | null;
  }
/**
* A Dataproc job for running Apache Hadoop MapReduce (https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) jobs on Apache Hadoop YARN (https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).
*/
Expand Down Expand Up @@ -1634,6 +1651,10 @@ export namespace dataproc_v1 {
* Optional. This token is included in the response if there are more results to fetch. To fetch additional results, provide this value as the page_token in a subsequent ListJobsRequest.
*/
nextPageToken?: string | null;
/**
* Output only. List of jobs that could not be included in the response. Attempting to get one of these resources may indicate why it was not included in the list response.
*/
unreachable?: string[] | null;
}
/**
* The response message for Operations.ListOperations.
Expand Down Expand Up @@ -1686,6 +1707,10 @@ export namespace dataproc_v1 {
* Output only. WorkflowTemplates list.
*/
templates?: Schema$WorkflowTemplate[];
/**
* Output only. List of workflow templates that could not be included in the response. Attempting to get one of these resources may indicate why it was not included in the list response.
*/
unreachable?: string[] | null;
}
/**
* The runtime logging config of the job.
Expand Down Expand Up @@ -2855,6 +2880,14 @@ export namespace dataproc_v1 {
* Usage metrics represent approximate total resources consumed by a workload.
*/
export interface Schema$UsageMetrics {
/**
* Optional. Accelerator type being used, if any
*/
acceleratorType?: string | null;
/**
* Optional. Accelerator usage in (milliAccelerator x seconds) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
*/
milliAcceleratorSeconds?: string | null;
/**
* Optional. DCU (Dataproc Compute Units) usage in (milliDCU x seconds) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
*/
Expand All @@ -2868,6 +2901,14 @@ export namespace dataproc_v1 {
* The usage snapshot represents the resources consumed by a workload at a specified time.
*/
export interface Schema$UsageSnapshot {
/**
* Optional. Accelerator type being used, if any
*/
acceleratorType?: string | null;
/**
* Optional. Milli (one-thousandth) accelerator. (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing))
*/
milliAccelerator?: string | null;
/**
* Optional. Milli (one-thousandth) Dataproc Compute Units (DCUs) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)).
*/
Expand Down Expand Up @@ -3022,6 +3063,10 @@ export namespace dataproc_v1 {
* Optional. Timeout duration for the DAG of jobs, expressed in seconds (see JSON representation of duration (https://developers.google.com/protocol-buffers/docs/proto3#json)). The timeout duration must be from 10 minutes ("600s") to 24 hours ("86400s"). The timer begins when the first job is submitted. If the workflow is running at the end of the timeout period, any remaining jobs are cancelled, the workflow is ended, and if the workflow was running on a managed cluster, the cluster is deleted.
*/
dagTimeout?: string | null;
/**
* Optional. Encryption settings for the encrypting customer core content.
*/
encryptionConfig?: Schema$GoogleCloudDataprocV1WorkflowTemplateEncryptionConfig;
id?: string | null;
/**
* Required. The Directed Acyclic Graph of Jobs to submit.
Expand Down

0 comments on commit 9ab8a14

Please sign in to comment.