From 2ff7db7a5936997ad476ce840a9b7203bbd84a0c Mon Sep 17 00:00:00 2001 From: yoshi-automation Date: Fri, 10 Jul 2020 04:25:01 -0700 Subject: [PATCH] feat(dataproc): update the API #### dataproc:v1 The following keys were added: - schemas.ClusterConfig.properties.endpointConfig.$ref - schemas.ClusterConfig.properties.endpointConfig.description - schemas.ClusterConfig.properties.tempBucket.description - schemas.ClusterConfig.properties.tempBucket.type - schemas.EndpointConfig.description - schemas.EndpointConfig.id - schemas.EndpointConfig.properties.enableHttpPortAccess.description - schemas.EndpointConfig.properties.enableHttpPortAccess.type - schemas.EndpointConfig.properties.httpPorts.additionalProperties.type - schemas.EndpointConfig.properties.httpPorts.description - schemas.EndpointConfig.properties.httpPorts.type - schemas.EndpointConfig.type The following keys were changed: - schemas.JobReference.properties.projectId.description - schemas.SoftwareConfig.properties.optionalComponents.enumDescriptions - schemas.WorkflowTemplate.properties.parameters.description #### dataproc:v1beta2 The following keys were added: - schemas.ClusterConfig.properties.tempBucket.description - schemas.ClusterConfig.properties.tempBucket.type - schemas.WorkflowMetadata.properties.dagEndTime.description - schemas.WorkflowMetadata.properties.dagEndTime.format - schemas.WorkflowMetadata.properties.dagEndTime.type - schemas.WorkflowMetadata.properties.dagStartTime.description - schemas.WorkflowMetadata.properties.dagStartTime.format - schemas.WorkflowMetadata.properties.dagStartTime.type - schemas.WorkflowMetadata.properties.dagTimeout.description - schemas.WorkflowMetadata.properties.dagTimeout.format - schemas.WorkflowMetadata.properties.dagTimeout.type - schemas.WorkflowTemplate.properties.dagTimeout.description - schemas.WorkflowTemplate.properties.dagTimeout.format - schemas.WorkflowTemplate.properties.dagTimeout.type The following keys were changed: - schemas.JobReference.properties.projectId.description - schemas.SoftwareConfig.properties.optionalComponents.enumDescriptions - schemas.WorkflowTemplate.description --- discovery/dataproc-v1.json | 34 +++++++++++++++++++++++++++---- discovery/dataproc-v1beta2.json | 32 +++++++++++++++++++++++++---- src/apis/dataproc/v1.ts | 25 +++++++++++++++++++++-- src/apis/dataproc/v1beta2.ts | 36 +++++++++++++++++++++++++++++++-- 4 files changed, 115 insertions(+), 12 deletions(-) diff --git a/discovery/dataproc-v1.json b/discovery/dataproc-v1.json index 392212dbcc..674c54a433 100644 --- a/discovery/dataproc-v1.json +++ b/discovery/dataproc-v1.json @@ -2129,7 +2129,7 @@ } } }, - "revision": "20200528", + "revision": "20200619", "rootUrl": "https://dataproc.googleapis.com/", "schemas": { "AcceleratorConfig": { @@ -2322,6 +2322,10 @@ "$ref": "EncryptionConfig", "description": "Optional. Encryption settings for the cluster." }, + "endpointConfig": { + "$ref": "EndpointConfig", + "description": "Optional. Port/endpoint configuration for this cluster" + }, "gceClusterConfig": { "$ref": "GceClusterConfig", "description": "Optional. The shared Compute Engine config settings for all instances in a cluster." @@ -2353,6 +2357,10 @@ "$ref": "SoftwareConfig", "description": "Optional. The config settings for software inside the cluster." }, + "tempBucket": { + "description": "Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. 
If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.", + "type": "string" + }, "workerConfig": { "$ref": "InstanceGroupConfig", "description": "Optional. The Compute Engine config settings for worker instances in a cluster." @@ -2609,6 +2617,24 @@ }, "type": "object" }, + "EndpointConfig": { + "description": "Endpoint config for this cluster", + "id": "EndpointConfig", + "properties": { + "enableHttpPortAccess": { + "description": "Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false.", + "type": "boolean" + }, + "httpPorts": { + "additionalProperties": { + "type": "string" + }, + "description": "Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true.", + "type": "object" + } + }, + "type": "object" + }, "Expr": { "description": "Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec.Example (Comparison):\ntitle: \"Summary size limit\"\ndescription: \"Determines if a summary is less than 100 chars\"\nexpression: \"document.summary.size() < 100\"\nExample (Equality):\ntitle: \"Requestor is owner\"\ndescription: \"Determines if requestor is the document owner\"\nexpression: \"document.owner == request.auth.claims.email\"\nExample (Logic):\ntitle: \"Public documents\"\ndescription: \"Determine whether the document should be publicly visible\"\nexpression: \"document.type != 'private' && document.type != 'internal'\"\nExample (Data Manipulation):\ntitle: \"Notification string\"\ndescription: \"Create a notification string with a timestamp.\"\nexpression: \"'New message received at ' + string(document.create_time)\"\nThe exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information.", "id": "Expr", @@ -3050,7 +3076,7 @@ "type": "string" }, "projectId": { - "description": "Required. The ID of the Google Cloud Platform project that the job belongs to.", + "description": "Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID.", "type": "string" } }, @@ -3763,7 +3789,7 @@ "optionalComponents": { "description": "Optional. The set of components to activate on the cluster.", "enumDescriptions": [ - "Unspecified component.", + "Unspecified component. Specifying this will cause Cluster creation to fail.", "The Anaconda python distribution.", "The Hive Web HCatalog (the REST service for accessing HCatalog).", "The Jupyter Notebook.", @@ -4202,7 +4228,7 @@ "type": "string" }, "parameters": { - "description": "Optional. emplate parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated.", + "description": "Optional. Template parameters whose values are substituted into the template. 
Values for parameters must be provided when the template is instantiated.", "items": { "$ref": "TemplateParameter" }, diff --git a/discovery/dataproc-v1beta2.json b/discovery/dataproc-v1beta2.json index 2a3ef8bcf9..4ff63c342c 100644 --- a/discovery/dataproc-v1beta2.json +++ b/discovery/dataproc-v1beta2.json @@ -2242,7 +2242,7 @@ } } }, - "revision": "20200528", + "revision": "20200619", "rootUrl": "https://dataproc.googleapis.com/", "schemas": { "AcceleratorConfig": { @@ -2474,6 +2474,10 @@ "$ref": "SoftwareConfig", "description": "Optional. The config settings for software inside the cluster." }, + "tempBucket": { + "description": "Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket.", + "type": "string" + }, "workerConfig": { "$ref": "InstanceGroupConfig", "description": "Optional. The Compute Engine config settings for worker instances in a cluster." @@ -3214,7 +3218,7 @@ "type": "string" }, "projectId": { - "description": "Required. The ID of the Google Cloud Platform project that the job belongs to.", + "description": "Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID.", "type": "string" } }, @@ -3942,7 +3946,7 @@ "optionalComponents": { "description": "The set of optional components to activate on the cluster.", "enumDescriptions": [ - "Unspecified component.", + "Unspecified component. Specifying this will cause Cluster creation to fail.", "The Anaconda python distribution.", "The Druid query engine.", "HBase.", @@ -4291,6 +4295,21 @@ "$ref": "ClusterOperation", "description": "Output only. The create cluster operation metadata." }, + "dagEndTime": { + "description": "Output only. DAG end time, only set for workflows with dag_timeout when DAG ends.", + "format": "google-datetime", + "type": "string" + }, + "dagStartTime": { + "description": "Output only. DAG start time, only set for workflows with dag_timeout when DAG begins.", + "format": "google-datetime", + "type": "string" + }, + "dagTimeout": { + "description": "Output only. The timeout duration for the DAG of jobs. Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed as a google.protobuf.Duration. For example, \"1800\" = 1800 seconds/30 minutes duration.", + "format": "google-duration", + "type": "string" + }, "deleteCluster": { "$ref": "ClusterOperation", "description": "Output only. The delete cluster operation metadata." @@ -4391,7 +4410,7 @@ "type": "object" }, "WorkflowTemplate": { - "description": "A Dataproc workflow template resource. Next ID: 11", + "description": "A Dataproc workflow template resource.", "id": "WorkflowTemplate", "properties": { "createTime": { @@ -4399,6 +4418,11 @@ "format": "google-datetime", "type": "string" }, + "dagTimeout": { + "description": "Optional. Timeout for DAG of jobs. The timer begins when the first job is submitted. Minimum duration of 10 minutes, max of 24 hours.", + "format": "google-duration", + "type": "string" + }, "id": { "description": "Required. 
The template id.The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters..", "type": "string" diff --git a/src/apis/dataproc/v1.ts b/src/apis/dataproc/v1.ts index 261ad3de80..229855a74b 100644 --- a/src/apis/dataproc/v1.ts +++ b/src/apis/dataproc/v1.ts @@ -283,6 +283,10 @@ export namespace dataproc_v1 { * Optional. Encryption settings for the cluster. */ encryptionConfig?: Schema$EncryptionConfig; + /** + * Optional. Port/endpoint configuration for this cluster + */ + endpointConfig?: Schema$EndpointConfig; /** * Optional. The shared Compute Engine config settings for all instances in a cluster. */ @@ -311,6 +315,10 @@ export namespace dataproc_v1 { * Optional. The config settings for software inside the cluster. */ softwareConfig?: Schema$SoftwareConfig; + /** + * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. + */ + tempBucket?: string | null; /** * Optional. The Compute Engine config settings for worker instances in a cluster. */ @@ -481,6 +489,19 @@ export namespace dataproc_v1 { */ gcePdKmsKeyName?: string | null; } + /** + * Endpoint config for this cluster + */ + export interface Schema$EndpointConfig { + /** + * Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false. + */ + enableHttpPortAccess?: boolean | null; + /** + * Output only. The map of port descriptions to URLs. Will only be populated if enable_http_port_access is true. + */ + httpPorts?: {[key: string]: string} | null; + } /** * Represents a textual expression in the Common Expression Language (CEL) syntax. CEL is a C-like expression language. The syntax and semantics of CEL are documented at https://github.com/google/cel-spec.Example (Comparison): title: "Summary size limit" description: "Determines if a summary is less than 100 chars" expression: "document.summary.size() < 100" Example (Equality): title: "Requestor is owner" description: "Determines if requestor is the document owner" expression: "document.owner == request.auth.claims.email" Example (Logic): title: "Public documents" description: "Determine whether the document should be publicly visible" expression: "document.type != 'private' && document.type != 'internal'" Example (Data Manipulation): title: "Notification string" description: "Create a notification string with a timestamp." expression: "'New message received at ' + string(document.create_time)" The exact variables and functions that may be referenced within an expression are determined by the service that evaluates it. See the service documentation for additional information. */ @@ -830,7 +851,7 @@ export namespace dataproc_v1 { */ jobId?: string | null; /** - * Required. The ID of the Google Cloud Platform project that the job belongs to. + * Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID. 
*/ projectId?: string | null; } @@ -1626,7 +1647,7 @@ export namespace dataproc_v1 { */ name?: string | null; /** - * Optional. emplate parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated. + * Optional. Template parameters whose values are substituted into the template. Values for parameters must be provided when the template is instantiated. */ parameters?: Schema$TemplateParameter[]; /** diff --git a/src/apis/dataproc/v1beta2.ts b/src/apis/dataproc/v1beta2.ts index 6205906593..eb91d6a10b 100644 --- a/src/apis/dataproc/v1beta2.ts +++ b/src/apis/dataproc/v1beta2.ts @@ -319,6 +319,10 @@ export namespace dataproc_v1beta2 { * Optional. The config settings for software inside the cluster. */ softwareConfig?: Schema$SoftwareConfig; + /** + * Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, such as Spark and MapReduce history files. If you do not specify a temp bucket, Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your cluster's temp bucket according to the Compute Engine zone where your cluster is deployed, and then create and manage this project-level, per-location bucket. The default bucket has a TTL of 90 days, but you can use any TTL (or none) if you specify a bucket. + */ + tempBucket?: string | null; /** * Optional. The Compute Engine config settings for worker instances in a cluster. */ @@ -868,7 +872,7 @@ export namespace dataproc_v1beta2 { */ jobId?: string | null; /** - * Required. The ID of the Google Cloud Platform project that the job belongs to. + * Optional. The ID of the Google Cloud Platform project that the job belongs to. If specified, must match the request project ID. */ projectId?: string | null; } @@ -1623,6 +1627,18 @@ export namespace dataproc_v1beta2 { * Output only. The create cluster operation metadata. */ createCluster?: Schema$ClusterOperation; + /** + * Output only. DAG end time, only set for workflows with dag_timeout when DAG ends. + */ + dagEndTime?: string | null; + /** + * Output only. DAG start time, only set for workflows with dag_timeout when DAG begins. + */ + dagStartTime?: string | null; + /** + * Output only. The timeout duration for the DAG of jobs. Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed as a google.protobuf.Duration. For example, "1800" = 1800 seconds/30 minutes duration. + */ + dagTimeout?: string | null; /** * Output only. The delete cluster operation metadata. */ @@ -1682,13 +1698,17 @@ export namespace dataproc_v1beta2 { stepId?: string | null; } /** - * A Dataproc workflow template resource. Next ID: 11 + * A Dataproc workflow template resource. */ export interface Schema$WorkflowTemplate { /** * Output only. The time template was created. */ createTime?: string | null; + /** + * Optional. Timeout for DAG of jobs. The timer begins when the first job is submitted. Minimum duration of 10 minutes, max of 24 hours. + */ + dagTimeout?: string | null; /** * Required. The template id.The id must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters.. 
*/ @@ -3076,6 +3096,7 @@ export namespace dataproc_v1beta2 { * // request body parameters * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -3092,6 +3113,7 @@ export namespace dataproc_v1beta2 { * // Example response * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -3369,6 +3391,7 @@ export namespace dataproc_v1beta2 { * // Example response * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -3800,6 +3823,7 @@ export namespace dataproc_v1beta2 { * // request body parameters * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -4399,6 +4423,7 @@ export namespace dataproc_v1beta2 { * // request body parameters * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -4415,6 +4440,7 @@ export namespace dataproc_v1beta2 { * // Example response * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -10603,6 +10629,7 @@ export namespace dataproc_v1beta2 { * // request body parameters * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -10619,6 +10646,7 @@ export namespace dataproc_v1beta2 { * // Example response * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -10896,6 +10924,7 @@ export namespace dataproc_v1beta2 { * // Example response * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -11327,6 +11356,7 @@ export namespace dataproc_v1beta2 { * // request body parameters * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -11926,6 +11956,7 @@ export namespace dataproc_v1beta2 { * // request body parameters * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {}, @@ -11942,6 +11973,7 @@ export namespace dataproc_v1beta2 { * // Example response * // { * // "createTime": "my_createTime", + * // "dagTimeout": "my_dagTimeout", * // "id": "my_id", * // "jobs": [], * // "labels": {},