[CHANGE ME] Re-generated dataproc to pick up changes in the API or client library generator. (#8152)
yoshi-automation authored and sduskis committed May 28, 2019
1 parent ebaae2d commit 2e4f575
Showing 14 changed files with 292 additions and 159 deletions.
@@ -19,6 +19,25 @@
import enum


+class Component(enum.IntEnum):
+    """
+    Cluster components that can be activated.
+
+    Attributes:
+        COMPONENT_UNSPECIFIED (int): Unspecified component.
+        ANACONDA (int): The Anaconda python distribution.
+        HIVE_WEBHCAT (int): The Hive Web HCatalog (the REST service for accessing HCatalog).
+        JUPYTER (int): The Jupyter Notebook.
+        ZEPPELIN (int): The Zeppelin notebook.
+    """
+
+    COMPONENT_UNSPECIFIED = 0
+    ANACONDA = 5
+    HIVE_WEBHCAT = 3
+    JUPYTER = 1
+    ZEPPELIN = 4
+
+
class ClusterOperationStatus(object):
    class State(enum.IntEnum):
        """
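The added Component values are what the new SoftwareConfig.optional_components field accepts (see the clusters.proto hunk below). A minimal usage sketch, assuming the generated package exposes the enum as google.cloud.dataproc_v1.enums.Component and the messages under dataproc_v1.types:

from google.cloud import dataproc_v1
from google.cloud.dataproc_v1 import enums  # assumed export of the generated enums module

# Select the optional components to activate on a new cluster.
software_config = dataproc_v1.types.SoftwareConfig(
    optional_components=[
        enums.Component.ANACONDA,
        enums.Component.JUPYTER,
    ],
)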
@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -18,7 +18,7 @@ syntax = "proto3";
package google.cloud.dataproc.v1;

import "google/api/annotations.proto";
import "google/cloud/dataproc/v1/operations.proto";
import "google/cloud/dataproc/v1/shared.proto";
import "google/longrunning/operations.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/field_mask.proto";
@@ -33,26 +33,23 @@ option java_package = "com.google.cloud.dataproc.v1";
// of Compute Engine instances.
service ClusterController {
// Creates a cluster in a project.
-rpc CreateCluster(CreateClusterRequest)
-    returns (google.longrunning.Operation) {
+rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
post: "/v1/projects/{project_id}/regions/{region}/clusters"
body: "cluster"
};
}

// Updates a cluster in a project.
-rpc UpdateCluster(UpdateClusterRequest)
-    returns (google.longrunning.Operation) {
+rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
patch: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}"
body: "cluster"
};
}

// Deletes a cluster in a project.
-rpc DeleteCluster(DeleteClusterRequest)
-    returns (google.longrunning.Operation) {
+rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
delete: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}"
};
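A hedged sketch of driving these RPCs from the generated Python client; the ClusterControllerClient surface and the positional (project_id, region, cluster) signature are assumed from the GAPIC layer, and all resource names are placeholders:

from google.cloud import dataproc_v1

client = dataproc_v1.ClusterControllerClient()
cluster = {
    "project_id": "my-project",        # placeholder project
    "cluster_name": "example-cluster",
    "config": {},                      # empty config lets the service choose defaults
}
# create_cluster returns a long-running operation wrapper;
# result() blocks until the underlying Operation completes.
operation = client.create_cluster("my-project", "us-central1", cluster)
created = operation.result()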
@@ -75,8 +72,7 @@ service ClusterController {
// Gets cluster diagnostic information.
// After the operation completes, the Operation.response field
// contains `DiagnoseClusterOutputLocation`.
-rpc DiagnoseCluster(DiagnoseClusterRequest)
-    returns (google.longrunning.Operation) {
+rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose"
body: "*"
@@ -126,15 +122,18 @@ message Cluster {

// The cluster config.
message ClusterConfig {
-// Optional. A Cloud Storage staging bucket used for sharing generated
-// SSH keys and config. If you do not specify a staging bucket, Cloud
-// Dataproc will determine an appropriate Cloud Storage location (US,
+// Optional. A Google Cloud Storage bucket used to stage job
+// dependencies, config files, and job driver console output.
+// If you do not specify a staging bucket, Cloud
+// Dataproc will determine a Cloud Storage location (US,
// ASIA, or EU) for your cluster's staging bucket according to the Google
-// Compute Engine zone where your cluster is deployed, and then it will create
-// and manage this project-level, per-location bucket for you.
+// Compute Engine zone where your cluster is deployed, and then create
+// and manage this project-level, per-location bucket (see
+// [Cloud Dataproc staging
+// bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
string config_bucket = 1;

-// Required. The shared Compute Engine config settings for
+// Optional. The shared Compute Engine config settings for
// all instances in a cluster.
GceClusterConfig gce_cluster_config = 8;

@@ -213,8 +212,8 @@ message GceClusterConfig {
//
// A full URL, partial URI, or short name are valid. Examples:
//
-// * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
-// * `projects/[project_id]/regions/us-east1/sub0`
+// * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0`
+// * `projects/[project_id]/regions/us-east1/subnetworks/sub0`
// * `sub0`
string subnetwork_uri = 6;
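For illustration, the corrected partial-URI form could be supplied like this (a sketch continuing the client example above; the value is hypothetical):

gce_config = dataproc_v1.types.GceClusterConfig(
    subnetwork_uri="projects/my-project/regions/us-east1/subnetworks/sub0",
)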

@@ -447,13 +446,13 @@ message SoftwareConfig {
// such as "1.2" (including a subminor version, such as "1.2.29"), or the
// ["preview"
// version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
-// If unspecified, it defaults to the latest version.
+// If unspecified, it defaults to the latest Debian version.
string image_version = 1;

// Optional. The properties to set on daemon config files.
//
-// Property keys are specified in `prefix:property` format, such as
-// `core:fs.defaultFS`. The following are supported prefixes
+// Property keys are specified in `prefix:property` format, for example
+// `core:hadoop.tmp.dir`. The following are supported prefixes
// and their mappings:
//
// * capacity-scheduler: `capacity-scheduler.xml`
@@ -469,6 +468,9 @@ message SoftwareConfig {
// For more information, see
// [Cluster properties](/dataproc/docs/concepts/cluster-properties).
map<string, string> properties = 2;

+// The set of optional components to activate on the cluster.
+repeated Component optional_components = 3;
}

// Contains cluster daemon metrics, such as HDFS and YARN stats.
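To illustrate the prefix:property convention from the SoftwareConfig hunk above, a sketch of a populated config (property keys are examples only; the core prefix mapping into core-site.xml follows the prefix table):

software_config = dataproc_v1.types.SoftwareConfig(
    image_version="1.4",  # omitting this selects the latest Debian-based image
    properties={
        "core:hadoop.tmp.dir": "/tmp/hadoop",  # prefix "core" -> core-site.xml
        "capacity-scheduler:yarn.scheduler.capacity.maximum-applications": "10000",
    },
)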
@@ -496,11 +498,10 @@ message CreateClusterRequest {
Cluster cluster = 2;

// Optional. A unique id used to identify the request. If the server
-// receives two
-// [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest]
-// requests with the same id, then the second request will be ignored and the
-// first [google.longrunning.Operation][google.longrunning.Operation] created
-// and stored in the backend is returned.
+// receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests with the same
+// id, then the second request will be ignored and the
+// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+// is returned.
//
// It is recommended to always set this value to a
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
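A sketch of supplying the request id, continuing the client example above (request_id as a keyword argument is assumed from the generated client surface):

import uuid

# A fresh UUID per logical request makes retries idempotent: a second
# CreateClusterRequest carrying the same id is ignored and the first
# Operation is returned.
operation = client.create_cluster(
    "my-project", "us-central1", cluster, request_id=str(uuid.uuid4())
)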
@@ -585,11 +586,10 @@ message UpdateClusterRequest {
google.protobuf.FieldMask update_mask = 4;

// Optional. A unique id used to identify the request. If the server
-// receives two
-// [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest]
-// requests with the same id, then the second request will be ignored and the
-// first [google.longrunning.Operation][google.longrunning.Operation] created
-// and stored in the backend is returned.
+// receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+// id, then the second request will be ignored and the
+// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+// backend is returned.
//
// It is recommended to always set this value to a
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -616,11 +616,10 @@ message DeleteClusterRequest {
string cluster_uuid = 4;

// Optional. A unique id used to identify the request. If the server
-// receives two
-// [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest]
-// requests with the same id, then the second request will be ignored and the
-// first [google.longrunning.Operation][google.longrunning.Operation] created
-// and stored in the backend is returned.
+// receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+// id, then the second request will be ignored and the
+// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+// backend is returned.
//
// It is recommended to always set this value to a
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).

Large diffs are not rendered by default.

@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -473,11 +473,12 @@ message JobReference {
// belongs to.
string project_id = 1;

-// Optional. The job ID, which must be unique within the project. The job ID
-// is generated by the server upon job submission or provided by the user as a
-// means to perform retries without creating duplicate jobs. The ID must
-// contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
-// hyphens (-). The maximum length is 100 characters.
+// Optional. The job ID, which must be unique within the project.
+//
+// The ID must contain only letters (a-z, A-Z), numbers (0-9),
+// underscores (_), or hyphens (-). The maximum length is 100 characters.
+//
+// If not specified by the caller, the job ID will be provided by the server.
string job_id = 2;
}
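A sketch of setting a caller-chosen job ID on submission (JobControllerClient and the submit_job signature are assumed from the generated surface; names are placeholders):

job_client = dataproc_v1.JobControllerClient()
job = {
    "reference": {"job_id": "word-count-1"},  # letters, digits, hyphens, underscores only
    "placement": {"cluster_name": "example-cluster"},
    "hadoop_job": {"main_class": "org.apache.hadoop.examples.WordCount"},
}
# If reference.job_id were omitted, the server would generate one.
submitted = job_client.submit_job("my-project", "us-central1", job)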

@@ -634,8 +635,8 @@ message SubmitJobRequest {
Job job = 2;

// Optional. A unique id used to identify the request. If the server
-// receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest]
-// requests with the same id, then the second request will be ignored and the
+// receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+// id, then the second request will be ignored and the
// first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
// is returned.
//

Some generated files are not rendered by default.

@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -15,12 +15,29 @@

syntax = "proto3";

-package google.cloud.dataproc.v1beta2;
+package google.cloud.dataproc.v1;

import "google/api/annotations.proto";

-option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc";
+option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc";
option java_multiple_files = true;
option java_outer_classname = "SharedProto";
option java_package = "com.google.cloud.dataproc.v1beta2";
option java_package = "com.google.cloud.dataproc.v1";

+// Cluster components that can be activated.
+enum Component {
+  // Unspecified component.
+  COMPONENT_UNSPECIFIED = 0;
+
+  // The Anaconda python distribution.
+  ANACONDA = 5;
+
+  // The Hive Web HCatalog (the REST service for accessing HCatalog).
+  HIVE_WEBHCAT = 3;
+
+  // The Jupyter Notebook.
+  JUPYTER = 1;
+
+  // The Zeppelin notebook.
+  ZEPPELIN = 4;
+}

Some generated files are not rendered by default.

@@ -0,0 +1,2 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
