feat: add support for cluster components (#221)
JustinBeckwith authored and callmehiphop committed May 31, 2019
1 parent 2d3ec73 commit bca24ea
Showing 16 changed files with 178 additions and 147 deletions.
8 changes: 4 additions & 4 deletions packages/google-cloud-dataproc/package.json
@@ -48,13 +48,13 @@
"eslint-config-prettier": "^4.0.0",
"eslint-plugin-node": "^9.0.0",
"eslint-plugin-prettier": "^3.0.0",
"jsdoc-baseline": "^0.1.0",
"intelli-espower-loader": "^1.0.1",
"jsdoc": "^3.5.5",
"jsdoc": "^3.6.2",
"jsdoc-baseline": "^0.1.0",
"linkinator": "^1.1.2",
"mocha": "^6.0.0",
"nyc": "^14.0.0",
"power-assert": "^1.5.0",
"prettier": "^1.13.3",
"linkinator": "^1.1.2"
"prettier": "^1.13.3"
}
}
protos/google/cloud/dataproc/v1/clusters.proto
@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -19,6 +19,7 @@ package google.cloud.dataproc.v1;

import "google/api/annotations.proto";
import "google/cloud/dataproc/v1/operations.proto";
import "google/cloud/dataproc/v1/shared.proto";
import "google/longrunning/operations.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/field_mask.proto";
@@ -33,26 +34,23 @@ option java_package = "com.google.cloud.dataproc.v1";
// of Compute Engine instances.
service ClusterController {
// Creates a cluster in a project.
-rpc CreateCluster(CreateClusterRequest)
-returns (google.longrunning.Operation) {
+rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
post: "/v1/projects/{project_id}/regions/{region}/clusters"
body: "cluster"
};
}

// Updates a cluster in a project.
-rpc UpdateCluster(UpdateClusterRequest)
-returns (google.longrunning.Operation) {
+rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
patch: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}"
body: "cluster"
};
}

// Deletes a cluster in a project.
-rpc DeleteCluster(DeleteClusterRequest)
-returns (google.longrunning.Operation) {
+rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
delete: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}"
};
@@ -75,8 +73,7 @@ service ClusterController {
// Gets cluster diagnostic information.
// After the operation completes, the Operation.response field
// contains `DiagnoseClusterOutputLocation`.
-rpc DiagnoseCluster(DiagnoseClusterRequest)
-returns (google.longrunning.Operation) {
+rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose"
body: "*"
@@ -126,15 +123,18 @@ message Cluster {

// The cluster config.
message ClusterConfig {
-// Optional. A Cloud Storage staging bucket used for sharing generated
-// SSH keys and config. If you do not specify a staging bucket, Cloud
-// Dataproc will determine an appropriate Cloud Storage location (US,
+// Optional. A Google Cloud Storage bucket used to stage job
+// dependencies, config files, and job driver console output.
+// If you do not specify a staging bucket, Cloud
+// Dataproc will determine a Cloud Storage location (US,
// ASIA, or EU) for your cluster's staging bucket according to the Google
-// Compute Engine zone where your cluster is deployed, and then it will create
-// and manage this project-level, per-location bucket for you.
+// Compute Engine zone where your cluster is deployed, and then create
+// and manage this project-level, per-location bucket (see
+// [Cloud Dataproc staging
+// bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
string config_bucket = 1;

-// Required. The shared Compute Engine config settings for
+// Optional. The shared Compute Engine config settings for
// all instances in a cluster.
GceClusterConfig gce_cluster_config = 8;

@@ -213,8 +213,8 @@ message GceClusterConfig {
//
// A full URL, partial URI, or short name are valid. Examples:
//
-// * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
-// * `projects/[project_id]/regions/us-east1/sub0`
+// * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0`
+// * `projects/[project_id]/regions/us-east1/subnetworks/sub0`
// * `sub0`
string subnetwork_uri = 6;
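
Pulling these fields together, a ClusterConfig built from the Node.js client might look like the sketch below; all resource names are illustrative, and configBucket can be omitted to let Cloud Dataproc create and manage a staging bucket as described above:

    // Illustrative ClusterConfig (proto fields in camelCase).
    const config = {
      configBucket: 'my-staging-bucket',  // optional; omit to let Dataproc manage one
      gceClusterConfig: {
        zoneUri: 'us-east1-b',            // placeholder zone
        // Any of the three documented forms works; short name shown here.
        subnetworkUri: 'sub0',
      },
    };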

@@ -447,13 +447,13 @@ message SoftwareConfig {
// such as "1.2" (including a subminor version, such as "1.2.29"), or the
// ["preview"
// version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
-// If unspecified, it defaults to the latest version.
+// If unspecified, it defaults to the latest Debian version.
string image_version = 1;

// Optional. The properties to set on daemon config files.
//
-// Property keys are specified in `prefix:property` format, such as
-// `core:fs.defaultFS`. The following are supported prefixes
+// Property keys are specified in `prefix:property` format, for example
+// `core:hadoop.tmp.dir`. The following are supported prefixes
// and their mappings:
//
// * capacity-scheduler: `capacity-scheduler.xml`
@@ -469,6 +469,9 @@ message SoftwareConfig {
// For more information, see
// [Cluster properties](/dataproc/docs/concepts/cluster-properties).
map<string, string> properties = 2;

+// The set of optional components to activate on the cluster.
+repeated Component optional_components = 3;
}
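
This new optional_components field is the heart of the commit. A hedged sketch of a SoftwareConfig as it might be passed through the Node.js client, using component names from the new Component enum in shared.proto (shown later in this diff); the property value is a placeholder:

    const softwareConfig = {
      imageVersion: '1.3',  // optional; defaults to the latest Debian version
      properties: {
        // prefix:property format, as documented above.
        'core:hadoop.tmp.dir': '/tmp/hadoop',  // placeholder value
      },
      // New in this commit: activate optional components on the cluster.
      optionalComponents: ['ANACONDA', 'JUPYTER'],
    };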

// Contains cluster daemon metrics, such as HDFS and YARN stats.
@@ -496,11 +499,10 @@ message CreateClusterRequest {
Cluster cluster = 2;

// Optional. A unique id used to identify the request. If the server
-// receives two
-// [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest]
-// requests with the same id, then the second request will be ignored and the
-// first [google.longrunning.Operation][google.longrunning.Operation] created
-// and stored in the backend is returned.
+// receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests with the same
+// id, then the second request will be ignored and the
+// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
+// is returned.
//
// It is recommended to always set this value to a
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
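
A sketch of following that recommendation from Node.js; this assumes the uuid npm package, and the point is to reuse the same id across retries so the server can dedupe:

    const uuid = require('uuid');

    // Generate one id per logical request and reuse it on retries; the
    // server ignores duplicates and returns the first stored Operation.
    const requestId = uuid.v4();
    const request = {
      projectId: 'my-project',  // placeholder
      region: 'us-central1',    // placeholder
      cluster: {/* ... */},
      requestId,
    };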
@@ -585,11 +587,10 @@ message UpdateClusterRequest {
google.protobuf.FieldMask update_mask = 4;

// Optional. A unique id used to identify the request. If the server
-// receives two
-// [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest]
-// requests with the same id, then the second request will be ignored and the
-// first [google.longrunning.Operation][google.longrunning.Operation] created
-// and stored in the backend is returned.
+// receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
+// id, then the second request will be ignored and the
+// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+// backend is returned.
//
// It is recommended to always set this value to a
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -616,11 +617,10 @@ message DeleteClusterRequest {
string cluster_uuid = 4;

// Optional. A unique id used to identify the request. If the server
-// receives two
-// [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest]
-// requests with the same id, then the second request will be ignored and the
-// first [google.longrunning.Operation][google.longrunning.Operation] created
-// and stored in the backend is returned.
+// receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
+// id, then the second request will be ignored and the
+// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
+// backend is returned.
//
// It is recommended to always set this value to a
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
protos/google/cloud/dataproc/v1/jobs.proto
@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -473,11 +473,12 @@ message JobReference {
// belongs to.
string project_id = 1;

-// Optional. The job ID, which must be unique within the project. The job ID
-// is generated by the server upon job submission or provided by the user as a
-// means to perform retries without creating duplicate jobs. The ID must
-// contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
-// hyphens (-). The maximum length is 100 characters.
+// Optional. The job ID, which must be unique within the project.
+//
+// The ID must contain only letters (a-z, A-Z), numbers (0-9),
+// underscores (_), or hyphens (-). The maximum length is 100 characters.
+//
+// If not specified by the caller, the job ID will be provided by the server.
string job_id = 2;
}
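
In practice most callers omit the job ID and let the server assign one. A hedged sketch of a job submission through the generated JobControllerClient (all names and URIs are placeholders):

    const dataproc = require('@google-cloud/dataproc');

    async function submitJob() {
      const client = new dataproc.v1.JobControllerClient();
      const [job] = await client.submitJob({
        projectId: 'my-project',  // placeholder
        region: 'us-central1',    // placeholder
        job: {
          placement: {clusterName: 'my-cluster'},
          // jobId is optional: set it (letters, digits, _ and -, max 100
          // chars) to make retries idempotent, or omit it entirely.
          reference: {jobId: 'word-count-1'},
          hadoopJob: {mainJarFileUri: 'gs://my-bucket/word-count.jar'},
        },
      });
      console.log(`Submitted ${job.reference.jobId}`);
    }

    submitJob().catch(console.error);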

@@ -634,8 +635,8 @@ message SubmitJobRequest {
Job job = 2;

// Optional. A unique id used to identify the request. If the server
-// receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest]
-// requests with the same id, then the second request will be ignored and the
+// receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
+// id, then the second request will be ignored and the
// first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
// is returned.
//
protos/google/cloud/dataproc/v1/operations.proto
@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
protos/google/cloud/dataproc/v1/shared.proto
@@ -0,0 +1,43 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.dataproc.v1;
+
+import "google/api/annotations.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc";
+option java_multiple_files = true;
+option java_outer_classname = "SharedProto";
+option java_package = "com.google.cloud.dataproc.v1";
+
+// Cluster components that can be activated.
+enum Component {
+  // Unspecified component.
+  COMPONENT_UNSPECIFIED = 0;
+
+  // The Anaconda python distribution.
+  ANACONDA = 5;
+
+  // The Hive Web HCatalog (the REST service for accessing HCatalog).
+  HIVE_WEBHCAT = 3;
+
+  // The Jupyter Notebook.
+  JUPYTER = 1;
+
+  // The Zeppelin notebook.
+  ZEPPELIN = 4;
+}
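
Note the non-contiguous, out-of-order tag numbers; they only matter on the wire. In requests built with the generated clients the values can typically be given by name or by number, a hedged sketch:

    // Equivalent ways to select components in SoftwareConfig
    // (generated clients generally accept either form).
    const byName = {optionalComponents: ['ANACONDA', 'HIVE_WEBHCAT']};
    const byNumber = {optionalComponents: [5, 3]};  // same components by tag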
protos/google/cloud/dataproc/v1/workflow_templates.proto
@@ -1,4 +1,4 @@
-// Copyright 2018 Google LLC.
+// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -33,8 +33,7 @@ option java_package = "com.google.cloud.dataproc.v1";
// Cloud Dataproc API.
service WorkflowTemplateService {
// Creates new workflow template.
-rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest)
-returns (WorkflowTemplate) {
+rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) {
option (google.api.http) = {
post: "/v1/{parent=projects/*/locations/*}/workflowTemplates"
body: "template"
@@ -49,8 +48,7 @@ service WorkflowTemplateService {
//
// Can retrieve previously instantiated template by specifying optional
// version parameter.
-rpc GetWorkflowTemplate(GetWorkflowTemplateRequest)
-returns (WorkflowTemplate) {
+rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) returns (WorkflowTemplate) {
option (google.api.http) = {
get: "/v1/{name=projects/*/locations/*/workflowTemplates/*}"
additional_bindings {
@@ -77,8 +75,7 @@ service WorkflowTemplateService {
// On successful completion,
// [Operation.response][google.longrunning.Operation.response] will be
// [Empty][google.protobuf.Empty].
-rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest)
-returns (google.longrunning.Operation) {
+rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
post: "/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate"
body: "*"
@@ -92,8 +89,7 @@ service WorkflowTemplateService {
// Instantiates a template and begins execution.
//
// This method is equivalent to executing the sequence
-// [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate],
-// [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+// [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
// [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
//
// The returned Operation can be used to track execution of
@@ -112,9 +108,7 @@ service WorkflowTemplateService {
// On successful completion,
// [Operation.response][google.longrunning.Operation.response] will be
// [Empty][google.protobuf.Empty].
-rpc InstantiateInlineWorkflowTemplate(
-InstantiateInlineWorkflowTemplateRequest)
-returns (google.longrunning.Operation) {
+rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) {
option (google.api.http) = {
post: "/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline"
body: "template"
@@ -127,8 +121,7 @@

// Updates (replaces) workflow template. The updated template
// must contain version that matches the current server version.
-rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest)
-returns (WorkflowTemplate) {
+rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) returns (WorkflowTemplate) {
option (google.api.http) = {
put: "/v1/{template.name=projects/*/locations/*/workflowTemplates/*}"
body: "template"
@@ -140,8 +133,7 @@
}

// Lists workflows that match the specified filter in the request.
-rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest)
-returns (ListWorkflowTemplatesResponse) {
+rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) returns (ListWorkflowTemplatesResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*}/workflowTemplates"
additional_bindings {
@@ -151,8 +143,7 @@
}

// Deletes a workflow template. It does not cancel in-progress workflows.
-rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest)
-returns (google.protobuf.Empty) {
+rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) returns (google.protobuf.Empty) {
option (google.api.http) = {
delete: "/v1/{name=projects/*/locations/*/workflowTemplates/*}"
additional_bindings {
@@ -284,8 +275,8 @@ message OrderedJob {
//
// The step id is used as prefix for job id, as job
// `goog-dataproc-workflow-step-id` label, and in
-// [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids]
-// field from other steps.
+// [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other
+// steps.
//
// The id must contain only letters (a-z, A-Z), numbers (0-9),
// underscores (_), and hyphens (-). Cannot begin or end with underscore
@@ -356,10 +347,10 @@ message TemplateParameter {
// A field is allowed to appear in at most one parameter's list of field
// paths.
//
-// A field path is similar in syntax to a
-// [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a
-// field path that references the zone field of a workflow template's cluster
-// selector would be specified as `placement.clusterSelector.zone`.
+// A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask].
+// For example, a field path that references the zone field of a workflow
+// template's cluster selector would be specified as
+// `placement.clusterSelector.zone`.
//
// Also, field paths can reference fields using the following syntax:
//