Implementing Bigtable Cluster.update().
Also adding utility for processing UpdateClusterMetadata.
dhermes committed Dec 5, 2015
1 parent c4ab6de commit a2efbed
Showing 2 changed files with 121 additions and 1 deletion.
30 changes: 30 additions & 0 deletions gcloud/bigtable/cluster.py
@@ -35,8 +35,10 @@
_TYPE_URL_BASE = 'type.googleapis.com/google.bigtable.'
_ADMIN_TYPE_URL_BASE = _TYPE_URL_BASE + 'admin.cluster.v1.'
_CLUSTER_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'CreateClusterMetadata'
_UPDATE_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'UpdateClusterMetadata'
_TYPE_URL_MAP = {
_CLUSTER_CREATE_METADATA: messages_pb2.CreateClusterMetadata,
_UPDATE_CREATE_METADATA: messages_pb2.UpdateClusterMetadata,
}


@@ -317,6 +319,34 @@ def create(self):
self._operation_id, self._operation_begin = _process_operation(
cluster_pb.current_operation)

def update(self):
    """Update this cluster.

    .. note::

        Updates the ``display_name`` and ``serve_nodes``. If you'd like to
        change them before updating, reset the values via

        .. code:: python

            cluster.display_name = 'New display name'
            cluster.serve_nodes = 3

        before calling :meth:`update`.
    """
request_pb = data_pb2.Cluster(
name=self.name,
display_name=self.display_name,
serve_nodes=self.serve_nodes,
)
# We expect a `._generated.bigtable_cluster_data_pb2.Cluster`.
cluster_pb = self._client._cluster_stub.UpdateCluster(
request_pb, self._client.timeout_seconds)

self._operation_type = 'update'
self._operation_id, self._operation_begin = _process_operation(
cluster_pb.current_operation)

def delete(self):
"""Delete this cluster.
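
For context, here is a minimal usage sketch of the new method, based on the docstring's note and the constructor call used in the tests. It is not part of the commit; `client` is assumed to be an already-configured Bigtable client object that exposes `_cluster_stub` and `timeout_seconds`, as the `_Client` stand-in in the tests does.

    from gcloud.bigtable.cluster import Cluster

    # `client` is assumed to already exist and be configured (anything
    # exposing `_cluster_stub` and `timeout_seconds`, per the method body).
    cluster = Cluster('us-central1-c', 'my-cluster', client,
                      display_name='Old display name', serve_nodes=3)

    # Per the docstring's note, reset the mutable fields first...
    cluster.display_name = 'New display name'
    cluster.serve_nodes = 5

    # ...then push the change; the resulting long-running operation's id and
    # begin time are stored on the cluster (`_operation_id`, `_operation_begin`).
    cluster.update()
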
92 changes: 91 additions & 1 deletion gcloud/bigtable/test_cluster.py
@@ -276,7 +276,7 @@ def mock_prep_create_req(cluster):

def mock_process_operation(operation_pb):
process_operation_called.append(operation_pb)
-        return (op_id, op_begin)
+        return op_id, op_begin

# Perform the method and check the result.
with _Monkey(MUT, _prepare_create_request=mock_prep_create_req,
@@ -295,6 +295,69 @@ def mock_process_operation(operation_pb):
self.assertEqual(prep_create_called, [cluster])
self.assertEqual(process_operation_called, [current_op])

def test_update(self):
from gcloud._testing import _Monkey
from gcloud.bigtable._generated import (
bigtable_cluster_data_pb2 as data_pb2)
from gcloud.bigtable._generated import operations_pb2
from gcloud.bigtable._testing import _FakeStub
from gcloud.bigtable import cluster as MUT

project = 'PROJECT'
zone = 'zone'
cluster_id = 'cluster-id'
serve_nodes = 81
display_name = 'display_name'
timeout_seconds = 9

client = _Client(project, timeout_seconds=timeout_seconds)
cluster = self._makeOne(zone, cluster_id, client,
display_name=display_name,
serve_nodes=serve_nodes)

# Create request_pb
cluster_name = ('projects/' + project + '/zones/' + zone +
'/clusters/' + cluster_id)
request_pb = data_pb2.Cluster(
name=cluster_name,
display_name=display_name,
serve_nodes=serve_nodes,
)

# Create response_pb
current_op = operations_pb2.Operation()
response_pb = data_pb2.Cluster(current_operation=current_op)

# Patch the stub used by the API method.
client._cluster_stub = stub = _FakeStub(response_pb)

# Create expected_result.
expected_result = None

# Create mocks
op_id = 5678
op_begin = object()
process_operation_called = []

def mock_process_operation(operation_pb):
process_operation_called.append(operation_pb)
return op_id, op_begin

# Perform the method and check the result.
with _Monkey(MUT, _process_operation=mock_process_operation):
result = cluster.update()

self.assertEqual(result, expected_result)
self.assertEqual(stub.method_calls, [(
'UpdateCluster',
(request_pb, timeout_seconds),
{},
)])
self.assertEqual(cluster._operation_type, 'update')
self.assertEqual(cluster._operation_id, op_id)
self.assertTrue(cluster._operation_begin is op_begin)
self.assertEqual(process_operation_called, [current_op])

def test_delete(self):
from gcloud.bigtable._generated import (
bigtable_cluster_service_messages_pb2 as messages_pb2)
@@ -468,6 +531,33 @@ def test_with_create_cluster_metadata(self):
result = self._callFUT(any_val)
self.assertEqual(result, metadata)

def test_with_update_cluster_metadata(self):
from gcloud.bigtable._generated import any_pb2
from gcloud.bigtable._generated import (
bigtable_cluster_data_pb2 as data_pb2)
from gcloud.bigtable._generated import (
bigtable_cluster_service_messages_pb2 as messages_pb2)
from gcloud.bigtable._generated.timestamp_pb2 import Timestamp

type_url = ('type.googleapis.com/' +
messages_pb2._UPDATECLUSTERMETADATA.full_name)
metadata = messages_pb2.UpdateClusterMetadata(
request_time=Timestamp(seconds=1, nanos=1234),
finish_time=Timestamp(seconds=10, nanos=891011),
cancel_time=Timestamp(seconds=100, nanos=76543),
original_request=data_pb2.Cluster(
display_name='the-end',
serve_nodes=42,
),
)

any_val = any_pb2.Any(
type_url=type_url,
value=metadata.SerializeToString(),
)
result = self._callFUT(any_val)
self.assertEqual(result, metadata)

def test_unknown_type_url(self):
from gcloud._testing import _Monkey
from gcloud.bigtable._generated import any_pb2
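
The new `test_with_update_cluster_metadata` case exercises the helper that `_callFUT` wraps: given a packed `google.protobuf.Any`, look the type URL up in `_TYPE_URL_MAP` and deserialize the payload into its native message class. A minimal sketch of that lookup-and-parse pattern follows; the function name is illustrative, since the helper's actual identifier does not appear in this diff.

    from gcloud.bigtable.cluster import _TYPE_URL_MAP

    def parse_any_to_native(any_val):
        """Illustrative sketch: unpack a google.protobuf.Any via _TYPE_URL_MAP."""
        # Map the type URL (e.g. '...UpdateClusterMetadata') to its message class.
        klass = _TYPE_URL_MAP[any_val.type_url]
        # FromString is the standard protobuf classmethod for deserialization.
        return klass.FromString(any_val.value)
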
