From 07ba7bac726dcd0163bf67b8afc11fbeadbe8bdc Mon Sep 17 00:00:00 2001
From: Danny Hermes
Date: Tue, 8 Dec 2015 21:07:56 -0800
Subject: [PATCH] Implemented Bigtable Cluster.undelete().

Also adding a utility for processing UndeleteClusterMetadata.
---
 gcloud/bigtable/cluster.py      | 35 ++++++++++++++++
 gcloud/bigtable/test_cluster.py | 73 +++++++++++++++++++++++++++++++++
 2 files changed, 108 insertions(+)

diff --git a/gcloud/bigtable/cluster.py b/gcloud/bigtable/cluster.py
index f8d7ab271930..de811cbef6e2 100644
--- a/gcloud/bigtable/cluster.py
+++ b/gcloud/bigtable/cluster.py
@@ -39,9 +39,11 @@
 _ADMIN_TYPE_URL_BASE = _TYPE_URL_BASE + 'admin.cluster.v1.'
 _CLUSTER_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'CreateClusterMetadata'
 _UPDATE_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'UpdateClusterMetadata'
+_UNDELETE_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'UndeleteClusterMetadata'
 _TYPE_URL_MAP = {
     _CLUSTER_CREATE_METADATA: messages_pb2.CreateClusterMetadata,
     _UPDATE_CREATE_METADATA: messages_pb2.UpdateClusterMetadata,
+    _UNDELETE_CREATE_METADATA: messages_pb2.UndeleteClusterMetadata,
 }
 
 
@@ -447,6 +449,39 @@ def delete(self):
         self._client._cluster_stub.DeleteCluster(
             request_pb, self._client.timeout_seconds)
 
+    def undelete(self):
+        """Undelete this cluster.
+
+        Cancels the scheduled deletion of a cluster and begins preparing it
+        to resume serving. The returned operation will also be embedded as
+        the cluster's ``current_operation``.
+
+        Immediately upon completion of this request:
+
+        * The cluster's ``delete_time`` field will be unset, protecting it
+          from automatic deletion.
+
+        Until completion of the returned operation:
+
+        * The operation cannot be cancelled.
+
+        Upon completion of the returned operation:
+
+        * Billing for the cluster's resources will resume.
+        * All tables within the cluster will be available.
+
+        :rtype: :class:`Operation`
+        :returns: The long-running operation corresponding to the
+                  undelete operation.
+        """
+        request_pb = messages_pb2.UndeleteClusterRequest(name=self.name)
+        # We expect a `._generated.operations_pb2.Operation`
+        operation_pb2 = self._client._cluster_stub.UndeleteCluster(
+            request_pb, self._client.timeout_seconds)
+
+        op_id, op_begin = _process_operation(operation_pb2)
+        return Operation('undelete', op_id, op_begin)
+
     def list_tables(self):
         """List the tables in this cluster.
 
diff --git a/gcloud/bigtable/test_cluster.py b/gcloud/bigtable/test_cluster.py
index 29b941823abc..2c14ced56572 100644
--- a/gcloud/bigtable/test_cluster.py
+++ b/gcloud/bigtable/test_cluster.py
@@ -511,6 +511,57 @@ def test_delete(self):
             {},
         )])
 
+    def test_undelete(self):
+        from gcloud._testing import _Monkey
+        from gcloud.bigtable._generated import (
+            bigtable_cluster_service_messages_pb2 as messages_pb2)
+        from gcloud.bigtable._generated import operations_pb2
+        from gcloud.bigtable._testing import _FakeStub
+        from gcloud.bigtable import cluster as MUT
+
+        project = 'PROJECT'
+        zone = 'zone'
+        cluster_id = 'cluster-id'
+        timeout_seconds = 78
+
+        client = _Client(project, timeout_seconds=timeout_seconds)
+        cluster = self._makeOne(zone, cluster_id, client)
+
+        # Create request_pb
+        cluster_name = ('projects/' + project + '/zones/' + zone +
+                        '/clusters/' + cluster_id)
+        request_pb = messages_pb2.UndeleteClusterRequest(name=cluster_name)
+
+        # Create response_pb
+        response_pb = operations_pb2.Operation()
+
+        # Patch the stub used by the API method.
+        client._cluster_stub = stub = _FakeStub(response_pb)
+
+        # Create expected_result.
+        op_id = 5678
+        op_begin = object()
+        expected_result = MUT.Operation('undelete', op_id, op_begin)
+
+        # Create the mocks.
+        process_operation_called = []
+
+        def mock_process_operation(operation_pb):
+            process_operation_called.append(operation_pb)
+            return op_id, op_begin
+
+        # Perform the method and check the result.
+        with _Monkey(MUT, _process_operation=mock_process_operation):
+            result = cluster.undelete()
+
+        self.assertEqual(result, expected_result)
+        self.assertEqual(stub.method_calls, [(
+            'UndeleteCluster',
+            (request_pb, timeout_seconds),
+            {},
+        )])
+        self.assertEqual(process_operation_called, [response_pb])
+
     def _list_tables_helper(self, table_id, table_name=None):
         from gcloud.bigtable._generated import (
             bigtable_table_data_pb2 as table_data_pb2)
@@ -738,6 +789,28 @@ def test_with_update_cluster_metadata(self):
         result = self._callFUT(any_val)
         self.assertEqual(result, metadata)
 
+    def test_with_undelete_cluster_metadata(self):
+        from gcloud.bigtable._generated import any_pb2
+        from gcloud.bigtable._generated import (
+            bigtable_cluster_data_pb2 as data_pb2)
+        from gcloud.bigtable._generated import (
+            bigtable_cluster_service_messages_pb2 as messages_pb2)
+        from gcloud.bigtable._generated.timestamp_pb2 import Timestamp
+
+        type_url = ('type.googleapis.com/' +
+                    messages_pb2._UNDELETECLUSTERMETADATA.full_name)
+        metadata = messages_pb2.UndeleteClusterMetadata(
+            request_time=Timestamp(seconds=1, nanos=1234),
+            finish_time=Timestamp(seconds=10, nanos=891011),
+        )
+
+        any_val = any_pb2.Any(
+            type_url=type_url,
+            value=metadata.SerializeToString(),
+        )
+        result = self._callFUT(any_val)
+        self.assertEqual(result, metadata)
+
     def test_unknown_type_url(self):
         from gcloud._testing import _Monkey
         from gcloud.bigtable._generated import any_pb2
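
Note (not part of the patch): a minimal sketch of how the new _TYPE_URL_MAP entry is typically consumed, mirroring what test_with_undelete_cluster_metadata exercises. The helper name _unpack_metadata is hypothetical; only the map entry, the Any round trip, and the pb2 identifiers come from the patch above.

from gcloud.bigtable._generated import any_pb2
from gcloud.bigtable._generated import (
    bigtable_cluster_service_messages_pb2 as messages_pb2)

# Type URL -> message class, as in the patched _TYPE_URL_MAP.
_UNDELETE_TYPE_URL = ('type.googleapis.com/' +
                      messages_pb2._UNDELETECLUSTERMETADATA.full_name)
_TYPE_URL_MAP = {
    _UNDELETE_TYPE_URL: messages_pb2.UndeleteClusterMetadata,
}


def _unpack_metadata(any_val):
    # Hypothetical helper: pick the message class by type URL and
    # deserialize the packed bytes.
    klass = _TYPE_URL_MAP[any_val.type_url]
    return klass.FromString(any_val.value)


# Round trip: pack an UndeleteClusterMetadata into an Any, then unpack it.
metadata = messages_pb2.UndeleteClusterMetadata()
any_val = any_pb2.Any(type_url=_UNDELETE_TYPE_URL,
                      value=metadata.SerializeToString())
assert _unpack_metadata(any_val) == metadata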