Skip to content

Commit 9bcb777

Browse files
committed
Adding helpers to parse Bigtable create cluster operation.
A CreateCluster request response doesn't actually indicate success or failure. Rather it returns a cluster object with the validated request parts inside and a `current_operation` attached. We implement `_process_operation` so that we can determine the ID of that long-running operation (so it can be checked for completion / success, if desired by the user). In addition we seek to notify the user when the request began. From the [service definition][1] we know that the `current_operation` is a [long-running operation][2] and that: > The embedded operation's "metadata" field type is `CreateClusterMetadata`, > The embedded operation's "response" field type is `Cluster`, if successful. The [`Operation` metadata][3] is of type [`Any`][4] (which uses a `type_url` and raw bytes to provide **any** protobuf message type in a single field, but still allows it to be parsed into its true type after the fact). So we expect `CreateCluster` responses to have long-running operations with a type URL matching [`CreateClusterMetadata`][5]. As a result, we introduce a utility (`_parse_pb_any_to_native`) for parsing an `Any` field into the native protobuf type specified by the type URL. Since we know we need to handle `CreateClusterMetadata` values, we add a default mapping (`_TYPE_URL_MAP`) from the corresponding type URL for that message type to the native Python type. The `CreateClusterMetadata` type has `request_time` and `finish_time` fields of type [`Timestamp`][6] so we also add the `_pb_timestamp_to_datetime` helper for converting protobuf messages into native Python `datetime.datetime` objects.
[1]: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/blob/8e363d72eb39d921dfdf5daf4a36032aa9d003e2/bigtable-protos/src/main/proto/google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto#L64 [2]: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/blob/8e363d72eb39d921dfdf5daf4a36032aa9d003e2/bigtable-protos/src/main/proto/google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto#L74 [3]: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/blob/8e363d72eb39d921dfdf5daf4a36032aa9d003e2/bigtable-protos/src/main/proto/google/longrunning/operations.proto#L82 [4]: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/blob/8e363d72eb39d921dfdf5daf4a36032aa9d003e2/bigtable-protos/src/main/proto/google/protobuf/any.proto#L58 [5]: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/blob/8e363d72eb39d921dfdf5daf4a36032aa9d003e2/bigtable-protos/src/main/proto/google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto#L83-L92 [6]: https://github.com/GoogleCloudPlatform/cloud-bigtable-client/blob/8e363d72eb39d921dfdf5daf4a36032aa9d003e2/bigtable-protos/src/main/proto/google/protobuf/timestamp.proto#L78
1 parent 9c95d66 commit 9bcb777

2 files changed

Lines changed: 265 additions & 5 deletions

File tree

gcloud/bigtable/cluster.py

Lines changed: 84 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,10 @@
1515
"""User friendly container for Google Cloud Bigtable Cluster."""
1616

1717

18+
import datetime
1819
import re
1920

21+
from gcloud._helpers import _EPOCH
2022
from gcloud.bigtable._generated import bigtable_cluster_data_pb2 as data_pb2
2123
from gcloud.bigtable._generated import (
2224
bigtable_cluster_service_messages_pb2 as messages_pb2)
@@ -26,7 +28,16 @@
2628
_CLUSTER_NAME_RE = re.compile(r'^projects/(?P<project>[^/]+)/'
2729
r'zones/(?P<zone>[^/]+)/clusters/'
2830
r'(?P<cluster_id>[a-z][-a-z0-9]*)$')
31+
_OPERATION_NAME_RE = re.compile(r'^operations/projects/([^/]+)/zones/([^/]+)/'
32+
r'clusters/([a-z][-a-z0-9]*)/operations/'
33+
r'(?P<operation_id>\d+)$')
2934
_DEFAULT_SERVE_NODES = 3
35+
_TYPE_URL_BASE = 'type.googleapis.com/google.bigtable.'
36+
_ADMIN_TYPE_URL_BASE = _TYPE_URL_BASE + 'admin.cluster.v1.'
37+
_CLUSTER_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'CreateClusterMetadata'
38+
_TYPE_URL_MAP = {
39+
_CLUSTER_CREATE_METADATA: messages_pb2.CreateClusterMetadata,
40+
}
3041

3142

3243
def _get_pb_property_value(message_pb, property_name):
@@ -74,6 +85,73 @@ def _prepare_create_request(cluster):
7485
)
7586

7687

88+
def _pb_timestamp_to_datetime(timestamp):
    """Convert a Timestamp protobuf to a datetime object.

    :type timestamp: :class:`._generated.timestamp_pb2.Timestamp`
    :param timestamp: A Google returned timestamp protobuf.

    :rtype: :class:`datetime.datetime`
    :returns: A UTC datetime object converted from a protobuf timestamp.
    """
    # ``timedelta`` only resolves down to microseconds, so the nanosecond
    # component is scaled by 1000 before being handed over.
    offset = datetime.timedelta(
        seconds=timestamp.seconds,
        microseconds=(timestamp.nanos / 1000.0),
    )
    return _EPOCH + offset
104+
105+
106+
def _parse_pb_any_to_native(any_val, expected_type=None):
107+
"""Convert a serialized "google.protobuf.Any" value to actual type.
108+
109+
:type any_val: :class:`gcloud.bigtable._generated.any_pb2.Any`
110+
:param any_val: A serialized protobuf value container.
111+
112+
:type expected_type: str
113+
:param expected_type: (Optional) The type URL we expect ``any_val``
114+
to have.
115+
116+
:rtype: object
117+
:returns: The de-serialized object.
118+
:raises: :class:`ValueError <exceptions.ValueError>` if the
119+
``expected_type`` does not match the ``type_url`` on the input.
120+
"""
121+
if expected_type is not None and expected_type != any_val.type_url:
122+
raise ValueError('Expected type: %s, Received: %s' % (
123+
expected_type, any_val.type_url))
124+
container_class = _TYPE_URL_MAP[any_val.type_url]
125+
return container_class.FromString(any_val.value)
126+
127+
128+
def _process_operation(operation_pb):
    """Processes a create protobuf response.

    :type operation_pb: :class:`operations_pb2.Operation`
    :param operation_pb: The long-running operation response from a
                         Create/Update/Undelete cluster request.

    :rtype: tuple
    :returns: A pair of an integer and datetime stamp. The integer is the ID
              of the operation (``operation_id``) and the timestamp when
              the create operation began (``operation_begin``).
    :raises: :class:`ValueError <exceptions.ValueError>` if the operation name
             doesn't match the :data:`_OPERATION_NAME_RE` regex.
    """
    name_match = _OPERATION_NAME_RE.match(operation_pb.name)
    if name_match is None:
        raise ValueError('Cluster create operation name was not in the '
                         'expected format.', operation_pb.name)
    operation_id = int(name_match.group('operation_id'))

    # The operation metadata is an ``Any`` pb; unpack it so the embedded
    # ``request_time`` can be read.
    metadata = _parse_pb_any_to_native(operation_pb.metadata)
    operation_begin = _pb_timestamp_to_datetime(metadata.request_time)

    return operation_id, operation_begin
153+
154+
77155
class Cluster(object):
78156
"""Representation of a Google Cloud Bigtable Cluster.
79157
@@ -105,7 +183,9 @@ def __init__(self, zone, cluster_id, client,
105183
self.display_name = display_name or cluster_id
106184
self.serve_nodes = serve_nodes
107185
self._client = client
108-
self._operation = None
186+
self._operation_type = None
187+
self._operation_id = None
188+
self._operation_begin = None
109189

110190
def table(self, table_id):
111191
"""Factory to create a table associated with this cluster.
@@ -217,4 +297,6 @@ def create(self):
217297
cluster_pb = self._client._cluster_stub.CreateCluster(
218298
request_pb, self._client.timeout_seconds)
219299

220-
self._operation = cluster_pb.current_operation
300+
self._operation_type = 'create'
301+
self._operation_id, self._operation_begin = _process_operation(
302+
cluster_pb.current_operation)

gcloud/bigtable/test_cluster.py

Lines changed: 181 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -233,6 +233,7 @@ def test_create(self):
233233

234234
# Create response_pb
235235
op_id = 5678
236+
op_begin = object()
236237
op_name = ('operations/projects/%s/zones/%s/clusters/%s/'
237238
'operations/%d' % (project, zone, cluster_id, op_id))
238239
current_op = operations_pb2.Operation(name=op_name)
@@ -244,14 +245,22 @@ def test_create(self):
244245
# Create expected_result.
245246
expected_result = None # create() has no return value.
246247

247-
# Perform the method and check the result.
248+
# Create the mocks.
248249
prep_create_called = []
249250

250251
def mock_prep_create_req(cluster):
251252
prep_create_called.append(cluster)
252253
return request_pb
253254

254-
with _Monkey(MUT, _prepare_create_request=mock_prep_create_req):
255+
process_operation_called = []
256+
257+
def mock_process_operation(operation_pb):
258+
process_operation_called.append(operation_pb)
259+
return (op_id, op_begin)
260+
261+
# Perform the method and check the result.
262+
with _Monkey(MUT, _prepare_create_request=mock_prep_create_req,
263+
_process_operation=mock_process_operation):
255264
result = cluster.create()
256265

257266
self.assertEqual(result, expected_result)
@@ -260,8 +269,11 @@ def mock_prep_create_req(cluster):
260269
(request_pb, timeout_seconds),
261270
{},
262271
)])
263-
self.assertEqual(cluster._operation, current_op)
272+
self.assertEqual(cluster._operation_type, 'create')
273+
self.assertEqual(cluster._operation_id, op_id)
274+
self.assertTrue(cluster._operation_begin is op_begin)
264275
self.assertEqual(prep_create_called, [cluster])
276+
self.assertEqual(process_operation_called, [current_op])
265277

266278

267279
class Test__get_pb_property_value(unittest2.TestCase):
@@ -319,6 +331,172 @@ def test_it(self):
319331
self.assertEqual(request_pb.cluster.serve_nodes, serve_nodes)
320332

321333

334+
class Test__pb_timestamp_to_datetime(unittest2.TestCase):
    """Unit tests for the ``_pb_timestamp_to_datetime`` helper."""

    def _callFUT(self, timestamp):
        from gcloud.bigtable.cluster import _pb_timestamp_to_datetime
        return _pb_timestamp_to_datetime(timestamp)

    def test_it(self):
        import datetime
        from gcloud._helpers import UTC
        from gcloud.bigtable._generated.timestamp_pb2 import Timestamp

        # 61 seconds past the epoch is 1 minute and 1 second after
        # midnight on January 1, 1970, and 1234000 nanoseconds is
        # 1234 microseconds.
        pb_stamp = Timestamp(seconds=61, nanos=1234000)
        expected = datetime.datetime(1970, month=1, day=1, hour=0,
                                     minute=1, second=1, microsecond=1234,
                                     tzinfo=UTC)
        self.assertEqual(self._callFUT(pb_stamp), expected)
353+
354+
355+
class Test__parse_pb_any_to_native(unittest2.TestCase):
    """Unit tests for the ``_parse_pb_any_to_native`` helper."""

    def _callFUT(self, any_val, expected_type=None):
        from gcloud.bigtable.cluster import _parse_pb_any_to_native
        return _parse_pb_any_to_native(any_val, expected_type=expected_type)

    def test_with_known_type_url(self):
        from gcloud._testing import _Monkey
        from gcloud.bigtable._generated import any_pb2
        from gcloud.bigtable._generated import bigtable_data_pb2 as data_pb2
        from gcloud.bigtable import cluster as MUT

        # Map the Cell message's type URL to its class so the helper
        # can round-trip the serialized bytes.
        type_url = 'type.googleapis.com/' + data_pb2._CELL.full_name
        fake_type_url_map = {type_url: data_pb2.Cell}

        cell_pb = data_pb2.Cell(timestamp_micros=0, value=b'foobar')
        any_pb = any_pb2.Any(type_url=type_url,
                             value=cell_pb.SerializeToString())
        with _Monkey(MUT, _TYPE_URL_MAP=fake_type_url_map):
            parsed = self._callFUT(any_pb)

        self.assertEqual(parsed, cell_pb)

    def test_with_create_cluster_metadata(self):
        from gcloud.bigtable._generated import any_pb2
        from gcloud.bigtable._generated import (
            bigtable_cluster_data_pb2 as data_pb2)
        from gcloud.bigtable._generated import (
            bigtable_cluster_service_messages_pb2 as messages_pb2)
        from gcloud.bigtable._generated.timestamp_pb2 import Timestamp

        # Uses the real (module-level) type URL map rather than a fake.
        type_url = ('type.googleapis.com/' +
                    messages_pb2._CREATECLUSTERMETADATA.full_name)
        original_request = messages_pb2.CreateClusterRequest(
            name='foo',
            cluster_id='bar',
            cluster=data_pb2.Cluster(
                display_name='quux',
                serve_nodes=1337,
            ),
        )
        metadata_pb = messages_pb2.CreateClusterMetadata(
            request_time=Timestamp(seconds=1, nanos=1234),
            finish_time=Timestamp(seconds=10, nanos=891011),
            original_request=original_request,
        )

        any_pb = any_pb2.Any(type_url=type_url,
                             value=metadata_pb.SerializeToString())
        self.assertEqual(self._callFUT(any_pb), metadata_pb)

    def test_unknown_type_url(self):
        from gcloud._testing import _Monkey
        from gcloud.bigtable._generated import any_pb2
        from gcloud.bigtable import cluster as MUT

        # An empty map means no type URL can be resolved.
        any_pb = any_pb2.Any()
        with _Monkey(MUT, _TYPE_URL_MAP={}):
            with self.assertRaises(KeyError):
                self._callFUT(any_pb)

    def test_disagreeing_type_url(self):
        from gcloud._testing import _Monkey
        from gcloud.bigtable._generated import any_pb2
        from gcloud.bigtable import cluster as MUT

        # The expected type differs from the value's actual type URL.
        expected_url = 'foo'
        actual_url = 'bar'
        any_pb = any_pb2.Any(type_url=actual_url)
        with _Monkey(MUT, _TYPE_URL_MAP={expected_url: None}):
            with self.assertRaises(ValueError):
                self._callFUT(any_pb, expected_type=expected_url)
436+
437+
438+
class Test__process_operation(unittest2.TestCase):
    """Unit tests for the ``_process_operation`` helper."""

    def _callFUT(self, operation_pb):
        from gcloud.bigtable.cluster import _process_operation
        return _process_operation(operation_pb)

    def test_it(self):
        from gcloud._testing import _Monkey
        from gcloud.bigtable._generated import (
            bigtable_cluster_service_messages_pb2 as messages_pb2)
        from gcloud.bigtable._generated import operations_pb2
        from gcloud.bigtable import cluster as MUT

        project = 'PROJECT'
        zone = 'zone'
        cluster_id = 'cluster-id'
        expected_operation_id = 234
        operation_name = ('operations/projects/%s/zones/%s/clusters/%s/'
                          'operations/%d' % (project, zone, cluster_id,
                                             expected_operation_id))

        current_op = operations_pb2.Operation(name=operation_name)

        # Create mocks.
        request_metadata = messages_pb2.CreateClusterMetadata()
        parse_pb_any_called = []

        def mock_parse_pb_any_to_native(any_val, expected_type=None):
            parse_pb_any_called.append((any_val, expected_type))
            return request_metadata

        expected_operation_begin = object()
        ts_to_dt_called = []

        def mock_pb_timestamp_to_datetime(timestamp):
            ts_to_dt_called.append(timestamp)
            return expected_operation_begin

        # Execute method with mocks in place.
        with _Monkey(MUT, _parse_pb_any_to_native=mock_parse_pb_any_to_native,
                     _pb_timestamp_to_datetime=mock_pb_timestamp_to_datetime):
            operation_id, operation_begin = self._callFUT(current_op)

        # Check outputs.
        self.assertEqual(operation_id, expected_operation_id)
        self.assertTrue(operation_begin is expected_operation_begin)

        # Check mocks were used correctly.
        self.assertEqual(parse_pb_any_called, [(current_op.metadata, None)])
        self.assertEqual(ts_to_dt_called, [request_metadata.request_time])

    def test_op_name_parsing_failure(self):
        from gcloud.bigtable._generated import operations_pb2

        # _process_operation() expects an Operation pb. Previously this
        # test passed the wrapping Cluster pb instead, which only raised
        # because Cluster coincidentally also has an (empty) ``name``
        # field; pass the malformed Operation directly.
        current_op = operations_pb2.Operation(name='invalid')
        with self.assertRaises(ValueError):
            self._callFUT(current_op)
498+
499+
322500
class _Client(object):
323501

324502
def __init__(self, project, timeout_seconds=None):

0 commit comments

Comments
 (0)