diff --git a/gcloud/datastore/_entity_pb2.py b/gcloud/datastore/_entity_pb2.py
new file mode 100644
index 000000000000..4c071ac38de1
--- /dev/null
+++ b/gcloud/datastore/_entity_pb2.py
@@ -0,0 +1,26 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Datastore shim to emulate v1beta3 module structure.
+
+This module is intended to pair with entity.proto.
+"""
+
+from gcloud.datastore import _datastore_v1_pb2
+
+
+PartitionId = _datastore_v1_pb2.PartitionId
+Key = _datastore_v1_pb2.Key
+Value = _datastore_v1_pb2.Value
+Entity = _datastore_v1_pb2.Entity
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index 69513bd5f043..b03937b90354 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -20,6 +20,7 @@
 from gcloud.environment_vars import GCD_HOST
 from gcloud.exceptions import make_exception
 from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+from gcloud.datastore import _entity_pb2
 
 
 class Connection(connection.Connection):
@@ -150,8 +151,8 @@ def lookup(self, dataset_id, key_pbs,
         Maps the ``DatastoreService.Lookup`` protobuf RPC.
 
         This method deals only with protobufs
-        (:class:`gcloud.datastore._datastore_v1_pb2.Key` and
-        :class:`gcloud.datastore._datastore_v1_pb2.Entity`) and is used
+        (:class:`gcloud.datastore._entity_pb2.Key` and
+        :class:`gcloud.datastore._entity_pb2.Entity`) and is used
         under the hood in :func:`gcloud.datastore.get`:
 
         >>> from gcloud import datastore
@@ -167,7 +168,7 @@ def lookup(self, dataset_id, key_pbs,
         :type dataset_id: string
         :param dataset_id: The ID of the dataset to look up the keys.
 
-        :type key_pbs: list of :class:`gcloud.datastore._datastore_v1_pb2.Key`
+        :type key_pbs: list of :class:`gcloud.datastore._entity_pb2.Key`
         :param key_pbs: The keys to retrieve from the datastore.
 
         :type eventual: boolean
@@ -183,9 +184,9 @@ def lookup(self, dataset_id, key_pbs,
         :rtype: tuple
         :returns: A triple of (``results``, ``missing``, ``deferred``) where
                   both ``results`` and ``missing`` are lists of
-                  :class:`gcloud.datastore._datastore_v1_pb2.Entity` and
+                  :class:`gcloud.datastore._entity_pb2.Entity` and
                   ``deferred`` is a list of
-                  :class:`gcloud.datastore._datastore_v1_pb2.Key`.
+                  :class:`gcloud.datastore._entity_pb2.Key`.
         """
         lookup_request = datastore_pb.LookupRequest()
         _set_read_options(lookup_request, eventual, transaction_id)
@@ -348,10 +349,10 @@ def allocate_ids(self, dataset_id, key_pbs):
         :param dataset_id: The ID of the dataset to which the transaction
                            belongs.
 
-        :type key_pbs: list of :class:`gcloud.datastore._datastore_v1_pb2.Key`
+        :type key_pbs: list of :class:`gcloud.datastore._entity_pb2.Key`
        :param key_pbs: The keys for which the backend should allocate IDs.
 
-        :rtype: list of :class:`gcloud.datastore._datastore_v1_pb2.Key`
+        :rtype: list of :class:`gcloud.datastore._entity_pb2.Key`
        :returns: An equal number of keys, with IDs filled in by the backend.
""" request = datastore_pb.AllocateIdsRequest() @@ -387,15 +388,15 @@ def _prepare_key_for_request(key_pb): # pragma: NO COVER copied from helpers This is copied from `helpers` to avoid a cycle: _implicit_environ -> connection -> helpers -> key -> _implicit_environ - :type key_pb: :class:`gcloud.datastore._datastore_v1_pb2.Key` + :type key_pb: :class:`gcloud.datastore._entity_pb2.Key` :param key_pb: A key to be added to a request. - :rtype: :class:`gcloud.datastore._datastore_v1_pb2.Key` + :rtype: :class:`gcloud.datastore._entity_pb2.Key` :returns: A key which will be added to a request. It will be the original if nothing needs to be changed. """ if key_pb.partition_id.HasField('dataset_id'): - new_key_pb = datastore_pb.Key() + new_key_pb = _entity_pb2.Key() new_key_pb.CopyFrom(key_pb) new_key_pb.partition_id.ClearField('dataset_id') key_pb = new_key_pb @@ -408,7 +409,7 @@ def _add_keys_to_request(request_field_pb, key_pbs): :type request_field_pb: `RepeatedCompositeFieldContainer` :param request_field_pb: A repeated proto field that contains keys. - :type key_pbs: list of :class:`gcloud.datastore._datastore_v1_pb2.Key` + :type key_pbs: list of :class:`gcloud.datastore._entity_pb2.Key` :param key_pbs: The keys to add to a request. """ for key_pb in key_pbs: diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index 427df84e726f..e3fcaba1df26 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -24,7 +24,7 @@ from gcloud._helpers import _datetime_from_microseconds from gcloud._helpers import _microseconds_from_datetime -from gcloud.datastore import _datastore_v1_pb2 as datastore_pb +from gcloud.datastore import _entity_pb2 from gcloud.datastore.entity import Entity from gcloud.datastore.key import Key @@ -79,7 +79,7 @@ def entity_from_protobuf(pb): The protobuf should be one returned from the Cloud Datastore Protobuf API. - :type pb: :class:`gcloud.datastore._datastore_v1_pb2.Entity` + :type pb: :class:`gcloud.datastore._entity_pb2.Entity` :param pb: The Protobuf representing the entity. :rtype: :class:`gcloud.datastore.entity.Entity` @@ -122,7 +122,7 @@ def key_from_protobuf(pb): The protobuf should be one returned from the Cloud Datastore Protobuf API. - :type pb: :class:`gcloud.datastore._datastore_v1_pb2.Key` + :type pb: :class:`gcloud.datastore._entity_pb2.Key` :param pb: The Protobuf representing the key. :rtype: :class:`gcloud.datastore.key.Key` @@ -216,7 +216,7 @@ def _get_value_from_value_pb(value_pb): Some work is done to coerce the return value into a more useful type (particularly in the case of a timestamp value, or a key value). - :type value_pb: :class:`gcloud.datastore._datastore_v1_pb2.Value` + :type value_pb: :class:`gcloud.datastore._entity_pb2.Value` :param value_pb: The Value Protobuf. :returns: The value provided by the Protobuf. @@ -280,7 +280,7 @@ def _set_protobuf_value(value_pb, val): Some value types (entities, keys, lists) cannot be directly assigned; this function handles them correctly. - :type value_pb: :class:`gcloud.datastore._datastore_v1_pb2.Value` + :type value_pb: :class:`gcloud.datastore._entity_pb2.Value` :param value_pb: The value protobuf to which the value is being assigned. :type val: :class:`datetime.datetime`, boolean, float, integer, string, @@ -317,10 +317,10 @@ def _set_protobuf_value(value_pb, val): def _prepare_key_for_request(key_pb): """Add protobuf keys to a request object. 
-    :type key_pb: :class:`gcloud.datastore._datastore_v1_pb2.Key`
+    :type key_pb: :class:`gcloud.datastore._entity_pb2.Key`
     :param key_pb: A key to be added to a request.
 
-    :rtype: :class:`gcloud.datastore._datastore_v1_pb2.Key`
+    :rtype: :class:`gcloud.datastore._entity_pb2.Key`
     :returns: A key which will be added to a request. It will be the
               original if nothing needs to be changed.
     """
@@ -334,7 +334,7 @@ def _prepare_key_for_request(key_pb):
         # both go to the datastore given by 's~foo'. So if the key
         # protobuf in the request body has dataset_id='foo', the
         # backend will reject since 'foo' != 's~foo'.
-        new_key_pb = datastore_pb.Key()
+        new_key_pb = _entity_pb2.Key()
         new_key_pb.CopyFrom(key_pb)
         new_key_pb.partition_id.ClearField('dataset_id')
         key_pb = new_key_pb
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index b8033a012632..abeff12385fe 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -17,7 +17,7 @@
 import copy
 import six
 
-from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+from gcloud.datastore import _entity_pb2
 
 
 class Key(object):
@@ -235,10 +235,10 @@ def completed_key(self, id_or_name):
     def to_protobuf(self):
         """Return a protobuf corresponding to the key.
 
-        :rtype: :class:`gcloud.datastore._datastore_v1_pb2.Key`
+        :rtype: :class:`gcloud.datastore._entity_pb2.Key`
         :returns: The protobuf representing the key.
         """
-        key = datastore_pb.Key()
+        key = _entity_pb2.Key()
         key.partition_id.dataset_id = self.dataset_id
 
         if self.namespace:
diff --git a/gcloud/datastore/test_batch.py b/gcloud/datastore/test_batch.py
index a030560d834f..dbbb51727f77 100644
--- a/gcloud/datastore/test_batch.py
+++ b/gcloud/datastore/test_batch.py
@@ -426,8 +426,8 @@ def is_partial(self):
         return self._id is None
 
     def to_protobuf(self):
-        from gcloud.datastore import _datastore_v1_pb2
-        key = self._key = _datastore_v1_pb2.Key()
+        from gcloud.datastore import _entity_pb2
+        key = self._key = _entity_pb2.Key()
         # Don't assign it, because it will just get ripped out
         # key.partition_id.dataset_id = self.dataset_id
 
diff --git a/gcloud/datastore/test_client.py b/gcloud/datastore/test_client.py
index aa7fa5ebcae9..bbc8d9bb6a60 100644
--- a/gcloud/datastore/test_client.py
+++ b/gcloud/datastore/test_client.py
@@ -16,9 +16,9 @@
 
 
 def _make_entity_pb(dataset_id, kind, integer_id, name=None, str_val=None):
-    from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+    from gcloud.datastore import _entity_pb2
 
-    entity_pb = datastore_pb.Entity()
+    entity_pb = _entity_pb2.Entity()
     entity_pb.key.partition_id.dataset_id = dataset_id
     path_element = entity_pb.key.path_element.add()
     path_element.kind = kind
@@ -314,14 +314,14 @@ def test_get_multi_miss(self):
         self.assertEqual(results, [])
 
     def test_get_multi_miss_w_missing(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
         from gcloud.datastore.key import Key
 
         KIND = 'Kind'
         ID = 1234
 
         # Make a missing entity pb to be returned from mock backend.
-        missed = datastore_pb.Entity()
+        missed = _entity_pb2.Entity()
         missed.key.partition_id.dataset_id = self.DATASET_ID
         path_element = missed.key.path_element.add()
         path_element.kind = KIND
@@ -378,7 +378,7 @@ def test_get_multi_miss_w_deferred(self):
                          [key.to_protobuf()])
 
     def test_get_multi_w_deferred_from_backend_but_not_passed(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
         from gcloud.datastore.entity import Entity
         from gcloud.datastore.key import Key
@@ -387,9 +387,9 @@ def test_get_multi_w_deferred_from_backend_but_not_passed(self):
         key2 = Key('Kind', 2345, dataset_id=self.DATASET_ID)
         key2_pb = key2.to_protobuf()
 
-        entity1_pb = datastore_pb.Entity()
+        entity1_pb = _entity_pb2.Entity()
         entity1_pb.key.CopyFrom(key1_pb)
-        entity2_pb = datastore_pb.Entity()
+        entity2_pb = _entity_pb2.Entity()
         entity2_pb.key.CopyFrom(key2_pb)
 
         creds = object()
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index 24654d09e1c8..c23e29049f56 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -337,11 +337,12 @@ def test_lookup_single_key_empty_response_w_transaction(self):
 
     def test_lookup_single_key_nonempty_response(self):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
 
         DATASET_ID = 'DATASET'
         key_pb = self._make_key_pb(DATASET_ID)
         rsp_pb = datastore_pb.LookupResponse()
-        entity = datastore_pb.Entity()
+        entity = _entity_pb2.Entity()
         entity.key.CopyFrom(key_pb)
         rsp_pb.found.add(entity=entity)
         conn = self._makeOne()
@@ -606,10 +607,11 @@ def test_run_query_wo_namespace_empty_result(self):
 
     def test_run_query_w_namespace_nonempty_result(self):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
 
         DATASET_ID = 'DATASET'
         KIND = 'Kind'
-        entity = datastore_pb.Entity()
+        entity = _entity_pb2.Entity()
         q_pb = self._make_query_pb(KIND)
         rsp_pb = datastore_pb.RunQueryResponse()
         rsp_pb.batch.entity_result.add(entity=entity)
diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py
index 51e81acf6492..a98b1c4b4930 100644
--- a/gcloud/datastore/test_helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -22,12 +22,12 @@ def _callFUT(self, val):
         return entity_from_protobuf(val)
 
     def test_it(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
 
         _DATASET_ID = 'DATASET'
         _KIND = 'KIND'
         _ID = 1234
-        entity_pb = datastore_pb.Entity()
+        entity_pb = _entity_pb2.Entity()
         entity_pb.key.partition_id.dataset_id = _DATASET_ID
         entity_pb.key.path_element.add(kind=_KIND, id=_ID)
         prop_pb = entity_pb.property.add()
@@ -71,12 +71,12 @@ def test_it(self):
         self.assertEqual(key.id, _ID)
 
     def test_mismatched_value_indexed(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
 
         _DATASET_ID = 'DATASET'
         _KIND = 'KIND'
         _ID = 1234
-        entity_pb = datastore_pb.Entity()
+        entity_pb = _entity_pb2.Entity()
         entity_pb.key.partition_id.dataset_id = _DATASET_ID
         entity_pb.key.path_element.add(kind=_KIND, id=_ID)
@@ -96,16 +96,16 @@ def test_mismatched_value_indexed(self):
         self._callFUT(entity_pb)
 
     def test_entity_no_key(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
 
-        entity_pb = datastore_pb.Entity()
+        entity_pb = _entity_pb2.Entity()
         entity = self._callFUT(entity_pb)
 
         self.assertEqual(entity.key, None)
         self.assertEqual(dict(entity), {})
 
     def test_nested_entity_no_key(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
 
         DATASET_ID = 's~FOO'
         KIND = 'KIND'
@@ -113,12 +113,12 @@ def test_nested_entity_no_key(self):
         OUTSIDE_NAME = 'OBAR'
         INSIDE_VALUE = 1337
 
-        entity_inside = datastore_pb.Entity()
+        entity_inside = _entity_pb2.Entity()
         inside_prop = entity_inside.property.add()
         inside_prop.name = INSIDE_NAME
         inside_prop.value.integer_value = INSIDE_VALUE
 
-        entity_pb = datastore_pb.Entity()
+        entity_pb = _entity_pb2.Entity()
         entity_pb.key.partition_id.dataset_id = DATASET_ID
         element = entity_pb.key.path_element.add()
         element.kind = KIND
@@ -146,7 +146,7 @@ def _callFUT(self, val):
         return key_from_protobuf(val)
 
     def _makePB(self, dataset_id=None, namespace=None, path=()):
-        from gcloud.datastore._datastore_v1_pb2 import Key
+        from gcloud.datastore._entity_pb2 import Key
         pb = Key()
         if dataset_id is not None:
             pb.partition_id.dataset_id = dataset_id
@@ -303,7 +303,7 @@ def _callFUT(self, pb):
         return _get_value_from_value_pb(pb)
 
     def _makePB(self, attr_name, value):
-        from gcloud.datastore._datastore_v1_pb2 import Value
+        from gcloud.datastore._entity_pb2 import Value
 
         pb = Value()
         setattr(pb, attr_name, value)
@@ -320,7 +320,7 @@ def test_datetime(self):
         self.assertEqual(self._callFUT(pb), utc)
 
     def test_key(self):
-        from gcloud.datastore._datastore_v1_pb2 import Value
+        from gcloud.datastore._entity_pb2 import Value
         from gcloud.datastore.key import Key
 
         pb = Value()
@@ -350,7 +350,7 @@ def test_unicode(self):
         self.assertEqual(self._callFUT(pb), u'str')
 
     def test_entity(self):
-        from gcloud.datastore._datastore_v1_pb2 import Value
+        from gcloud.datastore._entity_pb2 import Value
         from gcloud.datastore.entity import Entity
 
         pb = Value()
@@ -365,7 +365,7 @@ def test_entity(self):
         self.assertEqual(entity['foo'], 'Foo')
 
     def test_list(self):
-        from gcloud.datastore._datastore_v1_pb2 import Value
+        from gcloud.datastore._entity_pb2 import Value
 
         pb = Value()
         list_pb = pb.list_value
@@ -377,7 +377,7 @@ def test_list(self):
         self.assertEqual(items, ['Foo', 'Bar'])
 
     def test_unknown(self):
-        from gcloud.datastore._datastore_v1_pb2 import Value
+        from gcloud.datastore._entity_pb2 import Value
 
         pb = Value()
         self.assertEqual(self._callFUT(pb), None)
@@ -406,7 +406,7 @@ def _callFUT(self, value_pb, val):
         return _set_protobuf_value(value_pb, val)
 
     def _makePB(self):
-        from gcloud.datastore._datastore_v1_pb2 import Value
+        from gcloud.datastore._entity_pb2 import Value
 
         return Value()
@@ -543,19 +543,19 @@ def _callFUT(self, key_pb):
         return _prepare_key_for_request(key_pb)
 
     def test_prepare_dataset_id_valid(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
-        key = datastore_pb.Key()
+        from gcloud.datastore import _entity_pb2
+        key = _entity_pb2.Key()
         key.partition_id.dataset_id = 'foo'
         new_key = self._callFUT(key)
         self.assertFalse(new_key is key)
 
-        key_without = datastore_pb.Key()
+        key_without = _entity_pb2.Key()
         new_key.ClearField('partition_id')
         self.assertEqual(new_key, key_without)
 
     def test_prepare_dataset_id_unset(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
-        key = datastore_pb.Key()
+        from gcloud.datastore import _entity_pb2
+        key = _entity_pb2.Key()
         new_key = self._callFUT(key)
         self.assertTrue(new_key is key)
@@ -622,7 +622,7 @@ def __init__(self, prefix, from_missing=False):
         self.from_missing = from_missing
 
     def lookup(self, dataset_id, key_pbs):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
 
         # Store the arguments called with.
         self._called_dataset_id = dataset_id
@@ -630,7 +630,7 @@ def lookup(self, dataset_id, key_pbs):
 
         key_pb, = key_pbs
 
-        response = datastore_pb.Entity()
+        response = _entity_pb2.Entity()
         response.key.CopyFrom(key_pb)
         response.key.partition_id.dataset_id = self.prefix + dataset_id
diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py
index 0220b22e7787..5432004b3748 100644
--- a/gcloud/datastore/test_key.py
+++ b/gcloud/datastore/test_key.py
@@ -333,7 +333,7 @@ def test_completed_key_on_complete(self):
         self.assertRaises(ValueError, key.completed_key, 5678)
 
     def test_to_protobuf_defaults(self):
-        from gcloud.datastore._datastore_v1_pb2 import Key as KeyPB
+        from gcloud.datastore._entity_pb2 import Key as KeyPB
         _KIND = 'KIND'
         key = self._makeOne(_KIND, dataset_id=self._DEFAULT_DATASET)
         pb = key.to_protobuf()
diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py
index 92c28e1653b0..c4a3e0e6a65b 100644
--- a/gcloud/datastore/test_query.py
+++ b/gcloud/datastore/test_query.py
@@ -327,10 +327,12 @@ def _makeOne(self, *args, **kw):
 
     def _addQueryResults(self, connection, cursor=_END, more=False):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _entity_pb2
+
         MORE = datastore_pb.QueryResultBatch.NOT_FINISHED
         NO_MORE = datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
         _ID = 123
-        entity_pb = datastore_pb.Entity()
+        entity_pb = _entity_pb2.Entity()
         entity_pb.key.partition_id.dataset_id = self._DATASET
         path_element = entity_pb.key.path_element.add()
         path_element.kind = self._KIND
diff --git a/gcloud/datastore/test_transaction.py b/gcloud/datastore/test_transaction.py
index 577b7f07bddd..53f0654d6997 100644
--- a/gcloud/datastore/test_transaction.py
+++ b/gcloud/datastore/test_transaction.py
@@ -161,7 +161,7 @@ class Foo(Exception):
 
 
 def _make_key(kind, id, dataset_id):
-    from gcloud.datastore._datastore_v1_pb2 import Key
+    from gcloud.datastore._entity_pb2 import Key
 
     key = Key()
     key.partition_id.dataset_id = dataset_id
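Reviewer note: the new `_entity_pb2` module is a pure aliasing shim over the generated `_datastore_v1_pb2` module, so the import swaps above do not change behavior; they only line call sites up with the v1beta3 module layout. A minimal usage sketch follows (the dataset ID, kind, and ID values here are invented for illustration and assume the `gcloud` package from this branch is importable):

    from gcloud.datastore import _datastore_v1_pb2
    from gcloud.datastore import _entity_pb2

    # The shim re-exports the same generated classes, so identity holds.
    assert _entity_pb2.Key is _datastore_v1_pb2.Key
    assert _entity_pb2.Entity is _datastore_v1_pb2.Entity

    # Building a key protobuf through the shim works exactly as before,
    # using the same partition_id / path_element fields the tests exercise.
    key_pb = _entity_pb2.Key()
    key_pb.partition_id.dataset_id = 's~example-dataset'
    path_element = key_pb.path_element.add()
    path_element.kind = 'Person'
    path_element.id = 1234
    print(key_pb)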