diff --git a/gcloud/datastore/_query_pb2.py b/gcloud/datastore/_query_pb2.py
new file mode 100644
index 000000000000..b3427c33525c
--- /dev/null
+++ b/gcloud/datastore/_query_pb2.py
@@ -0,0 +1,33 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Datastore shim to emulate v1beta3 module structure.
+
+This module is intended to pair with query.proto.
+"""
+
+from gcloud.datastore import _datastore_v1_pb2
+
+
+EntityResult = _datastore_v1_pb2.EntityResult
+Query = _datastore_v1_pb2.Query
+KindExpression = _datastore_v1_pb2.KindExpression
+PropertyReference = _datastore_v1_pb2.PropertyReference
+PropertyOrder = _datastore_v1_pb2.PropertyOrder
+Filter = _datastore_v1_pb2.Filter
+CompositeFilter = _datastore_v1_pb2.CompositeFilter
+PropertyFilter = _datastore_v1_pb2.PropertyFilter
+GqlQuery = _datastore_v1_pb2.GqlQuery
+GqlQueryArg = _datastore_v1_pb2.GqlQueryArg
+QueryResultBatch = _datastore_v1_pb2.QueryResultBatch
diff --git a/gcloud/datastore/client.py b/gcloud/datastore/client.py
index f3a31356594b..0767a54c33a6 100644
--- a/gcloud/datastore/client.py
+++ b/gcloud/datastore/client.py
@@ -90,7 +90,7 @@ def _extended_lookup(connection, dataset_id, key_pbs,
     :type dataset_id: string
     :param dataset_id: The ID of the dataset of which to make the request.
 
-    :type key_pbs: list of :class:`gcloud.datastore._datastore_v1_pb2.Key`
+    :type key_pbs: list of :class:`gcloud.datastore._entity_pb2.Key`
     :param key_pbs: The keys to retrieve from the datastore.
 
     :type missing: an empty list or None.
@@ -113,7 +113,7 @@ def _extended_lookup(connection, dataset_id, key_pbs,
                            the given transaction.  Incompatible with
                            ``eventual==True``.
 
-    :rtype: list of :class:`gcloud.datastore._datastore_v1_pb2.Entity`
+    :rtype: list of :class:`gcloud.datastore._entity_pb2.Entity`
     :returns: The requested entities.
     :raises: :class:`ValueError` if missing / deferred are not null or
              empty list.
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index b03937b90354..3f921cb82cd6 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -239,7 +239,7 @@ def run_query(self, dataset_id, query_pb, namespace=None,
         :type dataset_id: string
         :param dataset_id: The ID of the dataset over which to run the query.
 
-        :type query_pb: :class:`gcloud.datastore._datastore_v1_pb2.Query`
+        :type query_pb: :class:`gcloud.datastore._query_pb2.Query`
         :param query_pb: The Protobuf representing the query to run.
 
         :type namespace: string
diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py
index f3fe0a584b9c..93cf5a9db225 100644
--- a/gcloud/datastore/query.py
+++ b/gcloud/datastore/query.py
@@ -17,7 +17,7 @@
 import base64
 
 from gcloud._helpers import _ensure_tuple_or_list
-from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+from gcloud.datastore import _query_pb2
 from gcloud.datastore import helpers
 from gcloud.datastore.key import Key
 
@@ -64,11 +64,11 @@ class Query(object):
     """
 
     OPERATORS = {
-        '<=': datastore_pb.PropertyFilter.LESS_THAN_OR_EQUAL,
-        '>=': datastore_pb.PropertyFilter.GREATER_THAN_OR_EQUAL,
-        '<': datastore_pb.PropertyFilter.LESS_THAN,
-        '>': datastore_pb.PropertyFilter.GREATER_THAN,
-        '=': datastore_pb.PropertyFilter.EQUAL,
+        '<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
+        '>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
+        '<': _query_pb2.PropertyFilter.LESS_THAN,
+        '>': _query_pb2.PropertyFilter.GREATER_THAN,
+        '=': _query_pb2.PropertyFilter.EQUAL,
     }
     """Mapping of operator strings and their protobuf equivalents."""
 
@@ -359,11 +359,11 @@ class Iterator(object):
         query results.
     """
 
-    _NOT_FINISHED = datastore_pb.QueryResultBatch.NOT_FINISHED
+    _NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED
 
     _FINISHED = (
-        datastore_pb.QueryResultBatch.NO_MORE_RESULTS,
-        datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
+        _query_pb2.QueryResultBatch.NO_MORE_RESULTS,
+        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
     )
 
     def __init__(self, query, client, limit=None, offset=0,
@@ -456,12 +456,12 @@ def _pb_from_query(query):
     :type query: :class:`Query`
    :param query: The source query.
 
-    :rtype: :class:`gcloud.datastore._datastore_v1_pb2.Query`
+    :rtype: :class:`gcloud.datastore._query_pb2.Query`
     :returns: A protobuf that can be sent to the protobuf API.  N.b. that
               it does not contain "in-flight" fields for ongoing query
              executions (cursors, offset, limit).
     """
-    pb = datastore_pb.Query()
+    pb = _query_pb2.Query()
 
     for projection_name in query.projection:
         pb.projection.add().property.name = projection_name
@@ -470,7 +470,7 @@ def _pb_from_query(query):
         pb.kind.add().name = query.kind
 
     composite_filter = pb.filter.composite_filter
-    composite_filter.operator = datastore_pb.CompositeFilter.AND
+    composite_filter.operator = _query_pb2.CompositeFilter.AND
 
     if query.ancestor:
         ancestor_pb = helpers._prepare_key_for_request(
@@ -479,7 +479,7 @@ def _pb_from_query(query):
         # Filter on __key__ HAS_ANCESTOR == ancestor.
         ancestor_filter = composite_filter.filter.add().property_filter
         ancestor_filter.property.name = '__key__'
-        ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR
+        ancestor_filter.operator = _query_pb2.PropertyFilter.HAS_ANCESTOR
         ancestor_filter.value.key_value.CopyFrom(ancestor_pb)
 
     for property_name, operator, value in query.filters:
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index c23e29049f56..76e3b7d449df 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -30,8 +30,8 @@ def _make_key_pb(self, dataset_id, id=1234):
         return Key(*path_args, dataset_id=dataset_id).to_protobuf()
 
     def _make_query_pb(self, kind):
-        from gcloud.datastore.connection import datastore_pb
-        pb = datastore_pb.Query()
+        from gcloud.datastore import _query_pb2
+        pb = _query_pb2.Query()
         pb.kind.add().name = kind
         return pb
 
@@ -476,6 +476,7 @@ def test_lookup_multiple_keys_w_deferred(self):
 
     def test_run_query_w_eventual_no_transaction(self):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _query_pb2
 
         DATASET_ID = 'DATASET'
         KIND = 'Nonesuch'
@@ -483,9 +484,9 @@ def test_run_query_w_eventual_no_transaction(self):
         q_pb = self._make_query_pb(KIND)
         rsp_pb = datastore_pb.RunQueryResponse()
         rsp_pb.batch.end_cursor = CURSOR
-        no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
+        no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
         rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
+        rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
         conn = self._makeOne()
         URI = '/'.join([
             conn.api_base_url,
@@ -515,6 +516,7 @@ def test_run_query_w_eventual_no_transaction(self):
 
     def test_run_query_wo_eventual_w_transaction(self):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _query_pb2
 
         DATASET_ID = 'DATASET'
         KIND = 'Nonesuch'
@@ -523,9 +525,9 @@ def test_run_query_wo_eventual_w_transaction(self):
         q_pb = self._make_query_pb(KIND)
         rsp_pb = datastore_pb.RunQueryResponse()
         rsp_pb.batch.end_cursor = CURSOR
-        no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
+        no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
         rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
+        rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
         conn = self._makeOne()
         URI = '/'.join([
             conn.api_base_url,
@@ -555,6 +557,7 @@ def test_run_query_wo_eventual_w_transaction(self):
 
     def test_run_query_w_eventual_and_transaction(self):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _query_pb2
 
         DATASET_ID = 'DATASET'
         KIND = 'Nonesuch'
@@ -563,15 +566,16 @@ def test_run_query_w_eventual_and_transaction(self):
         q_pb = self._make_query_pb(KIND)
         rsp_pb = datastore_pb.RunQueryResponse()
         rsp_pb.batch.end_cursor = CURSOR
-        no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
+        no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
         rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
+        rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
         conn = self._makeOne()
         self.assertRaises(ValueError, conn.run_query, DATASET_ID, q_pb,
                           eventual=True, transaction_id=TRANSACTION)
 
     def test_run_query_wo_namespace_empty_result(self):
         from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _query_pb2
 
         DATASET_ID = 'DATASET'
         KIND = 'Nonesuch'
@@ -579,9 +583,9 @@ def test_run_query_wo_namespace_empty_result(self):
         q_pb = self._make_query_pb(KIND)
         rsp_pb = datastore_pb.RunQueryResponse()
         rsp_pb.batch.end_cursor = CURSOR
-        no_more = datastore_pb.QueryResultBatch.NO_MORE_RESULTS
+        no_more = _query_pb2.QueryResultBatch.NO_MORE_RESULTS
         rsp_pb.batch.more_results = no_more
-        rsp_pb.batch.entity_result_type = datastore_pb.EntityResult.FULL
+        rsp_pb.batch.entity_result_type = _query_pb2.EntityResult.FULL
         conn = self._makeOne()
         URI = '/'.join([
             conn.api_base_url,
diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py
index c4a3e0e6a65b..a0e944b98b61 100644
--- a/gcloud/datastore/test_query.py
+++ b/gcloud/datastore/test_query.py
@@ -326,11 +326,11 @@ def _makeOne(self, *args, **kw):
         return self._getTargetClass()(*args, **kw)
 
     def _addQueryResults(self, connection, cursor=_END, more=False):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
         from gcloud.datastore import _entity_pb2
+        from gcloud.datastore import _query_pb2
 
-        MORE = datastore_pb.QueryResultBatch.NOT_FINISHED
-        NO_MORE = datastore_pb.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
+        MORE = _query_pb2.QueryResultBatch.NOT_FINISHED
+        NO_MORE = _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
         _ID = 123
         entity_pb = _entity_pb2.Entity()
         entity_pb.key.partition_id.dataset_id = self._DATASET
@@ -531,7 +531,8 @@ def _callFUT(self, query):
         return _pb_from_query(query)
 
     def test_empty(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _query_pb2
+
         pb = self._callFUT(_Query())
         self.assertEqual(list(pb.projection), [])
         self.assertEqual(list(pb.kind), [])
@@ -539,7 +540,7 @@ def test_empty(self):
         self.assertEqual(list(pb.group_by), [])
         self.assertEqual(pb.filter.property_filter.property.name, '')
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
+        self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
         self.assertEqual(list(cfilter.filter), [])
         self.assertEqual(pb.start_cursor, b'')
         self.assertEqual(pb.end_cursor, b'')
@@ -556,13 +557,14 @@ def test_kind(self):
         self.assertEqual([item.name for item in pb.kind], ['KIND'])
 
     def test_ancestor(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
         from gcloud.datastore.key import Key
         from gcloud.datastore.helpers import _prepare_key_for_request
+        from gcloud.datastore import _query_pb2
+
         ancestor = Key('Ancestor', 123, dataset_id='DATASET')
         pb = self._callFUT(_Query(ancestor=ancestor))
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
+        self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
         self.assertEqual(len(cfilter.filter), 1)
         pfilter = cfilter.filter[0].property_filter
         self.assertEqual(pfilter.property.name, '__key__')
@@ -570,31 +572,33 @@ def test_ancestor(self):
         self.assertEqual(pfilter.value.key_value, ancestor_pb)
 
     def test_filter(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _query_pb2
+
         query = _Query(filters=[('name', '=', u'John')])
         query.OPERATORS = {
-            '=': datastore_pb.PropertyFilter.EQUAL,
+            '=': _query_pb2.PropertyFilter.EQUAL,
         }
         pb = self._callFUT(query)
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
+        self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
         self.assertEqual(len(cfilter.filter), 1)
         pfilter = cfilter.filter[0].property_filter
         self.assertEqual(pfilter.property.name, 'name')
         self.assertEqual(pfilter.value.string_value, u'John')
 
     def test_filter_key(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
         from gcloud.datastore.key import Key
         from gcloud.datastore.helpers import _prepare_key_for_request
+        from gcloud.datastore import _query_pb2
+
         key = Key('Kind', 123, dataset_id='DATASET')
         query = _Query(filters=[('__key__', '=', key)])
         query.OPERATORS = {
-            '=': datastore_pb.PropertyFilter.EQUAL,
+            '=': _query_pb2.PropertyFilter.EQUAL,
         }
         pb = self._callFUT(query)
         cfilter = pb.filter.composite_filter
-        self.assertEqual(cfilter.operator, datastore_pb.CompositeFilter.AND)
+        self.assertEqual(cfilter.operator, _query_pb2.CompositeFilter.AND)
         self.assertEqual(len(cfilter.filter), 1)
         pfilter = cfilter.filter[0].property_filter
         self.assertEqual(pfilter.property.name, '__key__')
@@ -602,14 +606,15 @@ def test_filter_key(self):
         self.assertEqual(pfilter.value.key_value, key_pb)
 
     def test_order(self):
-        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore import _query_pb2
+
         pb = self._callFUT(_Query(order=['a', '-b', 'c']))
         self.assertEqual([item.property.name for item in pb.order],
                          ['a', 'b', 'c'])
         self.assertEqual([item.direction for item in pb.order],
-                         [datastore_pb.PropertyOrder.ASCENDING,
-                          datastore_pb.PropertyOrder.DESCENDING,
-                          datastore_pb.PropertyOrder.ASCENDING])
+                         [_query_pb2.PropertyOrder.ASCENDING,
+                          _query_pb2.PropertyOrder.DESCENDING,
+                          _query_pb2.PropertyOrder.ASCENDING])
 
     def test_group_by(self):
         pb = self._callFUT(_Query(group_by=['a', 'b', 'c']))
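A minimal usage sketch of the new shim follows; it is not part of the patch. It assumes the gcloud package from this branch is importable, and the 'Person' kind and 'name' filter values are illustrative only. The shim simply re-exports the _datastore_v1_pb2 classes under the v1beta3-style module name, so code that switches its imports to _query_pb2 keeps building the same protobufs that _pb_from_query() produces.

# Minimal sketch (not part of the patch), assuming this branch is on sys.path.
from gcloud.datastore import _datastore_v1_pb2
from gcloud.datastore import _query_pb2

# The shim only aliases the v1 generated classes, so both names refer to
# the same protobuf message class.
assert _query_pb2.Query is _datastore_v1_pb2.Query

# Build a query protobuf through the shim, mirroring _pb_from_query() above.
pb = _query_pb2.Query()
pb.kind.add().name = 'Person'  # hypothetical kind, for illustration only
pb.filter.composite_filter.operator = _query_pb2.CompositeFilter.AND
pfilter = pb.filter.composite_filter.filter.add().property_filter
pfilter.property.name = 'name'
pfilter.operator = _query_pb2.PropertyFilter.EQUAL
pfilter.value.string_value = u'John'
print(pb)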