Skip to content

Commit 6bdc25d

Browse files
committed
Adding support for queries in regression test.
This is an attempt to port the gcloud-node regression tests for queries over to gcloud-python.
1 parent 780aa01 commit 6bdc25d

File tree

2 files changed

+310
-19
lines changed

2 files changed

+310
-19
lines changed

CONTRIBUTING.rst

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -141,6 +141,10 @@ Running Regression Tests
141141

142142
$ python regression/run_regression.py --package {package}
143143

144+
This alone will not run the tests. You'll need to change some local
145+
auth settings and change some configuration in your project to
146+
run all the tests.
147+
144148
- Regression tests will be run against an actual project and
145149
so you'll need to provide some environment variables to facilitate
146150
authentication to your project:
@@ -149,6 +153,9 @@ Running Regression Tests
149153
- ``GCLOUD_TESTS_CLIENT_EMAIL``: The email for the service account you're
150154
authenticating with
151155
- ``GCLOUD_TESTS_KEY_FILE``: The path to an encrypted key file.
156+
See private key
157+
`docs <https://cloud.google.com/storage/docs/authentication#generating-a-private-key>`__
158+
for an explanation of how to get a private key.
152159

153160
- Examples of these can be found in ``regression/local_test_setup.sample``. We
154161
recommend copying this to ``regression/local_test_setup``, editing the values
@@ -160,6 +167,29 @@ Running Regression Tests
160167
absolute) on your system where the key file for your service account can
161168
be found.
162169

170+
- For datastore tests, you'll need to create composite
171+
`indexes <https://cloud.google.com/datastore/docs/tools/indexconfig>`__
172+
with the ``gcloud`` command line
173+
`tool <https://developers.google.com/cloud/sdk/gcloud/>`__::
174+
175+
# Install the app (App Engine Command Line Interface) component.
176+
$ gcloud components update app
177+
178+
# See https://cloud.google.com/sdk/crypto for details on PyOpenSSL and
179+
# http://stackoverflow.com/a/25067729/1068170 for why we must persist.
180+
$ export CLOUDSDK_PYTHON_SITEPACKAGES=1
181+
182+
# Authenticate the gcloud tool with your account.
183+
$ gcloud auth activate-service-account $GCLOUD_TESTS_CLIENT_EMAIL \
184+
> --key-file=$GCLOUD_TESTS_KEY_FILE
185+
186+
# Create the indexes
187+
$ gcloud preview datastore create-indexes regression/data/ \
188+
> --project=$GCLOUD_TESTS_DATASET_ID
189+
190+
# Restore your environment to its previous state.
191+
$ unset CLOUDSDK_PYTHON_SITEPACKAGES
192+
163193
Test Coverage
164194
-------------
165195

regression/datastore.py

Lines changed: 280 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -10,24 +10,39 @@
1010

1111
class TestDatastore(unittest2.TestCase):
1212

13-
def setUp(self):
13+
@classmethod
14+
def setUpClass(cls):
1415
environ = regression_utils.get_environ()
15-
self._dataset_id = environ['dataset_id']
16-
self._client_email = environ['client_email']
17-
self._key_filename = environ['key_filename']
18-
self._datasets = {}
16+
cls._dataset_id = environ['dataset_id']
17+
cls._client_email = environ['client_email']
18+
cls._key_filename = environ['key_filename']
19+
cls._datasets = {}
20+
21+
cls.suite_entities_to_delete = []
22+
23+
@classmethod
24+
def tearDownClass(cls):
25+
with cls._get_dataset().transaction():
26+
for entity in cls.suite_entities_to_delete:
27+
entity.delete()
1928

20-
self.entities_to_delete = []
29+
@classmethod
30+
def _get_dataset(cls):
31+
if cls._dataset_id not in cls._datasets:
32+
cls._datasets[cls._dataset_id] = datastore.get_dataset(
33+
cls._dataset_id, cls._client_email, cls._key_filename)
34+
return cls._datasets[cls._dataset_id]
35+
36+
def setUp(self):
37+
self.case_entities_to_delete = []
2138

2239
def tearDown(self):
23-
for entity in self.entities_to_delete:
24-
entity.delete()
40+
with self._get_dataset().transaction():
41+
for entity in self.case_entities_to_delete:
42+
entity.delete()
43+
2544

26-
def _get_dataset(self):
27-
if self._dataset_id not in self._datasets:
28-
self._datasets[self._dataset_id] = datastore.get_dataset(
29-
self._dataset_id, self._client_email, self._key_filename)
30-
return self._datasets[self._dataset_id]
45+
class TestDatastoreSave(TestDatastore):
3146

3247
def _get_post(self, name=None, key_id=None, post_content=None):
3348
post_content = post_content or {
@@ -60,16 +75,16 @@ def _generic_test_post(self, name=None, key_id=None):
6075
entity.save()
6176

6277
# Register entity to be deleted.
63-
self.entities_to_delete.append(entity)
78+
self.case_entities_to_delete.append(entity)
6479

6580
if name is not None:
6681
self.assertEqual(entity.key().name(), name)
6782
if key_id is not None:
6883
self.assertEqual(entity.key().id(), key_id)
6984
retrieved_entity = self._get_dataset().get_entity(entity.key())
7085
# Check the keys are the same.
71-
self.assertEqual(retrieved_entity.key().path(),
72-
entity.key().path())
86+
self.assertEqual(retrieved_entity.key(), entity.key())
87+
7388
# Check the data is the same.
7489
retrieved_dict = dict(retrieved_entity.items())
7590
entity_dict = dict(entity.items())
@@ -90,7 +105,7 @@ def test_save_multiple(self):
90105
entity1 = self._get_post()
91106
entity1.save()
92107
# Register entity to be deleted.
93-
self.entities_to_delete.append(entity1)
108+
self.case_entities_to_delete.append(entity1)
94109

95110
second_post_content = {
96111
'title': 'How to make the perfect homemade pasta',
@@ -104,12 +119,258 @@ def test_save_multiple(self):
104119
entity2 = self._get_post(post_content=second_post_content)
105120
entity2.save()
106121
# Register entity to be deleted.
107-
self.entities_to_delete.append(entity2)
122+
self.case_entities_to_delete.append(entity2)
108123

109124
keys = [entity1.key(), entity2.key()]
110125
matches = dataset.get_entities(keys)
111126
self.assertEqual(len(matches), 2)
112127

113128
def test_empty_kind(self):
114-
posts = self._get_dataset().query().kind('Post').limit(2).fetch()
129+
posts = self._get_dataset().query(kind='Post').limit(2).fetch()
115130
self.assertEqual(posts, [])
131+
132+
133+
class TestDatastoreQuery(TestDatastore):
134+
135+
KEY_PATHS = [
136+
[{'kind': 'Character', 'name': 'Rickard'}],
137+
[{'kind': 'Character', 'name': 'Rickard'},
138+
{'kind': 'Character', 'name': 'Eddard'}],
139+
[{'kind': 'Character', 'name': 'Catelyn'}],
140+
[{'kind': 'Character', 'name': 'Eddard'},
141+
{'kind': 'Character', 'name': 'Arya'}],
142+
[{'kind': 'Character', 'name': 'Eddard'},
143+
{'kind': 'Character', 'name': 'Sansa'}],
144+
[{'kind': 'Character', 'name': 'Eddard'},
145+
{'kind': 'Character', 'name': 'Robb'}],
146+
[{'kind': 'Character', 'name': 'Eddard'},
147+
{'kind': 'Character', 'name': 'Bran'}],
148+
[{'kind': 'Character', 'name': 'Eddard'},
149+
{'kind': 'Character', 'name': 'Jon Snow'}],
150+
]
151+
CHARACTERS = [
152+
{
153+
'name': 'Rickard',
154+
'family': 'Stark',
155+
'appearances': 0,
156+
'alive': False,
157+
}, {
158+
'name': 'Eddard',
159+
'family': 'Stark',
160+
'appearances': 9,
161+
'alive': False,
162+
}, {
163+
'name': 'Catelyn',
164+
'family': ['Stark', 'Tully'],
165+
'appearances': 26,
166+
'alive': False,
167+
}, {
168+
'name': 'Arya',
169+
'family': 'Stark',
170+
'appearances': 33,
171+
'alive': True,
172+
}, {
173+
'name': 'Sansa',
174+
'family': 'Stark',
175+
'appearances': 31,
176+
'alive': True,
177+
}, {
178+
'name': 'Robb',
179+
'family': 'Stark',
180+
'appearances': 22,
181+
'alive': False,
182+
}, {
183+
'name': 'Bran',
184+
'family': 'Stark',
185+
'appearances': 25,
186+
'alive': True,
187+
}, {
188+
'name': 'Jon Snow',
189+
'family': 'Stark',
190+
'appearances': 32,
191+
'alive': True,
192+
},
193+
]
194+
195+
@classmethod
196+
def setUpClass(cls):
197+
super(TestDatastoreQuery, cls).setUpClass()
198+
dataset = cls._get_dataset()
199+
cls.KEYS = [datastore.key.Key(path=key_path)
200+
for key_path in cls.KEY_PATHS]
201+
202+
with dataset.transaction():
203+
for key, character in zip(cls.KEYS, cls.CHARACTERS):
204+
entity = datastore.entity.Entity(dataset=dataset).key(key)
205+
entity.update(character)
206+
entity.save()
207+
# Register entity to be deleted.
208+
cls.suite_entities_to_delete.append(entity)
209+
210+
def test_keys(self):
211+
self.assertEqual(len(self.KEY_PATHS), len(self.CHARACTERS))
212+
for key_path, character in zip(self.KEY_PATHS, self.CHARACTERS):
213+
self.assertEqual(key_path[-1]['name'], character['name'])
214+
215+
def test_limit_queries(self):
216+
dataset = self._get_dataset()
217+
limit = 5
218+
query = dataset.query(kind='Character').limit(limit)
219+
# Verify there is no cursor before fetch().
220+
self.assertRaises(RuntimeError, query.cursor)
221+
222+
# Fetch characters.
223+
character_entities = query.fetch()
224+
self.assertEqual(len(character_entities), limit)
225+
226+
# Check cursor after fetch.
227+
cursor = query.cursor()
228+
self.assertTrue(cursor is not None)
229+
230+
# Fetch next batch of characters.
231+
new_query = dataset.query(kind='Character').with_cursor(cursor)
232+
new_character_entities = new_query.fetch()
233+
characters_remaining = len(self.CHARACTERS) - limit
234+
self.assertEqual(len(new_character_entities), characters_remaining)
235+
236+
def test_query_simple_filter(self):
237+
query = self._get_dataset().query(kind='Character')
238+
query = query.filter('appearances >=', 20)
239+
expected_matches = 6
240+
# We expect 6, but allow the query to get 1 extra.
241+
entities = query.fetch(limit=expected_matches + 1)
242+
self.assertEqual(len(entities), expected_matches)
243+
244+
def test_query_multiple_filters(self):
245+
query = self._get_dataset().query(kind='Character')
246+
query = query.filter('appearances >=', 26).filter('family =', 'Stark')
247+
expected_matches = 4
248+
# We expect 4, but allow the query to get 1 extra.
249+
entities = query.fetch(limit=expected_matches + 1)
250+
self.assertEqual(len(entities), expected_matches)
251+
252+
def test_ancestor_query(self):
253+
query = self._get_dataset().query('Character')
254+
filtered_query = query.ancestor(['Character', 'Eddard'])
255+
256+
expected_matches = 5
257+
# We expect 5, but allow the query to get 1 extra.
258+
entities = filtered_query.fetch(limit=expected_matches + 1)
259+
self.assertEqual(len(entities), expected_matches)
260+
261+
def test_query___key___filter(self):
262+
dataset = self._get_dataset()
263+
rickard_key = datastore.key.Key(
264+
path=[{'kind': 'Character', 'name': 'Rickard'}])
265+
266+
query = dataset.query('Character').filter('__key__ =', rickard_key)
267+
expected_matches = 1
268+
# We expect 1, but allow the query to get 1 extra.
269+
entities = query.fetch(limit=expected_matches + 1)
270+
self.assertEqual(len(entities), expected_matches)
271+
272+
def test_ordered_query(self):
273+
query = self._get_dataset().query('Character').order('appearances')
274+
expected_matches = 8
275+
# We expect 8, but allow the query to get 1 extra.
276+
entities = query.fetch(limit=expected_matches + 1)
277+
self.assertEqual(len(entities), expected_matches)
278+
279+
# Actually check the ordered data returned.
280+
self.assertEqual(entities[0]['name'], self.CHARACTERS[0]['name'])
281+
self.assertEqual(entities[7]['name'], self.CHARACTERS[3]['name'])
282+
283+
def test_projection_query(self):
284+
query = self._get_dataset().query('Character')
285+
filtered_query = query.projection(['name', 'family'])
286+
287+
# NOTE: There are 9 responses because of Catelyn. She has both
288+
# Stark and Tully as her families, hence occurs twice in
289+
# the results.
290+
expected_matches = 9
291+
# We expect 9, but allow the query to get 1 extra.
292+
entities = filtered_query.fetch(limit=expected_matches + 1)
293+
self.assertEqual(len(entities), expected_matches)
294+
295+
arya_entity = entities[0]
296+
arya_dict = dict(arya_entity.items())
297+
self.assertEqual(arya_dict, {'name': 'Arya', 'family': 'Stark'})
298+
299+
catelyn_stark_entity = entities[2]
300+
catelyn_stark_dict = dict(catelyn_stark_entity.items())
301+
self.assertEqual(catelyn_stark_dict,
302+
{'name': 'Catelyn', 'family': 'Stark'})
303+
304+
catelyn_tully_entity = entities[3]
305+
catelyn_tully_dict = dict(catelyn_tully_entity.items())
306+
self.assertEqual(catelyn_tully_dict,
307+
{'name': 'Catelyn', 'family': 'Tully'})
308+
309+
# Check both Catelyn keys are the same.
310+
catelyn_stark_key = catelyn_stark_entity.key()
311+
catelyn_tully_key = catelyn_tully_entity.key()
312+
self.assertEqual(catelyn_stark_key, catelyn_tully_key)
313+
314+
sansa_entity = entities[8]
315+
sansa_dict = dict(sansa_entity.items())
316+
self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'})
317+
318+
def test_query_paginate_with_offset(self):
319+
query = self._get_dataset().query('Character')
320+
offset = 2
321+
limit = 3
322+
page_query = query.offset(offset).limit(limit).order('appearances')
323+
# Make sure no cursor is set before fetch.
324+
self.assertRaises(RuntimeError, page_query.cursor)
325+
326+
# Fetch characters.
327+
entities = page_query.fetch()
328+
self.assertEqual(len(entities), limit)
329+
self.assertEqual(entities[0]['name'], 'Robb')
330+
self.assertEqual(entities[1]['name'], 'Bran')
331+
self.assertEqual(entities[2]['name'], 'Catelyn')
332+
333+
# Use cursor to begin next query.
334+
cursor = page_query.cursor()
335+
next_query = page_query.with_cursor(cursor).offset(0)
336+
self.assertEqual(next_query.limit(), limit)
337+
# Fetch next set of characters.
338+
entities = next_query.fetch()
339+
self.assertEqual(len(entities), limit)
340+
self.assertEqual(entities[0]['name'], 'Sansa')
341+
self.assertEqual(entities[1]['name'], 'Jon Snow')
342+
self.assertEqual(entities[2]['name'], 'Arya')
343+
344+
def test_query_paginate_with_start_cursor(self):
345+
query = self._get_dataset().query('Character')
346+
offset = 2
347+
limit = 2
348+
page_query = query.offset(offset).limit(limit).order('appearances')
349+
# Make sure no cursor is set before fetch.
350+
self.assertRaises(RuntimeError, page_query.cursor)
351+
352+
# Fetch characters.
353+
entities = page_query.fetch()
354+
self.assertEqual(len(entities), limit)
355+
356+
# Use cursor to create a fresh query.
357+
cursor = page_query.cursor()
358+
fresh_query = self._get_dataset().query('Character')
359+
fresh_query = fresh_query.order('appearances').with_cursor(cursor)
360+
361+
new_entities = fresh_query.fetch()
362+
characters_remaining = len(self.CHARACTERS) - limit - offset
363+
self.assertEqual(len(new_entities), characters_remaining)
364+
self.assertEqual(new_entities[0]['name'], 'Catelyn')
365+
self.assertEqual(new_entities[3]['name'], 'Arya')
366+
367+
def test_query_group_by(self):
368+
query = self._get_dataset().query('Character').group_by(['alive'])
369+
370+
expected_matches = 2
371+
# We expect 2, but allow the query to get 1 extra.
372+
entities = query.fetch(limit=expected_matches + 1)
373+
self.assertEqual(len(entities), expected_matches)
374+
375+
self.assertEqual(entities[0]['name'], 'Catelyn')
376+
self.assertEqual(entities[1]['name'], 'Arya')

0 commit comments

Comments
 (0)