Skip to content

Commit 621b0e9

Browse files
committed
Makes entities for query tests persist to avoid flakiness.
This came about from conversations with @silvolu and @pcostell. Unfortunately, even if an entity has been stored and its indexes built for 10+ minutes, the tests test_query_simple_filter and test_query_multiple_filters are still flaky.
1 parent 15cd053 commit 621b0e9

File tree

4 files changed

+127
-118
lines changed

4 files changed

+127
-118
lines changed

CONTRIBUTING.rst

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -190,6 +190,11 @@ Running Regression Tests
190190
# Restore your environment to its previous state.
191191
$ unset CLOUDSDK_PYTHON_SITEPACKAGES
192192

193+
- For datastore query tests, you'll need stored data in your dataset.
194+
To populate this data, run::
195+
196+
$ python regression/populate_datastore.py
197+
193198
Test Coverage
194199
-------------
195200

regression/datastore.py

Lines changed: 22 additions & 118 deletions
Original file line numberDiff line numberDiff line change
@@ -5,39 +5,21 @@
55
from gcloud import datastore
66
# This assumes the command is being run via tox hence the
77
# repository root is the current directory.
8+
from regression.populate_datastore import CHARACTERS
89
from regression import regression_utils
910

1011

1112
class TestDatastore(unittest2.TestCase):
1213

1314
@classmethod
def setUpClass(cls):
    # One shared dataset handle for the whole suite.  Credentials and
    # dataset id resolution live in regression_utils.get_dataset().
    cls.dataset = regression_utils.get_dataset()

def setUp(self):
    # Entities a test creates register themselves here so tearDown can
    # remove them; suite-level fixture data is persisted separately by
    # regression/populate_datastore.py and is never deleted here.
    self.case_entities_to_delete = []

def tearDown(self):
    # Delete everything the test created, in a single transaction.
    with self.dataset.transaction():
        for entity in self.case_entities_to_delete:
            entity.delete()
4325

@@ -55,8 +37,7 @@ def _get_post(self, name=None, key_id=None, post_content=None):
5537
'rating': 5.0,
5638
}
5739
# Create an entity with the given content in our dataset.
58-
dataset = self._get_dataset()
59-
entity = dataset.entity(kind='Post')
40+
entity = self.dataset.entity(kind='Post')
6041
entity.update(post_content)
6142

6243
# Update the entity key.
@@ -81,7 +62,7 @@ def _generic_test_post(self, name=None, key_id=None):
8162
self.assertEqual(entity.key().name(), name)
8263
if key_id is not None:
8364
self.assertEqual(entity.key().id(), key_id)
84-
retrieved_entity = self._get_dataset().get_entity(entity.key())
65+
retrieved_entity = self.dataset.get_entity(entity.key())
8566
# Check the keys are the same.
8667
self.assertEqual(retrieved_entity.key(), entity.key())
8768

@@ -100,8 +81,7 @@ def test_post_with_generated_id(self):
10081
self._generic_test_post()
10182

10283
def test_save_multiple(self):
103-
dataset = self._get_dataset()
104-
with dataset.transaction():
84+
with self.dataset.transaction():
10585
entity1 = self._get_post()
10686
entity1.save()
10787
# Register entity to be deleted.
@@ -122,100 +102,24 @@ def test_save_multiple(self):
122102
self.case_entities_to_delete.append(entity2)
123103

124104
keys = [entity1.key(), entity2.key()]
125-
matches = dataset.get_entities(keys)
105+
matches = self.dataset.get_entities(keys)
126106
self.assertEqual(len(matches), 2)
127107

128108
def test_empty_kind(self):
129-
posts = self._get_dataset().query(kind='Post').limit(2).fetch()
109+
posts = self.dataset.query('Post').limit(2).fetch()
130110
self.assertEqual(posts, [])
131111

132112

133113
class TestDatastoreQuery(TestDatastore):
134114

135-
KEY_PATHS = [
136-
[{'kind': 'Character', 'name': 'Rickard'}],
137-
[{'kind': 'Character', 'name': 'Rickard'},
138-
{'kind': 'Character', 'name': 'Eddard'}],
139-
[{'kind': 'Character', 'name': 'Catelyn'}],
140-
[{'kind': 'Character', 'name': 'Eddard'},
141-
{'kind': 'Character', 'name': 'Arya'}],
142-
[{'kind': 'Character', 'name': 'Eddard'},
143-
{'kind': 'Character', 'name': 'Sansa'}],
144-
[{'kind': 'Character', 'name': 'Eddard'},
145-
{'kind': 'Character', 'name': 'Robb'}],
146-
[{'kind': 'Character', 'name': 'Eddard'},
147-
{'kind': 'Character', 'name': 'Bran'}],
148-
[{'kind': 'Character', 'name': 'Eddard'},
149-
{'kind': 'Character', 'name': 'Jon Snow'}],
150-
]
151-
CHARACTERS = [
152-
{
153-
'name': 'Rickard',
154-
'family': 'Stark',
155-
'appearances': 0,
156-
'alive': False,
157-
}, {
158-
'name': 'Eddard',
159-
'family': 'Stark',
160-
'appearances': 9,
161-
'alive': False,
162-
}, {
163-
'name': 'Catelyn',
164-
'family': ['Stark', 'Tully'],
165-
'appearances': 26,
166-
'alive': False,
167-
}, {
168-
'name': 'Arya',
169-
'family': 'Stark',
170-
'appearances': 33,
171-
'alive': True,
172-
}, {
173-
'name': 'Sansa',
174-
'family': 'Stark',
175-
'appearances': 31,
176-
'alive': True,
177-
}, {
178-
'name': 'Robb',
179-
'family': 'Stark',
180-
'appearances': 22,
181-
'alive': False,
182-
}, {
183-
'name': 'Bran',
184-
'family': 'Stark',
185-
'appearances': 25,
186-
'alive': True,
187-
}, {
188-
'name': 'Jon Snow',
189-
'family': 'Stark',
190-
'appearances': 32,
191-
'alive': True,
192-
},
193-
]
194-
195115
@classmethod
def setUpClass(cls):
    """Expose the persisted character fixtures to the query tests.

    The Character entities themselves are stored ahead of time by
    regression/populate_datastore.py (to avoid index-build flakiness);
    here we only keep a reference to the shared payload dicts so each
    test can compute its expected results.
    """
    super(TestDatastoreQuery, cls).setUpClass()
    cls.CHARACTERS = CHARACTERS
214119

215120
def test_limit_queries(self):
216-
dataset = self._get_dataset()
217121
limit = 5
218-
query = dataset.query(kind='Character').limit(limit)
122+
query = self.dataset.query('Character').limit(limit)
219123
# Verify there is no cursor before fetch().
220124
self.assertRaises(RuntimeError, query.cursor)
221125

@@ -228,29 +132,29 @@ def test_limit_queries(self):
228132
self.assertTrue(cursor is not None)
229133

230134
# Fetch next batch of characters.
231-
new_query = dataset.query(kind='Character').with_cursor(cursor)
135+
new_query = self.dataset.query('Character').with_cursor(cursor)
232136
new_character_entities = new_query.fetch()
233137
characters_remaining = len(self.CHARACTERS) - limit
234138
self.assertEqual(len(new_character_entities), characters_remaining)
235139

236140
def test_query_simple_filter(self):
    """A single inequality filter returns exactly the matching rows."""
    # Six of the stored characters have 20 or more appearances.
    expected_matches = 6
    filtered = self.dataset.query('Character').filter('appearances >=', 20)
    # Fetch one extra so an over-match would be detected, not hidden.
    entities = filtered.fetch(limit=expected_matches + 1)
    self.assertEqual(len(entities), expected_matches)
243147

244148
def test_query_multiple_filters(self):
245-
query = self._get_dataset().query(kind='Character')
149+
query = self.dataset.query('Character')
246150
query = query.filter('appearances >=', 26).filter('family =', 'Stark')
247151
expected_matches = 4
248152
# We expect 4, but allow the query to get 1 extra.
249153
entities = query.fetch(limit=expected_matches + 1)
250154
self.assertEqual(len(entities), expected_matches)
251155

252156
def test_ancestor_query(self):
253-
query = self._get_dataset().query('Character')
157+
query = self.dataset.query('Character')
254158
filtered_query = query.ancestor(['Character', 'Eddard'])
255159

256160
expected_matches = 5
@@ -259,18 +163,18 @@ def test_ancestor_query(self):
259163
self.assertEqual(len(entities), expected_matches)
260164

261165
def test_query___key___filter(self):
    """Filtering on __key__ matches exactly the entity with that key."""
    target_key = datastore.key.Key(
        path=[{'kind': 'Character', 'name': 'Rickard'}])

    # Keys are unique, so exactly one entity can match.
    expected_matches = 1
    key_query = self.dataset.query('Character').filter(
        '__key__ =', target_key)
    # Fetch one extra so an over-match would be detected, not hidden.
    entities = key_query.fetch(limit=expected_matches + 1)
    self.assertEqual(len(entities), expected_matches)
271175

272176
def test_ordered_query(self):
273-
query = self._get_dataset().query('Character').order('appearances')
177+
query = self.dataset.query('Character').order('appearances')
274178
expected_matches = 8
275179
# We expect 8, but allow the query to get 1 extra.
276180
entities = query.fetch(limit=expected_matches + 1)
@@ -281,7 +185,7 @@ def test_ordered_query(self):
281185
self.assertEqual(entities[7]['name'], self.CHARACTERS[3]['name'])
282186

283187
def test_projection_query(self):
284-
query = self._get_dataset().query('Character')
188+
query = self.dataset.query('Character')
285189
filtered_query = query.projection(['name', 'family'])
286190

287191
# NOTE: There are 9 responses because of Catelyn. She has both
@@ -316,7 +220,7 @@ def test_projection_query(self):
316220
self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'})
317221

318222
def test_query_paginate_with_offset(self):
319-
query = self._get_dataset().query('Character')
223+
query = self.dataset.query('Character')
320224
offset = 2
321225
limit = 3
322226
page_query = query.offset(offset).limit(limit).order('appearances')
@@ -342,7 +246,7 @@ def test_query_paginate_with_offset(self):
342246
self.assertEqual(entities[2]['name'], 'Arya')
343247

344248
def test_query_paginate_with_start_cursor(self):
345-
query = self._get_dataset().query('Character')
249+
query = self.dataset.query('Character')
346250
offset = 2
347251
limit = 2
348252
page_query = query.offset(offset).limit(limit).order('appearances')
@@ -355,7 +259,7 @@ def test_query_paginate_with_start_cursor(self):
355259

356260
# Use cursor to create a fresh query.
357261
cursor = page_query.cursor()
358-
fresh_query = self._get_dataset().query('Character')
262+
fresh_query = self.dataset.query('Character')
359263
fresh_query = fresh_query.order('appearances').with_cursor(cursor)
360264

361265
new_entities = fresh_query.fetch()
@@ -365,7 +269,7 @@ def test_query_paginate_with_start_cursor(self):
365269
self.assertEqual(new_entities[3]['name'], 'Arya')
366270

367271
def test_query_group_by(self):
368-
query = self._get_dataset().query('Character').group_by(['alive'])
272+
query = self.dataset.query('Character').group_by(['alive'])
369273

370274
expected_matches = 2
371275
# We expect 2, but allow the query to get 1 extra.

regression/populate_datastore.py

Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
1+
"""Script to populate datastore with regression test data."""
2+
3+
4+
from gcloud import datastore
5+
# This assumes the command is being run via tox hence the
6+
# repository root is the current directory.
7+
from regression import regression_utils
8+
9+
10+
# Ancestor key paths for the fixture entities.  A two-element path
# (e.g. Rickard -> Eddard) makes the second element a child of the
# first; the regression ancestor-query tests depend on this hierarchy.
KEY_PATHS = [
    [{'kind': 'Character', 'name': 'Rickard'}],
    [{'kind': 'Character', 'name': 'Rickard'},
     {'kind': 'Character', 'name': 'Eddard'}],
    [{'kind': 'Character', 'name': 'Catelyn'}],
    [{'kind': 'Character', 'name': 'Eddard'},
     {'kind': 'Character', 'name': 'Arya'}],
    [{'kind': 'Character', 'name': 'Eddard'},
     {'kind': 'Character', 'name': 'Sansa'}],
    [{'kind': 'Character', 'name': 'Eddard'},
     {'kind': 'Character', 'name': 'Robb'}],
    [{'kind': 'Character', 'name': 'Eddard'},
     {'kind': 'Character', 'name': 'Bran'}],
    [{'kind': 'Character', 'name': 'Eddard'},
     {'kind': 'Character', 'name': 'Jon Snow'}],
]
# Entity payloads, matched to KEY_PATHS by position: the i-th dict is
# stored under the i-th key path, and the final path element's 'name'
# must equal the dict's 'name' (checked in add_characters()).
# Note Catelyn's 'family' is a list value — the regression tests rely
# on her matching both 'Stark' and 'Tully' equality filters.
CHARACTERS = [
    {
        'name': 'Rickard',
        'family': 'Stark',
        'appearances': 0,
        'alive': False,
    }, {
        'name': 'Eddard',
        'family': 'Stark',
        'appearances': 9,
        'alive': False,
    }, {
        'name': 'Catelyn',
        'family': ['Stark', 'Tully'],
        'appearances': 26,
        'alive': False,
    }, {
        'name': 'Arya',
        'family': 'Stark',
        'appearances': 33,
        'alive': True,
    }, {
        'name': 'Sansa',
        'family': 'Stark',
        'appearances': 31,
        'alive': True,
    }, {
        'name': 'Robb',
        'family': 'Stark',
        'appearances': 22,
        'alive': False,
    }, {
        'name': 'Bran',
        'family': 'Stark',
        'appearances': 25,
        'alive': True,
    }, {
        'name': 'Jon Snow',
        'family': 'Stark',
        'appearances': 32,
        'alive': True,
    },
]
69+
70+
71+
def add_characters():
    """Store the Character fixture entities in the regression dataset.

    Validates that each KEY_PATHS entry's final element names the same
    character as the corresponding CHARACTERS payload, then saves all
    entities inside a single transaction so the fixture set is written
    atomically.

    Raises:
        ValueError: if a key path and its character payload disagree.
    """
    dataset = regression_utils.get_dataset()
    with dataset.transaction():
        for key_path, character in zip(KEY_PATHS, CHARACTERS):
            if key_path[-1]['name'] != character['name']:
                raise ValueError(('Character and key don\'t agree',
                                  key_path, character))
            key = datastore.key.Key(path=key_path)
            entity = datastore.entity.Entity(dataset=dataset).key(key)
            entity.update(character)
            entity.save()
            # Single parenthesized argument: behaves identically as the
            # Python 2 print statement and the Python 3 print function
            # (the original bare `print '...'` is a SyntaxError on Py3).
            print('Adding Character %s %s' % (character['name'],
                                              character['family']))


if __name__ == '__main__':
    add_characters()

0 commit comments

Comments
 (0)