diff --git a/system_tests/bigquery.py b/system_tests/bigquery.py
index f7ff5706683c..46ca200e952f 100644
--- a/system_tests/bigquery.py
+++ b/system_tests/bigquery.py
@@ -30,6 +30,18 @@
 DATASET_NAME = 'system_tests' + unique_resource_id()
 
 
+def _rate_limit_exceeded(forbidden):
+    """Predicate: pass only exceptions with 'rateLimitExceeded' as reason."""
+    return any(error['reason'] == 'rateLimitExceeded'
+               for error in forbidden._errors)
+
+# We need to wait to stay within the rate limits.
+# The alternative outcome is a 403 Forbidden response from upstream, which
+# the backend returns instead of the more appropriate 429.
+# See: https://cloud.google.com/bigquery/quota-policy
+retry_403 = RetryErrors(Forbidden, error_predicate=_rate_limit_exceeded)
+
+
 class Config(object):
     """Run-time configuration to be modified at set-up.
 
@@ -56,8 +68,10 @@ def tearDown(self):
     def test_create_dataset(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         self.assertTrue(dataset.exists())
         self.assertEqual(dataset.name, DATASET_NAME)
 
@@ -65,8 +79,10 @@ def test_reload_dataset(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         dataset.friendly_name = 'Friendly'
         dataset.description = 'Description'
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         other = Config.CLIENT.dataset(DATASET_NAME)
         other.reload()
         self.assertEqual(other.friendly_name, 'Friendly')
@@ -75,8 +91,10 @@ def test_reload_dataset(self):
     def test_patch_dataset(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         self.assertTrue(dataset.exists())
         self.assertEqual(dataset.friendly_name, None)
         self.assertEqual(dataset.description, None)
@@ -88,22 +106,15 @@ def test_update_dataset(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
 
-        # We need to wait to stay within the rate limits.
-        # The alternative outcome is a 403 Forbidden response from upstream.
-        # See: https://cloud.google.com/bigquery/quota-policy
-        retry = RetryErrors(Forbidden, max_tries=2, delay=30)
-        retry(dataset.create)()
-
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         self.assertTrue(dataset.exists())
         after = [grant for grant in dataset.access_grants
                  if grant.entity_id != 'projectWriters']
         dataset.access_grants = after
 
-        # We need to wait to stay within the rate limits.
-        # The alternative outcome is a 403 Forbidden response from upstream.
-        # See: https://cloud.google.com/bigquery/quota-policy
-        retry(dataset.update)()
+        retry_403(dataset.update)()
 
         self.assertEqual(len(dataset.access_grants), len(after))
         for found, expected in zip(dataset.access_grants, after):
@@ -119,7 +130,7 @@ def test_list_datasets(self):
         ]
         for dataset_name in datasets_to_create:
             dataset = Config.CLIENT.dataset(dataset_name)
-            dataset.create()
+            retry_403(dataset.create)()
             self.to_delete.append(dataset)
 
         # Retrieve the datasets.
@@ -133,8 +144,10 @@ def test_list_datasets(self):
     def test_create_table(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
@@ -149,7 +162,8 @@ def test_create_table(self):
     def test_list_tables(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
 
         # Retrieve tables before any are created for the dataset.
@@ -182,8 +196,10 @@ def test_list_tables(self):
     def test_patch_table(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
@@ -203,13 +219,9 @@ def test_update_table(self):
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
 
-        # We need to wait to stay within the rate limits.
-        # The alternative outcome is a 403 Forbidden response from upstream.
-        # See: https://cloud.google.com/bigquery/quota-policy
-        retry = RetryErrors(Forbidden, max_tries=2, delay=30)
-        retry(dataset.create)()
-
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
@@ -246,8 +258,10 @@ def test_load_table_then_dump_table(self):
         ROW_IDS = range(len(ROWS))
         dataset = Config.CLIENT.dataset(DATASET_NAME)
         self.assertFalse(dataset.exists())
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
+
         TABLE_NAME = 'test_table'
         full_name = bigquery.SchemaField('full_name', 'STRING',
                                          mode='REQUIRED')
@@ -312,7 +326,8 @@ def test_load_table_from_storage_then_dump_table(self):
         self.to_delete.insert(0, blob)
 
         dataset = Config.CLIENT.dataset(DATASET_NAME)
-        dataset.create()
+
+        retry_403(dataset.create)()
         self.to_delete.append(dataset)
 
         full_name = bigquery.SchemaField('full_name', 'STRING',
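
Note: `retry_403` wraps a callable and returns a retrying version of it, which is why the tests invoke it as `retry_403(dataset.create)()`. The `RetryErrors` helper itself is defined elsewhere in the test suite; below is a minimal sketch of how such a wrapper could behave, assuming only the keywords visible in this diff (`max_tries`, `delay`, `error_predicate`). The default values and the fixed-delay retry policy are illustrative assumptions, not the project's actual implementation:

```python
import time
from functools import wraps


class RetryErrors(object):
    """Retry a callable when it raises the given exception type.

    Hypothetical sketch: only the ``max_tries``, ``delay``, and
    ``error_predicate`` keywords are taken from the diff above;
    the defaults and retry policy are assumed.
    """

    def __init__(self, exception, max_tries=4, delay=1,
                 error_predicate=None):
        self.exception = exception
        self.max_tries = max_tries
        self.delay = delay
        # With no predicate, every matching exception is retried.
        self.error_predicate = error_predicate or (lambda exc: True)

    def __call__(self, to_wrap):
        @wraps(to_wrap)
        def wrapped(*args, **kwargs):
            tries = 0
            while True:
                try:
                    return to_wrap(*args, **kwargs)
                except self.exception as caught:
                    tries += 1
                    # Give up on non-retryable errors, or once the
                    # attempt budget is spent.
                    if (not self.error_predicate(caught) or
                            tries >= self.max_tries):
                        raise
                    time.sleep(self.delay)
        return wrapped
```

Under this sketch, `retry_403(dataset.create)()` sleeps and retries only those `Forbidden` errors whose payload reason is `rateLimitExceeded` (per `_rate_limit_exceeded`), while any other 403 propagates immediately.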