 import pkg_resources
 
 try:
-    from google.cloud.bigquery import storage
+    from google.cloud import bigquery_storage
 except ImportError:  # pragma: NO COVER
-    storage = None
+    bigquery_storage = None
 
 try:
     import fastavro  # to parse BQ storage client results
@@ -1790,10 +1790,12 @@ def test_dbapi_fetchall(self):
         row_tuples = [r.values() for r in rows]
         self.assertEqual(row_tuples, [(1, 2), (3, 4), (5, 6)])
 
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )
         cursor = dbapi.connect(Config.CLIENT, bqstorage_client).cursor()
@@ -1850,7 +1852,9 @@ def test_dbapi_dry_run_query(self):
 
         self.assertEqual(list(rows), [])
 
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_dbapi_connection_does_not_leak_sockets(self):
         current_process = psutil.Process()
         conn_count_start = len(current_process.connections())
@@ -2278,15 +2282,17 @@ def test_query_results_to_dataframe(self):
                     self.assertIsInstance(row[col], exp_datatypes[col])
 
     @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_query_results_to_dataframe_w_bqstorage(self):
         query = """
             SELECT id, author, time_ts, dead
             FROM `bigquery-public-data.hacker_news.comments`
             LIMIT 10
         """
 
-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )
 
@@ -2575,7 +2581,9 @@ def _fetch_dataframe(self, query):
         return Config.CLIENT.query(query).result().to_dataframe()
 
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_nested_table_to_arrow(self):
         from google.cloud.bigquery.job import SourceFormat
         from google.cloud.bigquery.job import WriteDisposition
@@ -2610,7 +2618,7 @@ def test_nested_table_to_arrow(self):
         job_config.schema = schema
         # Load a table using a local JSON file from memory.
         Config.CLIENT.load_table_from_file(body, table, job_config=job_config).result()
-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )
 
@@ -2765,12 +2773,14 @@ def test_list_rows_page_size(self):
         self.assertEqual(page.num_items, num_last_page)
 
     @unittest.skipIf(pandas is None, "Requires `pandas`")
-    @unittest.skipIf(storage is None, "Requires `google-cloud-bigquery-storage`")
+    @unittest.skipIf(
+        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
+    )
     def test_list_rows_max_results_w_bqstorage(self):
         table_ref = DatasetReference("bigquery-public-data", "utility_us").table(
             "country_code_iso"
         )
-        bqstorage_client = storage.BigQueryReadClient(
+        bqstorage_client = bigquery_storage.BigQueryReadClient(
             credentials=Config.CLIENT._credentials
         )
 
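Every hunk above applies the same pattern: the optional `google-cloud-bigquery-storage` dependency is imported under its top-level name `bigquery_storage`, the import is guarded so the name falls back to `None` when the package is absent, and tests that need it are gated with `unittest.skipIf`. The sketch below shows that pattern in isolation; it is a minimal illustration, the class and test names are hypothetical rather than taken from the file above, and constructing the read client assumes Application Default Credentials are available.

import unittest

try:
    # New top-level import path used in the diff above.
    from google.cloud import bigquery_storage
except ImportError:  # pragma: NO COVER
    bigquery_storage = None  # dependent tests are skipped below


class TestOptionalBQStorage(unittest.TestCase):  # hypothetical test class
    @unittest.skipIf(
        bigquery_storage is None, "Requires `google-cloud-bigquery-storage`"
    )
    def test_bqstorage_client_is_available(self):
        # Runs only when the optional package is installed; the constructor
        # resolves credentials (e.g. Application Default Credentials).
        client = bigquery_storage.BigQueryReadClient()
        self.assertIsNotNone(client)


if __name__ == "__main__":
    unittest.main()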