diff --git a/CHANGELOG.md b/CHANGELOG.md index 06aec678..f918ac9b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,17 @@ [1]: https://pypi.org/project/google-cloud-bigquery-storage/#history +## 2.0.0 + +09-24-2020 08:21 PDT + +### Implementation Changes + +- Transition the library to microgenerator. ([#62](https://github.com/googleapis/python-bigquery-storage/pull/62)) + This is a **breaking change** that introduces several **method signature changes** and **drops support + for Python 2.7 and 3.5**. See [migration guide](https://googleapis.dev/python/bigquerystorage/latest/UPGRADING.html) + for more info. + ## 1.1.0 09-14-2020 08:51 PDT diff --git a/UPGRADING.md b/UPGRADING.md index 92f130ae..cab98087 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -33,9 +33,10 @@ The 2.0.0 release requires Python 3.6+. ## Import Path -The library was moved into `google.cloud.bigquery` namespace. It is recommended -to use this path in order to reduce the chance of future compatibility issues -in case the library is restuctured internally. +The library's top-level namespace is `google.cloud.bigquery_storage`. Importing +from `google.cloud.bigquery_storage_v1` still works, but it is advisable to use +the `google.cloud.bigquery_storage` path in order to reduce the chance of future +compatibility issues should the library be restuctured internally. **Before:** ```py @@ -44,7 +45,7 @@ from google.cloud.bigquery_storage_v1 import BigQueryReadClient **After:** ```py -from google.cloud.bigquery.storage import BigQueryReadClient +from google.cloud.bigquery_storage import BigQueryReadClient ``` @@ -65,7 +66,7 @@ data_format = BigQueryReadClient.enums.DataFormat.ARROW **After:** ```py -from google.cloud.bigquery.storage import types +from google.cloud.bigquery_storage import types data_format = types.DataFormat.ARROW ``` @@ -157,13 +158,13 @@ session = client.create_read_session( **After:** ```py -from google.cloud.bigquery import storage +from google.cloud import bigquery_storage -client = storage.BigQueryReadClient() +client = bigquery_storage.BigQueryReadClient() -requested_session = storage.types.ReadSession( +requested_session = bigquery_storage.types.ReadSession( table="projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID", - data_format=storage.types.DataFormat.ARROW, + data_format=bigquery_storage.types.DataFormat.ARROW, ) session = client.create_read_session( request={ diff --git a/docs/storage_v1/library.rst b/docs/bigquery_storage_v1/library.rst similarity index 100% rename from docs/storage_v1/library.rst rename to docs/bigquery_storage_v1/library.rst diff --git a/docs/storage_v1/services.rst b/docs/bigquery_storage_v1/services.rst similarity index 69% rename from docs/storage_v1/services.rst rename to docs/bigquery_storage_v1/services.rst index 56b24588..5d0f9532 100644 --- a/docs/storage_v1/services.rst +++ b/docs/bigquery_storage_v1/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Bigquery Storage v1 API ================================================= -.. automodule:: google.cloud.bigquery.storage_v1.services.big_query_read +.. 
automodule:: google.cloud.bigquery_storage_v1.services.big_query_read :members: :inherited-members: diff --git a/docs/storage_v1/types.rst b/docs/bigquery_storage_v1/types.rst similarity index 66% rename from docs/storage_v1/types.rst rename to docs/bigquery_storage_v1/types.rst index 85f2d543..1eb34796 100644 --- a/docs/storage_v1/types.rst +++ b/docs/bigquery_storage_v1/types.rst @@ -1,5 +1,5 @@ Types for Google Cloud Bigquery Storage v1 API ============================================== -.. automodule:: google.cloud.bigquery.storage_v1.types +.. automodule:: google.cloud.bigquery_storage_v1.types :members: diff --git a/docs/index.rst b/docs/index.rst index fb3e7182..618e74d7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -18,9 +18,9 @@ API Reference .. toctree:: :maxdepth: 2 - storage_v1/library - storage_v1/services - storage_v1/types + bigquery_storage_v1/library + bigquery_storage_v1/services + bigquery_storage_v1/types Migration Guide diff --git a/google/cloud/bigquery/storage_v1/__init__.py b/google/cloud/bigquery/storage_v1/__init__.py deleted file mode 100644 index 55591c25..00000000 --- a/google/cloud/bigquery/storage_v1/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from .services.big_query_read import BigQueryReadClient -from .types.arrow import ArrowRecordBatch -from .types.arrow import ArrowSchema -from .types.avro import AvroRows -from .types.avro import AvroSchema -from .types.storage import CreateReadSessionRequest -from .types.storage import ReadRowsRequest -from .types.storage import ReadRowsResponse -from .types.storage import SplitReadStreamRequest -from .types.storage import SplitReadStreamResponse -from .types.storage import StreamStats -from .types.storage import ThrottleState -from .types.stream import DataFormat -from .types.stream import ReadSession -from .types.stream import ReadStream - - -__all__ = ( - "ArrowRecordBatch", - "ArrowSchema", - "AvroRows", - "AvroSchema", - "CreateReadSessionRequest", - "DataFormat", - "ReadRowsRequest", - "ReadRowsResponse", - "ReadSession", - "ReadStream", - "SplitReadStreamRequest", - "SplitReadStreamResponse", - "StreamStats", - "ThrottleState", - "BigQueryReadClient", -) diff --git a/google/cloud/bigquery/storage/__init__.py b/google/cloud/bigquery_storage/__init__.py similarity index 56% rename from google/cloud/bigquery/storage/__init__.py rename to google/cloud/bigquery_storage/__init__.py index 24e20ba2..227e6184 100644 --- a/google/cloud/bigquery/storage/__init__.py +++ b/google/cloud/bigquery_storage/__init__.py @@ -16,22 +16,22 @@ # from google.cloud.bigquery_storage_v1 import BigQueryReadClient -from google.cloud.bigquery_storage_v1 import types +from google.cloud.bigquery_storage_v1 import gapic_types as types from google.cloud.bigquery_storage_v1 import __version__ -from google.cloud.bigquery.storage_v1.types.arrow import ArrowRecordBatch -from google.cloud.bigquery.storage_v1.types.arrow import ArrowSchema -from google.cloud.bigquery.storage_v1.types.avro import AvroRows -from google.cloud.bigquery.storage_v1.types.avro import AvroSchema -from google.cloud.bigquery.storage_v1.types.storage import CreateReadSessionRequest -from google.cloud.bigquery.storage_v1.types.storage import ReadRowsRequest -from google.cloud.bigquery.storage_v1.types.storage import ReadRowsResponse -from google.cloud.bigquery.storage_v1.types.storage import SplitReadStreamRequest -from google.cloud.bigquery.storage_v1.types.storage import SplitReadStreamResponse -from google.cloud.bigquery.storage_v1.types.storage import StreamStats -from google.cloud.bigquery.storage_v1.types.storage import ThrottleState -from google.cloud.bigquery.storage_v1.types.stream import DataFormat -from google.cloud.bigquery.storage_v1.types.stream import ReadSession -from google.cloud.bigquery.storage_v1.types.stream import ReadStream +from google.cloud.bigquery_storage_v1.types.arrow import ArrowRecordBatch +from google.cloud.bigquery_storage_v1.types.arrow import ArrowSchema +from google.cloud.bigquery_storage_v1.types.avro import AvroRows +from google.cloud.bigquery_storage_v1.types.avro import AvroSchema +from google.cloud.bigquery_storage_v1.types.storage import CreateReadSessionRequest +from google.cloud.bigquery_storage_v1.types.storage import ReadRowsRequest +from google.cloud.bigquery_storage_v1.types.storage import ReadRowsResponse +from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamRequest +from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamResponse +from google.cloud.bigquery_storage_v1.types.storage import StreamStats +from google.cloud.bigquery_storage_v1.types.storage import ThrottleState +from google.cloud.bigquery_storage_v1.types.stream import DataFormat +from 
google.cloud.bigquery_storage_v1.types.stream import ReadSession +from google.cloud.bigquery_storage_v1.types.stream import ReadStream __all__ = ( "__version__", diff --git a/google/cloud/bigquery/storage/py.typed b/google/cloud/bigquery_storage/py.typed similarity index 100% rename from google/cloud/bigquery/storage/py.typed rename to google/cloud/bigquery_storage/py.typed diff --git a/google/cloud/bigquery_storage_v1/client.py b/google/cloud/bigquery_storage_v1/client.py index a910a574..d6dcea10 100644 --- a/google/cloud/bigquery_storage_v1/client.py +++ b/google/cloud/bigquery_storage_v1/client.py @@ -23,8 +23,8 @@ import google.api_core.gapic_v1.method -from google.cloud.bigquery import storage_v1 from google.cloud.bigquery_storage_v1 import reader +from google.cloud.bigquery_storage_v1.services import big_query_read _SCOPES = ( @@ -33,7 +33,7 @@ ) -class BigQueryReadClient(storage_v1.BigQueryReadClient): +class BigQueryReadClient(big_query_read.BigQueryReadClient): """Client for interacting with BigQuery Storage API. The BigQuery storage API can be used to read data stored in BigQuery. @@ -60,9 +60,9 @@ def read_rows( to read data. Example: - >>> from google.cloud.bigquery import storage + >>> from google.cloud import bigquery_storage >>> - >>> client = storage.BigQueryReadClient() + >>> client = bigquery_storage.BigQueryReadClient() >>> >>> # TODO: Initialize ``table``: >>> table = "projects/{}/datasets/{}/tables/{}".format( @@ -74,9 +74,9 @@ def read_rows( >>> # TODO: Initialize `parent`: >>> parent = 'projects/your-billing-project-id' >>> - >>> requested_session = storage.types.ReadSession( + >>> requested_session = bigquery_storage.types.ReadSession( ... table=table, - ... data_format=storage.types.DataFormat.AVRO, + ... data_format=bigquery_storage.types.DataFormat.AVRO, ... ) >>> session = client.create_read_session( ... 
parent=parent, read_session=requested_session diff --git a/google/cloud/bigquery_storage_v1/types.py b/google/cloud/bigquery_storage_v1/gapic_types.py similarity index 91% rename from google/cloud/bigquery_storage_v1/types.py rename to google/cloud/bigquery_storage_v1/gapic_types.py index 1c3dbc9f..55b90e9f 100644 --- a/google/cloud/bigquery_storage_v1/types.py +++ b/google/cloud/bigquery_storage_v1/gapic_types.py @@ -22,10 +22,10 @@ import proto -from google.cloud.bigquery.storage_v1.types import arrow -from google.cloud.bigquery.storage_v1.types import avro -from google.cloud.bigquery.storage_v1.types import storage -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.types import arrow +from google.cloud.bigquery_storage_v1.types import avro +from google.cloud.bigquery_storage_v1.types import storage +from google.cloud.bigquery_storage_v1.types import stream from google.protobuf import message as protobuf_message from google.protobuf import timestamp_pb2 diff --git a/google/cloud/bigquery/storage_v1/py.typed b/google/cloud/bigquery_storage_v1/py.typed similarity index 100% rename from google/cloud/bigquery/storage_v1/py.typed rename to google/cloud/bigquery_storage_v1/py.typed diff --git a/google/cloud/bigquery_storage_v1/reader.py b/google/cloud/bigquery_storage_v1/reader.py index 89df3c4d..034ad726 100644 --- a/google/cloud/bigquery_storage_v1/reader.py +++ b/google/cloud/bigquery_storage_v1/reader.py @@ -81,11 +81,11 @@ def __init__(self, wrapped, client, name, offset, read_rows_kwargs): Args: wrapped (Iterable[ \ - ~google.cloud.bigquery.storage.types.ReadRowsResponse \ + ~google.cloud.bigquery_storage.types.ReadRowsResponse \ ]): The ReadRows stream to read. client ( \ - ~google.cloud.bigquery.storage_v1.services. \ + ~google.cloud.bigquery_storage_v1.services. \ big_query_read.BigQueryReadClient \ ): A GAPIC client used to reconnect to a ReadRows stream. This @@ -104,7 +104,7 @@ def __init__(self, wrapped, client, name, offset, read_rows_kwargs): Returns: Iterable[ \ - ~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \ + ~google.cloud.bigquery_storage.types.ReadRowsResponse \ ]: A sequence of row messages. 
""" diff --git a/google/cloud/bigquery/storage_v1/services/__init__.py b/google/cloud/bigquery_storage_v1/services/__init__.py similarity index 100% rename from google/cloud/bigquery/storage_v1/services/__init__.py rename to google/cloud/bigquery_storage_v1/services/__init__.py diff --git a/google/cloud/bigquery/storage_v1/services/big_query_read/__init__.py b/google/cloud/bigquery_storage_v1/services/big_query_read/__init__.py similarity index 100% rename from google/cloud/bigquery/storage_v1/services/big_query_read/__init__.py rename to google/cloud/bigquery_storage_v1/services/big_query_read/__init__.py diff --git a/google/cloud/bigquery/storage_v1/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py similarity index 98% rename from google/cloud/bigquery/storage_v1/services/big_query_read/async_client.py rename to google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py index 121c24e8..f5c80cd0 100644 --- a/google/cloud/bigquery/storage_v1/services/big_query_read/async_client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py @@ -28,10 +28,10 @@ from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.cloud.bigquery.storage_v1.types import arrow -from google.cloud.bigquery.storage_v1.types import avro -from google.cloud.bigquery.storage_v1.types import storage -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.types import arrow +from google.cloud.bigquery_storage_v1.types import avro +from google.cloud.bigquery_storage_v1.types import storage +from google.cloud.bigquery_storage_v1.types import stream from google.protobuf import timestamp_pb2 as timestamp # type: ignore from .transports.base import BigQueryReadTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/bigquery/storage_v1/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py similarity index 99% rename from google/cloud/bigquery/storage_v1/services/big_query_read/client.py rename to google/cloud/bigquery_storage_v1/services/big_query_read/client.py index 38279b78..33ec3f77 100644 --- a/google/cloud/bigquery/storage_v1/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py @@ -32,10 +32,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.cloud.bigquery.storage_v1.types import arrow -from google.cloud.bigquery.storage_v1.types import avro -from google.cloud.bigquery.storage_v1.types import storage -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.types import arrow +from google.cloud.bigquery_storage_v1.types import avro +from google.cloud.bigquery_storage_v1.types import storage +from google.cloud.bigquery_storage_v1.types import stream from google.protobuf import timestamp_pb2 as timestamp # type: ignore from .transports.base import BigQueryReadTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/bigquery/storage_v1/services/big_query_read/transports/__init__.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py similarity index 100% rename from google/cloud/bigquery/storage_v1/services/big_query_read/transports/__init__.py rename to google/cloud/bigquery_storage_v1/services/big_query_read/transports/__init__.py diff --git 
a/google/cloud/bigquery/storage_v1/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py similarity index 98% rename from google/cloud/bigquery/storage_v1/services/big_query_read/transports/base.py rename to google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py index 3b9b0e71..5727ca5a 100644 --- a/google/cloud/bigquery/storage_v1/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py @@ -25,8 +25,8 @@ from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore -from google.cloud.bigquery.storage_v1.types import storage -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.types import storage +from google.cloud.bigquery_storage_v1.types import stream try: diff --git a/google/cloud/bigquery/storage_v1/services/big_query_read/transports/grpc.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py similarity index 99% rename from google/cloud/bigquery/storage_v1/services/big_query_read/transports/grpc.py rename to google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py index 17ecafe6..7777e68c 100644 --- a/google/cloud/bigquery/storage_v1/services/big_query_read/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py @@ -26,8 +26,8 @@ import grpc # type: ignore -from google.cloud.bigquery.storage_v1.types import storage -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.types import storage +from google.cloud.bigquery_storage_v1.types import stream from .base import BigQueryReadTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/bigquery/storage_v1/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py similarity index 99% rename from google/cloud/bigquery/storage_v1/services/big_query_read/transports/grpc_asyncio.py rename to google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py index ff5755c9..c1715a24 100644 --- a/google/cloud/bigquery/storage_v1/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py @@ -27,8 +27,8 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.bigquery.storage_v1.types import storage -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.types import storage +from google.cloud.bigquery_storage_v1.types import stream from .base import BigQueryReadTransport, DEFAULT_CLIENT_INFO from .grpc import BigQueryReadGrpcTransport diff --git a/google/cloud/bigquery/storage_v1/types/__init__.py b/google/cloud/bigquery_storage_v1/types/__init__.py similarity index 97% rename from google/cloud/bigquery/storage_v1/types/__init__.py rename to google/cloud/bigquery_storage_v1/types/__init__.py index 0d37362f..346ce9cf 100644 --- a/google/cloud/bigquery/storage_v1/types/__init__.py +++ b/google/cloud/bigquery_storage_v1/types/__init__.py @@ -24,6 +24,7 @@ AvroRows, ) from .stream import ( + DataFormat, ReadSession, ReadStream, ) @@ -43,6 +44,7 @@ "ArrowRecordBatch", "AvroSchema", "AvroRows", + "DataFormat", "ReadSession", "ReadStream", "CreateReadSessionRequest", diff --git 
a/google/cloud/bigquery/storage_v1/types/arrow.py b/google/cloud/bigquery_storage_v1/types/arrow.py similarity index 100% rename from google/cloud/bigquery/storage_v1/types/arrow.py rename to google/cloud/bigquery_storage_v1/types/arrow.py diff --git a/google/cloud/bigquery/storage_v1/types/avro.py b/google/cloud/bigquery_storage_v1/types/avro.py similarity index 100% rename from google/cloud/bigquery/storage_v1/types/avro.py rename to google/cloud/bigquery_storage_v1/types/avro.py diff --git a/google/cloud/bigquery/storage_v1/types/storage.py b/google/cloud/bigquery_storage_v1/types/storage.py similarity index 97% rename from google/cloud/bigquery/storage_v1/types/storage.py rename to google/cloud/bigquery_storage_v1/types/storage.py index 57584b28..3460dce7 100644 --- a/google/cloud/bigquery/storage_v1/types/storage.py +++ b/google/cloud/bigquery_storage_v1/types/storage.py @@ -18,9 +18,9 @@ import proto # type: ignore -from google.cloud.bigquery.storage_v1.types import arrow -from google.cloud.bigquery.storage_v1.types import avro -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.types import arrow +from google.cloud.bigquery_storage_v1.types import avro +from google.cloud.bigquery_storage_v1.types import stream __protobuf__ = proto.module( diff --git a/google/cloud/bigquery/storage_v1/types/stream.py b/google/cloud/bigquery_storage_v1/types/stream.py similarity index 97% rename from google/cloud/bigquery/storage_v1/types/stream.py rename to google/cloud/bigquery_storage_v1/types/stream.py index 99b7afee..eeec7a88 100644 --- a/google/cloud/bigquery/storage_v1/types/stream.py +++ b/google/cloud/bigquery_storage_v1/types/stream.py @@ -18,8 +18,8 @@ import proto # type: ignore -from google.cloud.bigquery.storage_v1.types import arrow -from google.cloud.bigquery.storage_v1.types import avro +from google.cloud.bigquery_storage_v1.types import arrow +from google.cloud.bigquery_storage_v1.types import avro from google.protobuf import timestamp_pb2 as timestamp # type: ignore diff --git a/noxfile.py b/noxfile.py index 188218a1..7f37b788 100644 --- a/noxfile.py +++ b/noxfile.py @@ -79,9 +79,10 @@ def default(session): session.run( "py.test", "--quiet", - "--cov=google.cloud.bigquerystorage", + "--cov=google.cloud.bigquery_storage", + "--cov=google.cloud.bigquery_storage_v1", "--cov=google.cloud", - "--cov=tests.unit", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", diff --git a/samples/quickstart/quickstart.py b/samples/quickstart/quickstart.py index ef42f02a..4372c22d 100644 --- a/samples/quickstart/quickstart.py +++ b/samples/quickstart/quickstart.py @@ -17,8 +17,8 @@ def main(project_id="your-project-id", snapshot_millis=0): # [START bigquerystorage_quickstart] - from google.cloud.bigquery.storage import BigQueryReadClient - from google.cloud.bigquery.storage import types + from google.cloud.bigquery_storage import BigQueryReadClient + from google.cloud.bigquery_storage import types # TODO(developer): Set the project_id variable. 
# project_id = 'your-project-id' diff --git a/samples/quickstart/quickstart_test.py b/samples/quickstart/quickstart_test.py index 37b1b2dd..33494cca 100644 --- a/samples/quickstart/quickstart_test.py +++ b/samples/quickstart/quickstart_test.py @@ -29,7 +29,7 @@ def now_millis(): @pytest.fixture() def project_id(): - return os.environ["PROJECT_ID"] + return os.environ["GOOGLE_CLOUD_PROJECT"] def test_quickstart_wo_snapshot(capsys, project_id): diff --git a/samples/to_dataframe/main_test.py b/samples/to_dataframe/main_test.py index ecce1685..46820578 100644 --- a/samples/to_dataframe/main_test.py +++ b/samples/to_dataframe/main_test.py @@ -21,7 +21,7 @@ def clients(): # [START bigquerystorage_pandas_tutorial_create_client] import google.auth from google.cloud import bigquery - from google.cloud.bigquery import storage + from google.cloud import bigquery_storage # Explicitly create a credentials object. This allows you to use the same # credentials for both the BigQuery and BigQuery Storage clients, avoiding @@ -32,7 +32,7 @@ def clients(): # Make clients. bqclient = bigquery.Client(credentials=credentials, project=your_project_id,) - bqstorageclient = storage.BigQueryReadClient(credentials=credentials) + bqstorageclient = bigquery_storage.BigQueryReadClient(credentials=credentials) # [END bigquerystorage_pandas_tutorial_create_client] # [END bigquerystorage_pandas_tutorial_all] return bqclient, bqstorageclient @@ -96,7 +96,7 @@ def test_query_to_dataframe(capsys, clients): def test_session_to_dataframe(capsys, clients): - from google.cloud.bigquery.storage import types + from google.cloud.bigquery_storage import types bqclient, bqstorageclient = clients your_project_id = bqclient.project diff --git a/scripts/fixup_storage_v1_keywords.py b/scripts/fixup_bigquery_storage_v1_keywords.py similarity index 96% rename from scripts/fixup_storage_v1_keywords.py rename to scripts/fixup_bigquery_storage_v1_keywords.py index 2fe0e587..4fc6755e 100644 --- a/scripts/fixup_storage_v1_keywords.py +++ b/scripts/fixup_bigquery_storage_v1_keywords.py @@ -37,7 +37,7 @@ def partition( return results[1], results[0] -class storageCallTransformer(cst.CSTTransformer): +class bigquery_storageCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_read_session': ('parent', 'read_session', 'max_stream_count', ), @@ -92,7 +92,7 @@ def fix_files( in_dir: pathlib.Path, out_dir: pathlib.Path, *, - transformer=storageCallTransformer(), + transformer=bigquery_storageCallTransformer(), ): """Duplicate the input dir to the output dir, fixing file method calls. @@ -125,7 +125,7 @@ def fix_files( if __name__ == '__main__': parser = argparse.ArgumentParser( - description="""Fix up source that uses the storage client library. + description="""Fix up source that uses the bigquery_storage client library. The existing sources are NOT overwritten but are copied to output_dir with changes made. 
diff --git a/setup.py b/setup.py index 4167081a..d8e5fe4f 100644 --- a/setup.py +++ b/setup.py @@ -50,9 +50,6 @@ if "google.cloud" in packages: namespaces.append("google.cloud") -if "google.cloud.bigquery" in packages: - namespaces.append("google.cloud.bigquery") - setuptools.setup( name=name, version=version, @@ -80,7 +77,7 @@ install_requires=dependencies, extras_require=extras, python_requires=">=3.6", - scripts=["scripts/fixup_storage_v1_keywords.py"], + scripts=["scripts/fixup_bigquery_storage_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/synth.metadata b/synth.metadata index 030565d7..957c95be 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,21 +4,21 @@ "git": { "name": ".", "remote": "git@github.com:plamut/python-bigquery-storage.git", - "sha": "e019d01628884bb3a24495f48f5036c9160deabd" + "sha": "bb1fdd26638add930e6601663ca92b76cbd9064c" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "916c10e8581804df2b48a0f0457d848f3faa582e" + "sha": "da29da32b3a988457b49ae290112b74f14b713cc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "916c10e8581804df2b48a0f0457d848f3faa582e" + "sha": "da29da32b3a988457b49ae290112b74f14b713cc" } } ], diff --git a/synth.py b/synth.py index 2c867da7..e3fa7eea 100644 --- a/synth.py +++ b/synth.py @@ -94,34 +94,36 @@ '\g<0>\n\n session.install("google-cloud-bigquery")', ) -# We want the default client accessible through "google.cloud.bigquery.storage" -# to be the hand-written client that wrap the generated client, as this path is -# the users' main "entry point" into the library. -# HOWEVER - we don't want to expose the async client just yet. +# We don't want the generated client to be accessible through +# "google.cloud.bigquery_storage", replace it with the hand written client that +# wraps it. s.replace( - "google/cloud/bigquery/storage/__init__.py", - r"from google\.cloud\.bigquery\.storage_v1\.services.big_query_read.client import", + "google/cloud/bigquery_storage/__init__.py", + r"from google\.cloud\.bigquery_storage_v1\.services.big_query_read.client import", "from google.cloud.bigquery_storage_v1 import" ) + +# We also don't want to expose the async client just yet, at least not until +# it is wrapped in its own manual client class. s.replace( - "google/cloud/bigquery/storage/__init__.py", + "google/cloud/bigquery_storage/__init__.py", ( - r"from google\.cloud\.bigquery\.storage_v1\.services.big_query_read.async_client " + r"from google\.cloud\.bigquery_storage_v1\.services.big_query_read.async_client " r"import BigQueryReadAsyncClient\n" ), "", ) s.replace( - "google/cloud/bigquery/storage/__init__.py", + "google/cloud/bigquery_storage/__init__.py", r"""["']BigQueryReadAsyncClient["'],\n""", "", ) -# Ditto for types and __version__, make them accessible through the consolidated +# We want types and __version__ to be accessible through the "main" library # entry point. 
s.replace( - "google/cloud/bigquery/storage/__init__.py", - r"from google\.cloud\.bigquery\.storage_v1\.types\.arrow import ArrowRecordBatch", + "google/cloud/bigquery_storage/__init__.py", + r"from google\.cloud\.bigquery_storage_v1\.types\.arrow import ArrowRecordBatch", ( "from google.cloud.bigquery_storage_v1 import types\n" "from google.cloud.bigquery_storage_v1 import __version__\n" @@ -129,7 +131,7 @@ ), ) s.replace( - "google/cloud/bigquery/storage/__init__.py", + "google/cloud/bigquery_storage/__init__.py", r"""["']ArrowRecordBatch["']""", ( '"__version__",\n' @@ -138,12 +140,34 @@ ), ) -# Fix redundant library installations in nox sessions (unit and system tests). +# We want to expose all types through "google.cloud.bigquery_storage.types", +# not just the types generated for the BQ Storage library. For example, we also +# want to include common proto types such as Timestamp. +s.replace( + "google/cloud/bigquery_storage/__init__.py", + r"import types", + "import gapic_types as types", +) + +# The DataFormat enum is not exposed in bigquery_storage_v1/types, add it there. +s.replace( + "google/cloud/bigquery_storage_v1/types/__init__.py", + r"from \.stream import \(", + "\g<0>\n DataFormat,", +) +s.replace( + "google/cloud/bigquery_storage_v1/types/__init__.py", + r"""["']ReadSession["']""", + '"DataFormat",\n \g<0>', +) + +# Fix library installations in nox sessions (unit and system tests) - it's +# redundant to install the library twice. s.replace( "noxfile.py", ( r'session\.install\("-e", "\."\)\n ' - r'(?=session\.install\("-e", "\.\[fastavro)' + r'(?=session\.install\("-e", "\.\[fastavro)' # in unit tests session ), "", ) @@ -151,11 +175,26 @@ "noxfile.py", ( r'(?<=google-cloud-testutils", \)\n)' - r' session\.install\("-e", "\."\)\n' + r' session\.install\("-e", "\."\)\n' # in system tests session ), ' session.install("-e", ".[fastavro,pandas,pyarrow]")\n', ) +# Fix test coverage plugin paths. 
+s.replace( + "noxfile.py", + r'"--cov=google\.cloud\.bigquerystorage"', + ( + '"--cov=google.cloud.bigquery_storage",\n' + ' "--cov=google.cloud.bigquery_storage_v1"' + ), +) +s.replace( + "noxfile.py", + r'--cov=tests\.unit', + '--cov=tests/unit', +) + # TODO(busunkim): Use latest sphinx after microgenerator transition s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') diff --git a/tests/system/v1/conftest.py b/tests/system/v1/conftest.py index 901429fc..cb8c5c19 100644 --- a/tests/system/v1/conftest.py +++ b/tests/system/v1/conftest.py @@ -20,7 +20,7 @@ import pytest -from google.cloud.bigquery import storage +from google.cloud import bigquery_storage _TABLE_FORMAT = "projects/{}/datasets/{}/tables/{}" @@ -43,7 +43,7 @@ def credentials(): @pytest.fixture(scope="session") def client(credentials): - return storage.BigQueryReadClient(credentials=credentials) + return bigquery_storage.BigQueryReadClient(credentials=credentials) @pytest.fixture() diff --git a/tests/system/v1/test_reader_dataframe_v1.py b/tests/system/v1/test_reader_dataframe_v1.py index 0d5b1899..ce1a46ee 100644 --- a/tests/system/v1/test_reader_dataframe_v1.py +++ b/tests/system/v1/test_reader_dataframe_v1.py @@ -19,7 +19,7 @@ import pyarrow.types import pytest -from google.cloud.bigquery.storage import types +from google.cloud.bigquery_storage import types def test_read_v1(client, project_id): diff --git a/tests/system/v1/test_reader_v1.py b/tests/system/v1/test_reader_v1.py index ff1f7191..ff0e5b9f 100644 --- a/tests/system/v1/test_reader_v1.py +++ b/tests/system/v1/test_reader_v1.py @@ -24,7 +24,7 @@ import pytz from google.cloud import bigquery -from google.cloud.bigquery.storage import types +from google.cloud.bigquery_storage import types def _to_bq_table_ref(table_name_string, partition_suffix=""): diff --git a/tests/unit/gapic/storage_v1/__init__.py b/tests/unit/gapic/bigquery_storage_v1/__init__.py similarity index 100% rename from tests/unit/gapic/storage_v1/__init__.py rename to tests/unit/gapic/bigquery_storage_v1/__init__.py diff --git a/tests/unit/gapic/storage_v1/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py similarity index 98% rename from tests/unit/gapic/storage_v1/test_big_query_read.py rename to tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py index 848e0d83..5ce4b3b2 100644 --- a/tests/unit/gapic/storage_v1/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py @@ -32,15 +32,15 @@ from google.api_core import grpc_helpers_async from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery.storage_v1.services.big_query_read import ( +from google.cloud.bigquery_storage_v1.services.big_query_read import ( BigQueryReadAsyncClient, ) -from google.cloud.bigquery.storage_v1.services.big_query_read import BigQueryReadClient -from google.cloud.bigquery.storage_v1.services.big_query_read import transports -from google.cloud.bigquery.storage_v1.types import arrow -from google.cloud.bigquery.storage_v1.types import avro -from google.cloud.bigquery.storage_v1.types import storage -from google.cloud.bigquery.storage_v1.types import stream +from google.cloud.bigquery_storage_v1.services.big_query_read import BigQueryReadClient +from google.cloud.bigquery_storage_v1.services.big_query_read import transports +from google.cloud.bigquery_storage_v1.types import arrow +from google.cloud.bigquery_storage_v1.types import avro +from google.cloud.bigquery_storage_v1.types 
import storage +from google.cloud.bigquery_storage_v1.types import stream from google.oauth2 import service_account from google.protobuf import timestamp_pb2 as timestamp # type: ignore @@ -416,7 +416,7 @@ def test_big_query_read_client_client_options_credentials_file( def test_big_query_read_client_client_options_from_dict(): with mock.patch( - "google.cloud.bigquery.storage_v1.services.big_query_read.transports.BigQueryReadGrpcTransport.__init__" + "google.cloud.bigquery_storage_v1.services.big_query_read.transports.BigQueryReadGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = BigQueryReadClient(client_options={"api_endpoint": "squid.clam.whelk"}) @@ -1062,7 +1062,7 @@ def test_big_query_read_base_transport_error(): def test_big_query_read_base_transport(): # Instantiate the base transport. with mock.patch( - "google.cloud.bigquery.storage_v1.services.big_query_read.transports.BigQueryReadTransport.__init__" + "google.cloud.bigquery_storage_v1.services.big_query_read.transports.BigQueryReadTransport.__init__" ) as Transport: Transport.return_value = None transport = transports.BigQueryReadTransport( @@ -1086,7 +1086,7 @@ def test_big_query_read_base_transport_with_credentials_file(): with mock.patch.object( auth, "load_credentials_from_file" ) as load_creds, mock.patch( - "google.cloud.bigquery.storage_v1.services.big_query_read.transports.BigQueryReadTransport._prep_wrapped_messages" + "google.cloud.bigquery_storage_v1.services.big_query_read.transports.BigQueryReadTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) @@ -1107,7 +1107,7 @@ def test_big_query_read_base_transport_with_credentials_file(): def test_big_query_read_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(auth, "default") as adc, mock.patch( - "google.cloud.bigquery.storage_v1.services.big_query_read.transports.BigQueryReadTransport._prep_wrapped_messages" + "google.cloud.bigquery_storage_v1.services.big_query_read.transports.BigQueryReadTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (credentials.AnonymousCredentials(), None) diff --git a/tests/unit/test_client_v1.py b/tests/unit/test_client_v1.py index 4a369a89..9ef3378d 100644 --- a/tests/unit/test_client_v1.py +++ b/tests/unit/test_client_v1.py @@ -16,7 +16,7 @@ import mock import pytest -from google.cloud.bigquery.storage import types +from google.cloud.bigquery_storage import types PROJECT = "my-project" @@ -25,7 +25,7 @@ @pytest.fixture() def mock_transport(monkeypatch): - from google.cloud.bigquery.storage_v1.services.big_query_read import transports + from google.cloud.bigquery_storage_v1.services.big_query_read import transports fake_create_session_rpc = mock.Mock(name="create_read_session_rpc") fake_read_rows_rpc = mock.Mock(name="read_rows_rpc") @@ -47,13 +47,13 @@ def mock_transport(monkeypatch): @pytest.fixture() def client_under_test(mock_transport): - from google.cloud.bigquery import storage + from google.cloud import bigquery_storage - return storage.BigQueryReadClient(transport=mock_transport) + return bigquery_storage.BigQueryReadClient(transport=mock_transport) def test_constructor_w_client_info(): - from google.cloud.bigquery import storage + from google.cloud import bigquery_storage class MyTransport: def __init__(self, *args, **kwargs): @@ -61,11 +61,13 @@ def __init__(self, *args, **kwargs): self.kwargs = kwargs transport_class_patcher = mock.patch.object( - storage.BigQueryReadClient, "get_transport_class", return_value=MyTransport + bigquery_storage.BigQueryReadClient, + "get_transport_class", + return_value=MyTransport, ) with transport_class_patcher: - client_under_test = storage.BigQueryReadClient( + client_under_test = bigquery_storage.BigQueryReadClient( client_info=client_info.ClientInfo( client_library_version="test-client-version" ), diff --git a/tests/unit/test_reader_v1.py b/tests/unit/test_reader_v1.py index 216d4561..4922ab47 100644 --- a/tests/unit/test_reader_v1.py +++ b/tests/unit/test_reader_v1.py @@ -29,7 +29,7 @@ import six import google.api_core.exceptions -from google.cloud.bigquery.storage import types +from google.cloud.bigquery_storage import types PROJECT = "my-project" @@ -126,9 +126,9 @@ def class_under_test(mut): @pytest.fixture() def mock_gapic_client(): - from google.cloud.bigquery import storage_v1 + from google.cloud.bigquery_storage_v1.services import big_query_read - return mock.create_autospec(storage_v1.BigQueryReadClient) + return mock.create_autospec(big_query_read.BigQueryReadClient) def _bq_to_avro_blocks(bq_blocks, avro_schema_json):