Skip to content

Commit 57416c3

Browse files
chore(docs): add execute query docs pages (#1014)
1 parent 3c22951 commit 57416c3

8 files changed

Lines changed: 98 additions & 31 deletions

File tree

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
Execute Query Iterator Async
2+
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
3+
4+
.. autoclass:: google.cloud.bigtable.data.execute_query.ExecuteQueryIteratorAsync
5+
:members:
6+
:show-inheritance:
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
Execute Query Metadata
2+
~~~~~~~~~~~~~~~~~~~~~~~~~~
3+
4+
.. automodule:: google.cloud.bigtable.data.execute_query.metadata
5+
:members:
6+
:show-inheritance:
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
Execute Query Values
2+
~~~~~~~~~~~~~~~~~~~~
3+
4+
.. automodule:: google.cloud.bigtable.data.execute_query.values
5+
:members:
6+
:show-inheritance:

packages/google-cloud-bigtable/docs/async_data_client/async_data_usage.rst

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,3 +13,6 @@ Async Data Client
1313
async_data_mutations
1414
async_data_read_modify_write_rules
1515
async_data_exceptions
16+
async_data_execute_query_iterator
17+
async_data_execute_query_values
18+
async_data_execute_query_metadata

packages/google-cloud-bigtable/google/cloud/bigtable/data/_async/client.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -456,38 +456,38 @@ async def execute_query(
456456
retryable_errors list until operation_timeout is reached.
457457
458458
Args:
459-
- query: Query to be run on Bigtable instance. The query can use ``@param``
459+
query: Query to be run on Bigtable instance. The query can use ``@param``
460460
placeholders to use parameter interpolation on the server. Values for all
461461
parameters should be provided in ``parameters``. Types of parameters are
462462
inferred but should be provided in ``parameter_types`` if the inference is
463463
not possible (i.e. when value can be None, an empty list or an empty dict).
464-
- instance_id: The Bigtable instance ID to perform the query on.
464+
instance_id: The Bigtable instance ID to perform the query on.
465465
instance_id is combined with the client's project to fully
466466
specify the instance.
467-
- parameters: Dictionary with values for all parameters used in the ``query``.
468-
- parameter_types: Dictionary with types of parameters used in the ``query``.
467+
parameters: Dictionary with values for all parameters used in the ``query``.
468+
parameter_types: Dictionary with types of parameters used in the ``query``.
469469
Required to contain entries only for parameters whose type cannot be
470470
detected automatically (i.e. the value can be None, an empty list or
471471
an empty dict).
472-
- app_profile_id: The app profile to associate with requests.
472+
app_profile_id: The app profile to associate with requests.
473473
https://cloud.google.com/bigtable/docs/app-profiles
474-
- operation_timeout: the time budget for the entire operation, in seconds.
474+
operation_timeout: the time budget for the entire operation, in seconds.
475475
Failed requests will be retried within the budget.
476476
Defaults to 600 seconds.
477-
- attempt_timeout: the time budget for an individual network request, in seconds.
477+
attempt_timeout: the time budget for an individual network request, in seconds.
478478
If it takes longer than this time to complete, the request will be cancelled with
479479
a DeadlineExceeded exception, and a retry will be attempted.
480480
Defaults to 20 seconds.
481481
If None, defaults to operation_timeout.
482-
- retryable_errors: a list of errors that will be retried if encountered.
482+
retryable_errors: a list of errors that will be retried if encountered.
483483
Defaults to 4 (DeadlineExceeded), 14 (ServiceUnavailable), and 10 (Aborted)
484484
Returns:
485-
- an asynchronous iterator that yields rows returned by the query
485+
ExecuteQueryIteratorAsync: an asynchronous iterator that yields rows returned by the query
486486
Raises:
487-
- DeadlineExceeded: raised after operation timeout
487+
google.api_core.exceptions.DeadlineExceeded: raised after operation timeout
488488
will be chained with a RetryExceptionGroup containing GoogleAPIError exceptions
489489
from any retries that failed
490-
- GoogleAPIError: raised if the request encounters an unrecoverable error
490+
google.api_core.exceptions.GoogleAPIError: raised if the request encounters an unrecoverable error
491491
"""
492492
warnings.warn(
493493
"ExecuteQuery is in preview and may change in the future.",

packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/_async/execute_query_iterator.py

Lines changed: 27 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
Optional,
2424
Sequence,
2525
Tuple,
26+
TYPE_CHECKING,
2627
)
2728

2829
from google.api_core import retry as retries
@@ -43,35 +44,40 @@
4344
ExecuteQueryRequest as ExecuteQueryRequestPB,
4445
)
4546

47+
if TYPE_CHECKING:
48+
from google.cloud.bigtable.data import BigtableDataClientAsync
49+
4650

4751
class ExecuteQueryIteratorAsync:
4852
"""
4953
ExecuteQueryIteratorAsync handles collecting streaming responses from the
50-
ExecuteQuery RPC and parsing them to `QueryResultRow`s.
54+
ExecuteQuery RPC and parsing them to QueryResultRows.
5155
5256
ExecuteQueryIteratorAsync implements Asynchronous Iterator interface and can
5357
be used with "async for" syntax. It is also a context manager.
5458
5559
It is **not thread-safe**. It should not be used by multiple asyncio Tasks.
5660
5761
Args:
58-
client (google.cloud.bigtable.data._async.BigtableDataClientAsync): bigtable client
59-
instance_id (str): id of the instance on which the query is executed
60-
request_body (Dict[str, Any]): dict representing the body of the ExecuteQueryRequest
61-
attempt_timeout (float | None): the time budget for the entire operation, in seconds.
62-
Failed requests will be retried within the budget.
63-
Defaults to 600 seconds.
64-
operation_timeout (float): the time budget for an individual network request, in seconds.
65-
If it takes longer than this time to complete, the request will be cancelled with
66-
a DeadlineExceeded exception, and a retry will be attempted.
67-
Defaults to the 20 seconds. If None, defaults to operation_timeout.
68-
req_metadata (Sequence[Tuple[str, str]]): metadata used while sending the gRPC request
69-
retryable_excs (List[type[Exception]]): a list of errors that will be retried if encountered.
62+
client: bigtable client
63+
instance_id: id of the instance on which the query is executed
64+
request_body: dict representing the body of the ExecuteQueryRequest
65+
operation_timeout: the time budget for the entire operation, in seconds.
66+
Failed requests will be retried within the budget.
67+
Defaults to 600 seconds.
68+
attempt_timeout: the time budget for an individual network request, in seconds.
69+
If it takes longer than this time to complete, the request will be cancelled with
70+
a DeadlineExceeded exception, and a retry will be attempted.
71+
Defaults to 20 seconds. If None, defaults to operation_timeout.
72+
req_metadata: metadata used while sending the gRPC request
73+
retryable_excs: a list of errors that will be retried if encountered.
74+
Raises:
75+
RuntimeError: if the instance is not created within an async event loop context.
7076
"""
7177

7278
def __init__(
7379
self,
74-
client: Any,
80+
client: BigtableDataClientAsync,
7581
instance_id: str,
7682
app_profile_id: Optional[str],
7783
request_body: Dict[str, Any],
@@ -112,15 +118,18 @@ def __init__(
112118
) from e
113119

114120
@property
115-
def is_closed(self):
121+
def is_closed(self) -> bool:
122+
"""Returns True if the iterator is closed, False otherwise."""
116123
return self._is_closed
117124

118125
@property
119-
def app_profile_id(self):
126+
def app_profile_id(self) -> Optional[str]:
127+
"""Returns the app_profile_id of the iterator."""
120128
return self._app_profile_id
121129

122130
@property
123-
def table_name(self):
131+
def table_name(self) -> Optional[str]:
132+
"""Returns the table_name of the iterator."""
124133
return self._table_name
125134

126135
async def _make_request_with_resume_token(self):
@@ -176,7 +185,7 @@ async def _next_impl(self) -> AsyncIterator[QueryResultRow]:
176185
yield result
177186
await self.close()
178187

179-
async def __anext__(self):
188+
async def __anext__(self) -> QueryResultRow:
180189
if self._is_closed:
181190
raise StopAsyncIteration
182191
return await self._result_generator.__anext__()

packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/metadata.py

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,8 @@ def __repr__(self) -> str:
9090
return self.__str__()
9191

9292
class Struct(_NamedList[Type], Type):
93+
"""Struct SQL type."""
94+
9395
@classmethod
9496
def from_pb_type(cls, type_pb: Optional[PBType] = None) -> "SqlType.Struct":
9597
if type_pb is None:
@@ -120,6 +122,8 @@ def __str__(self):
120122
return super(_NamedList, self).__str__()
121123

122124
class Array(Type):
125+
"""Array SQL type."""
126+
123127
def __init__(self, element_type: "SqlType.Type"):
124128
if isinstance(element_type, SqlType.Array):
125129
raise ValueError("Arrays of arrays are not supported.")
@@ -148,6 +152,8 @@ def __str__(self) -> str:
148152
return f"{self.__class__.__name__}<{str(self.element_type)}>"
149153

150154
class Map(Type):
155+
"""Map SQL type."""
156+
151157
def __init__(self, key_type: "SqlType.Type", value_type: "SqlType.Type"):
152158
self._key_type = key_type
153159
self._value_type = value_type
@@ -189,32 +195,44 @@ def __str__(self) -> str:
189195
)
190196

191197
class Bytes(Type):
198+
"""Bytes SQL type."""
199+
192200
expected_type = bytes
193201
value_pb_dict_field_name = "bytes_value"
194202
type_field_name = "bytes_type"
195203

196204
class String(Type):
205+
"""String SQL type."""
206+
197207
expected_type = str
198208
value_pb_dict_field_name = "string_value"
199209
type_field_name = "string_type"
200210

201211
class Int64(Type):
212+
"""Int64 SQL type."""
213+
202214
expected_type = int
203215
value_pb_dict_field_name = "int_value"
204216
type_field_name = "int64_type"
205217

206218
class Float64(Type):
219+
"""Float64 SQL type."""
220+
207221
expected_type = float
208222
value_pb_dict_field_name = "float_value"
209223
type_field_name = "float64_type"
210224

211225
class Bool(Type):
226+
"""Bool SQL type."""
227+
212228
expected_type = bool
213229
value_pb_dict_field_name = "bool_value"
214230
type_field_name = "bool_type"
215231

216232
class Timestamp(Type):
217233
"""
234+
Timestamp SQL type.
235+
218236
Timestamp supports :class:`DatetimeWithNanoseconds` but Bigtable SQL does
219237
not currently support nanoseconds precision. We support this for potential
220238
compatibility in the future. Nanoseconds are currently ignored.
@@ -243,6 +261,8 @@ def _to_value_pb_dict(self, value: Any) -> Dict[str, Any]:
243261
return {"timestamp_value": ts}
244262

245263
class Date(Type):
264+
"""Date SQL type."""
265+
246266
type_field_name = "date_type"
247267
expected_type = datetime.date
248268

@@ -265,10 +285,23 @@ def _to_value_pb_dict(self, value: Any) -> Dict[str, Any]:
265285

266286

267287
class Metadata:
288+
"""
289+
Base class for metadata returned by the ExecuteQuery operation.
290+
"""
291+
268292
pass
269293

270294

271295
class ProtoMetadata(Metadata):
296+
"""
297+
Metadata class for the ExecuteQuery operation.
298+
299+
Args:
300+
columns (List[Tuple[Optional[str], SqlType.Type]]): List of column
301+
metadata tuples. Each tuple contains the column name and the column
302+
type.
303+
"""
304+
272305
class Column:
273306
def __init__(self, column_name: Optional[str], column_type: SqlType.Type):
274307
self._column_name = column_name

packages/google-cloud-bigtable/google/cloud/bigtable/data/execute_query/values.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -112,8 +112,12 @@ def __repr__(self) -> str:
112112

113113

114114
class QueryResultRow(_NamedList[ExecuteQueryValueType]):
115-
pass
115+
"""
116+
Represents a single row of the query result.
117+
"""
116118

117119

118120
class Struct(_NamedList[ExecuteQueryValueType]):
119-
pass
121+
"""
122+
Represents a struct value in the query result.
123+
"""

0 commit comments

Comments
 (0)