diff --git a/docs/datastore-transactions.rst b/docs/datastore-transactions.rst index 732283969403..305279fe621a 100644 --- a/docs/datastore-transactions.rst +++ b/docs/datastore-transactions.rst @@ -5,3 +5,4 @@ Transactions :members: :undoc-members: :show-inheritance: + :inherited-members: diff --git a/gcloud/datastore/batch.py b/gcloud/datastore/batch.py index d6dbd98ded54..ae749c43957f 100644 --- a/gcloud/datastore/batch.py +++ b/gcloud/datastore/batch.py @@ -32,7 +32,7 @@ class Batch(object): Used to build up a bulk mutuation. For example, the following snippet of code will put the two ``save`` - operations and the delete operatiuon into the same mutation, and send + operations and the ``delete`` operation into the same mutation, and send them to the server in a single API request:: >>> from gcloud.datastore.batch import Batch @@ -42,8 +42,8 @@ class Batch(object): >>> batch.delete(key3) >>> batch.commit() - You can also use a batch as a context manager, in which case the - ``commit`` will be called automatically if its block exits without + You can also use a batch as a context manager, in which case + :meth:`commit` will be called automatically if its block exits without raising an exception:: >>> with Batch() as batch: @@ -54,8 +54,8 @@ class Batch(object): By default, no updates will be sent if the block exits with an error:: >>> with Batch() as batch: - ... do_some_work(batch) - ... raise Exception() # rolls back + ... do_some_work(batch) + ... raise Exception() # rolls back :type client: :class:`gcloud.datastore.client.Client` :param client: The client used to connect to datastore. @@ -131,8 +131,8 @@ def mutations(self): Every batch is committed with a single Mutation representing the 'work' to be done as part of the batch. - Inside a batch, calling ``batch.put()`` with an entity, or - ``batch.delete`` with a key, builds up the mutation. + Inside a batch, calling :meth:`put` with an entity, or + :meth:`delete` with a key, builds up the mutation. This getter returns the Mutation protobuf that has been built-up so far. @@ -142,7 +142,7 @@ def mutations(self): return self._mutation def put(self, entity): - """Remember an entity's state to be saved during ``commit``. + """Remember an entity's state to be saved during :meth:`commit`. .. note:: Any existing properties for the entity will be replaced by those @@ -155,7 +155,7 @@ def put(self, entity): Python3) map to 'string_value' in the datastore; values which are "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'. - When an entity has a partial key, calling :meth:`commit`` sends it as + When an entity has a partial key, calling :meth:`commit` sends it as an ``insert_auto_id`` mutation and the key is completed. On return, the key for the ``entity`` passed in as updated to match the key ID assigned by the server. @@ -181,7 +181,7 @@ def put(self, entity): _assign_entity_to_pb(entity_pb, entity) def delete(self, key): - """Remember a key to be deleted durring ``commit``. + """Remember a key to be deleted during :meth:`commit`. :type key: :class:`gcloud.datastore.key.Key` :param key: the key to be deleted. diff --git a/gcloud/datastore/client.py b/gcloud/datastore/client.py index e4a2ee3cb276..4e7ba9e37040 100644 --- a/gcloud/datastore/client.py +++ b/gcloud/datastore/client.py @@ -93,17 +93,16 @@ def _extended_lookup(connection, dataset_id, key_pbs, :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. - :type missing: an empty list or None. 
- :param missing: If a list is passed, the key-only entity protobufs - returned by the backend as "missing" will be copied - into it. Use only as a keyword param. + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entity + protobufs returned by the backend as "missing" will be + copied into it. - :type deferred: an empty list or None. - :param deferred: If a list is passed, the key protobufs returned + :type deferred: list + :param deferred: (Optional) If a list is passed, the key protobufs returned by the backend as "deferred" will be copied into it. - Use only as a keyword param. - :type eventual: boolean + :type eventual: bool :param eventual: If False (the default), request ``STRONG`` read consistency. If True, request ``EVENTUAL`` read consistency. @@ -247,15 +246,14 @@ def get(self, key, missing=None, deferred=None): :type key: :class:`gcloud.datastore.key.Key` :param key: The key to be retrieved from the datastore. - :type missing: an empty list or None. - :param missing: If a list is passed, the key-only entities returned - by the backend as "missing" will be copied into it. - Use only as a keyword param. + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entities + returned by the backend as "missing" will be copied + into it. - :type deferred: an empty list or None. - :param deferred: If a list is passed, the keys returned + :type deferred: list + :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. - Use only as a keyword param. :rtype: :class:`gcloud.datastore.entity.Entity` or ``NoneType`` :returns: The requested entity if it exists. @@ -271,20 +269,20 @@ def get_multi(self, keys, missing=None, deferred=None): :type keys: list of :class:`gcloud.datastore.key.Key` :param keys: The keys to be retrieved from the datastore. - :type missing: an empty list or None. - :param missing: If a list is passed, the key-only entities returned - by the backend as "missing" will be copied into it. - Use only as a keyword param. + :type missing: list + :param missing: (Optional) If a list is passed, the key-only entities + returned by the backend as "missing" will be copied + into it. If the list is not empty, an error will occur. - :type deferred: an empty list or None. - :param deferred: If a list is passed, the keys returned + :type deferred: list + :param deferred: (Optional) If a list is passed, the keys returned by the backend as "deferred" will be copied into it. - Use only as a keyword param. + If the list is not empty, an error will occur. :rtype: list of :class:`gcloud.datastore.entity.Entity` :returns: The requested entities. - :raises: ValueError if one or more of ``keys`` has a dataset ID which - does not match our dataset ID. + :raises: :class:`ValueError` if one or more of ``keys`` has a dataset + ID which does not match our dataset ID. """ if not keys: return [] @@ -338,7 +336,7 @@ def put_multi(self, entities): :type entities: list of :class:`gcloud.datastore.entity.Entity` :param entities: The entities to be saved to the datastore. - :raises: ValueError if ``entities`` is a single entity. + :raises: :class:`ValueError` if ``entities`` is a single entity. """ if isinstance(entities, Entity): raise ValueError("Pass a sequence of entities") @@ -397,10 +395,10 @@ def delete_multi(self, keys): def allocate_ids(self, incomplete_key, num_ids): """Allocate a list of IDs from a partial key. 
- :type incomplete_key: A :class:`gcloud.datastore.key.Key` + :type incomplete_key: :class:`gcloud.datastore.key.Key` :param incomplete_key: Partial key to use as base for allocated IDs. - :type num_ids: integer + :type num_ids: int :param num_ids: The number of IDs to allocate. :rtype: list of :class:`gcloud.datastore.key.Key` diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index 647cf2df2c54..e3f2164fdc84 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -36,8 +36,8 @@ class Connection(connection.Connection): :param http: An optional HTTP object to make requests. :type api_base_url: string - :param api_base_url: The base of the API call URL. Defaults to the value - :attr:`Connection.API_BASE_URL`. + :param api_base_url: The base of the API call URL. Defaults to + :attr:`API_BASE_URL`. """ API_BASE_URL = 'https://www.googleapis.com' @@ -129,7 +129,7 @@ def build_api_url(self, dataset_id, method, base_url=None, usually your project name in the cloud console. :type method: string - :param method: The API method to call (ie, runQuery, lookup, ...). + :param method: The API method to call (e.g. 'runQuery', 'lookup'). :type base_url: string :param base_url: The base URL where the API lives. @@ -150,17 +150,19 @@ def lookup(self, dataset_id, key_pbs, Maps the ``DatastoreService.Lookup`` protobuf RPC. - This method deals only with protobufs - (:class:`gcloud.datastore._generated.entity_pb2.Key` and - :class:`gcloud.datastore._generated.entity_pb2.Entity`) and is used - under the hood in :func:`gcloud.datastore.get`: + This uses mostly protobufs + (:class:`gcloud.datastore._generated.entity_pb2.Key` as input and + :class:`gcloud.datastore._generated.entity_pb2.Entity` as output). It + is used under the hood in + :meth:`Client.get() <.datastore.client.Client.get>`: >>> from gcloud import datastore - >>> key = datastore.Key('MyKind', 1234, dataset_id='dataset-id') - >>> datastore.get(key) + >>> client = datastore.Client(dataset_id='dataset-id') + >>> key = client.key('MyKind', 1234) + >>> client.get(key) [] - Using the ``connection`` class directly: + Using a :class:`Connection` directly: >>> connection.lookup('dataset-id', [key.to_protobuf()]) [] @@ -172,10 +174,10 @@ def lookup(self, dataset_id, key_pbs, :class:`gcloud.datastore._generated.entity_pb2.Key` :param key_pbs: The keys to retrieve from the datastore. - :type eventual: boolean + :type eventual: bool :param eventual: If False (the default), request ``STRONG`` read - consistency. If True, request ``EVENTUAL`` read - consistency. + consistency. If True, request ``EVENTUAL`` read + consistency. :type transaction_id: string :param transaction_id: If passed, make the request in the scope of @@ -218,21 +220,22 @@ def run_query(self, dataset_id, query_pb, namespace=None, uses this method to fetch data: >>> from gcloud import datastore - >>> query = datastore.Query(kind='MyKind') >>> query.add_filter('property', '=', 'val') - Using the query's ``fetch_page`` method... + Using the query iterator's + :meth:`next_page() <.datastore.query.Iterator.next_page>` method: - >>> entities, cursor, more_results = query.fetch_page() + >>> query_iter = query.fetch() + >>> entities, more_results, cursor = query_iter.next_page() >>> entities [] - >>> cursor - >>> more_results + >>> cursor + - Under the hood this is doing... 
+ Under the hood this is doing: >>> connection.run_query('dataset-id', query.to_protobuf()) [], cursor, more_results, skipped_results @@ -246,7 +249,7 @@ def run_query(self, dataset_id, query_pb, namespace=None, :type namespace: string :param namespace: The namespace over which to run the query. - :type eventual: boolean + :type eventual: bool :param eventual: If False (the default), request ``STRONG`` read consistency. If True, request ``EVENTUAL`` read consistency. @@ -280,8 +283,8 @@ def begin_transaction(self, dataset_id): :type dataset_id: string :param dataset_id: The ID dataset to which the transaction applies. - :rtype: :class:`._generated.datastore_pb2.BeginTransactionResponse` - :returns': the result protobuf for the begin transaction request. + :rtype: bytes + :returns: The serialized transaction that was begun. """ request = _datastore_pb2.BeginTransactionRequest() request.isolation_level = ( diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 48b33fb5475d..0b25ae15e2df 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -46,10 +46,11 @@ class Key(object): :param path_args: May represent a partial (odd length) or full (even length) key path. - :type kwargs: dictionary + :type kwargs: dict :param kwargs: Keyword arguments to be passed in. Accepted keyword arguments are + * namespace (string): A namespace identifier for the key. * dataset_id (string): The dataset ID associated with the key. * parent (:class:`gcloud.datastore.key.Key`): The parent of the key. @@ -75,7 +76,7 @@ def __eq__(self, other): Completed keys compare equal if they have the same path, dataset ID, and namespace. - :rtype: boolean + :rtype: bool :returns: True if the keys compare equal, else False. """ if not isinstance(other, Key): @@ -96,7 +97,7 @@ def __ne__(self, other): Completed keys compare equal if they have the same path, dataset ID, and namespace. - :rtype: boolean + :rtype: bool :returns: False if the keys compare equal, else True. """ return not self.__eq__(other) @@ -259,7 +260,7 @@ def to_protobuf(self): def is_partial(self): """Boolean indicating if the key has an ID (or name). - :rtype: boolean + :rtype: bool :returns: ``True`` if the last element of the key's path does not have an ``id`` or a ``name``. """ @@ -437,7 +438,7 @@ def _dataset_ids_equal(dataset_id1, dataset_id2): :type dataset_id2: string :param dataset_id2: A dataset ID. - :rtype: boolean + :rtype: bool :returns: Boolean indicating if the IDs are the same. """ if dataset_id1 == dataset_id2: diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index efa0075b1a64..12194cd2e2f0 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -204,13 +204,14 @@ def add_filter(self, property_name, operator, value): :type operator: string :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``. - :type value: integer, string, boolean, float, None, datetime + :type value: :class:`int`, :class:`str`, :class:`bool`, + :class:`float`, :class:`NoneType`, + :class`datetime.datetime` :param value: The value to filter on. :raises: :class:`ValueError` if ``operation`` is not one of the specified values, or if a filter names ``'__key__'`` but - passes invalid operator (``==`` is required) or value (a key - is required). + passes an invalid value (a key is required). 
""" if self.OPERATORS.get(operator) is None: error_message = 'Invalid expression: "%s"' % (operator,) diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index fa1ede867d5d..ba7b4aba6ddc 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -32,8 +32,8 @@ class Transaction(Batch): >>> with datastore.Transaction(): ... datastore.put_multi([entity1, entity2]) - Because it derives from :class:`Batch`, :class`Transaction` also provides - :meth:`put` and :meth:`delete` methods:: + Because it derives from :class:`Batch <.datastore.batch.Batch>`, + :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods:: >>> with datastore.Transaction() as xact: ... xact.put(entity1) @@ -60,15 +60,17 @@ class Transaction(Batch): ``entity`` won't have a complete Key until the transaction is committed. - Once you exit the transaction (or call ``commit()``), the + Once you exit the transaction (or call :meth:`commit`), the automatically generated ID will be assigned to the entity:: >>> with datastore.Transaction(): ... entity = datastore.Entity(key=Key('Thing')) ... datastore.put(entity) - ... assert entity.key.is_partial # There is no ID on this key. + ... print entity.key.is_partial # There is no ID on this key. ... - >>> assert not entity.key.is_partial # There *is* an ID. + True + >>> print entity.key.is_partial # There *is* an ID. + False If you don't want to use the context manager you can initialize a transaction manually:: @@ -117,8 +119,10 @@ def id(self): def current(self): """Return the topmost transaction. - .. note:: if the topmost element on the stack is not a transaction, - returns None. + .. note:: + + If the topmost element on the stack is not a transaction, + returns None. :rtype: :class:`gcloud.datastore.transaction.Transaction` or None """