4 changes: 2 additions & 2 deletions gcloud/datastore/__init__.py
@@ -30,9 +30,9 @@
   API.
 
 - :class:`gcloud.datastore.client.Client`
-  which represents a dataset ID (string) and namespace (string) bundled with
+  which represents a project (string) and namespace (string) bundled with
   a connection and has convenience methods for constructing objects with that
-  dataset ID / namespace.
+  project / namespace.
 
 - :class:`gcloud.datastore.entity.Entity`
   which represents a single entity in the datastore
24 changes: 12 additions & 12 deletions gcloud/datastore/batch.py
@@ -22,7 +22,7 @@
 """
 
 from gcloud.datastore import helpers
-from gcloud.datastore.key import _dataset_ids_equal
+from gcloud.datastore.key import _projects_equal
 from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
 
 
@@ -73,13 +73,13 @@ def current(self):
         return self._client.current_batch
 
     @property
-    def dataset_id(self):
-        """Getter for dataset ID in which the batch will run.
+    def project(self):
+        """Getter for project in which the batch will run.
 
         :rtype: :class:`str`
-        :returns: The dataset ID in which the batch will run.
+        :returns: The project in which the batch will run.
         """
-        return self._client.dataset_id
+        return self._client.project
 
     @property
     def namespace(self):
@@ -167,13 +167,13 @@ def put(self, entity):
         :param entity: the entity to be saved.
 
         :raises: ValueError if entity has no key assigned, or if the key's
-                 ``dataset_id`` does not match ours.
+                 ``project`` does not match ours.
         """
         if entity.key is None:
             raise ValueError("Entity must have a key")
 
-        if not _dataset_ids_equal(self.dataset_id, entity.key.dataset_id):
-            raise ValueError("Key must be from same dataset as batch")
+        if not _projects_equal(self.project, entity.key.project):
+            raise ValueError("Key must be from same project as batch")
 
         if entity.key.is_partial:
             entity_pb = self._add_partial_key_entity_pb()
@@ -190,13 +190,13 @@ def delete(self, key):
         :param key: the key to be deleted.
 
         :raises: ValueError if key is not complete, or if the key's
-                 ``dataset_id`` does not match ours.
+                 ``project`` does not match ours.
         """
         if key.is_partial:
             raise ValueError("Key must be complete")
 
-        if not _dataset_ids_equal(self.dataset_id, key.dataset_id):
-            raise ValueError("Key must be from same dataset as batch")
+        if not _projects_equal(self.project, key.project):
+            raise ValueError("Key must be from same project as batch")
 
         key_pb = helpers._prepare_key_for_request(key.to_protobuf())
         self._add_delete_key_pb().CopyFrom(key_pb)
@@ -215,7 +215,7 @@ def commit(self):
         context manager.
         """
         _, updated_keys = self.connection.commit(
-            self.dataset_id, self._commit_request, self._id)
+            self.project, self._commit_request, self._id)
         # If the back-end returns without error, we are guaranteed that
         # :meth:`Connection.commit` will return keys that match (length and
         # order) directly ``_partial_key_entities``.
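For context, a minimal sketch of the renamed batch surface in use; the project string 'my-project' and the 'Task' kind are hypothetical, not taken from this diff:

    from gcloud import datastore

    client = datastore.Client(project='my-project')  # hypothetical project ID

    with client.batch() as batch:
        task = datastore.Entity(key=client.key('Task', 1234))
        task['done'] = False
        # put() compares entity.key.project to batch.project (formerly
        # dataset_id) and raises ValueError on a mismatch.
        batch.put(task)
        # delete() requires a complete key from the same project.
        batch.delete(client.key('Task', 999))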
100 changes: 47 additions & 53 deletions gcloud/datastore/client.py
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Convenience wrapper for invoking APIs/factories w/ a dataset ID."""
+"""Convenience wrapper for invoking APIs/factories w/ a project."""
 
 import os
 
@@ -23,8 +23,8 @@
 from gcloud.datastore.connection import Connection
 from gcloud.datastore.batch import Batch
 from gcloud.datastore.entity import Entity
+from gcloud.datastore.key import _projects_equal
 from gcloud.datastore.key import Key
-from gcloud.datastore.key import _dataset_ids_equal
 from gcloud.datastore.query import Query
 from gcloud.datastore.transaction import Transaction
 from gcloud.environment_vars import DATASET
@@ -35,18 +35,18 @@
 """Maximum number of iterations to wait for deferred keys."""
 
 
-def _get_production_dataset_id():
+def _get_production_project():
     """Gets the production application ID if it can be inferred."""
     return os.getenv(DATASET)
 
 
-def _get_gcd_dataset_id():
+def _get_gcd_project():
     """Gets the GCD application ID if it can be inferred."""
     return os.getenv(GCD_DATASET)
 
 
-def _determine_default_dataset_id(dataset_id=None):
-    """Determine default dataset ID explicitly or implicitly as fall-back.
+def _determine_default_project(project=None):
+    """Determine default project explicitly or implicitly as fall-back.
 
     In implicit case, supports four environments. In order of precedence, the
     implicit environments are:
@@ -56,28 +56,28 @@ def _determine_default_dataset_id(dataset_id=None):
     * Google App Engine application ID
     * Google Compute Engine project ID (from metadata server)
 
-    :type dataset_id: string
-    :param dataset_id: Optional. The dataset ID to use as default.
+    :type project: string
+    :param project: Optional. The project to use as default.
 
     :rtype: string or ``NoneType``
-    :returns: Default dataset ID if it can be determined.
+    :returns: Default project if it can be determined.
     """
-    if dataset_id is None:
-        dataset_id = _get_production_dataset_id()
+    if project is None:
+        project = _get_production_project()
 
-    if dataset_id is None:
-        dataset_id = _get_gcd_dataset_id()
+    if project is None:
+        project = _get_gcd_project()
 
-    if dataset_id is None:
-        dataset_id = _app_engine_id()
+    if project is None:
+        project = _app_engine_id()
 
-    if dataset_id is None:
-        dataset_id = _compute_engine_id()
+    if project is None:
+        project = _compute_engine_id()
 
-    return dataset_id
+    return project
 
 
-def _extended_lookup(connection, dataset_id, key_pbs,
+def _extended_lookup(connection, project, key_pbs,
                      missing=None, deferred=None,
                      eventual=False, transaction_id=None):
     """Repeat lookup until all keys found (unless stop requested).
@@ -87,8 +87,8 @@ def _extended_lookup(connection, dataset_id, key_pbs,
     :type connection: :class:`gcloud.datastore.connection.Connection`
     :param connection: The connection used to connect to datastore.
 
-    :type dataset_id: string
-    :param dataset_id: The ID of the dataset of which to make the request.
+    :type project: string
+    :param project: The project to make the request for.
 
     :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
     :param key_pbs: The keys to retrieve from the datastore.
@@ -130,7 +130,7 @@ def _extended_lookup(connection, dataset_id, key_pbs,
         loop_num += 1
 
         results_found, missing_found, deferred_found = connection.lookup(
-            dataset_id=dataset_id,
+            project=project,
             key_pbs=key_pbs,
             eventual=eventual,
             transaction_id=transaction_id,
@@ -156,10 +156,10 @@
 
 
 class Client(_BaseClient):
-    """Convenience wrapper for invoking APIs/factories w/ a dataset ID.
+    """Convenience wrapper for invoking APIs/factories w/ a project.
 
-    :type dataset_id: string
-    :param dataset_id: (optional) dataset ID to pass to proxied API methods.
+    :type project: string
+    :param project: (optional) The project to pass to proxied API methods.
 
     :type namespace: string
     :param namespace: (optional) namespace to pass to proxied API methods.
Expand All @@ -178,12 +178,12 @@ class Client(_BaseClient):
"""
_connection_class = Connection

def __init__(self, dataset_id=None, namespace=None,
def __init__(self, project=None, namespace=None,
credentials=None, http=None):
dataset_id = _determine_default_dataset_id(dataset_id)
if dataset_id is None:
raise EnvironmentError('Dataset ID could not be inferred.')
self.dataset_id = dataset_id
project = _determine_default_project(project)
if project is None:
raise EnvironmentError('Project could not be inferred.')
self.project = project
self.namespace = namespace
self._batch_stack = _LocalStack()
super(Client, self).__init__(credentials, http)
@@ -281,22 +281,22 @@ def get_multi(self, keys, missing=None, deferred=None):
 
         :rtype: list of :class:`gcloud.datastore.entity.Entity`
         :returns: The requested entities.
-        :raises: :class:`ValueError` if one or more of ``keys`` has a dataset
-                 ID which does not match our dataset ID.
+        :raises: :class:`ValueError` if one or more of ``keys`` has a project
+                 which does not match our project.
         """
         if not keys:
             return []
 
-        ids = set(key.dataset_id for key in keys)
+        ids = set(key.project for key in keys)
         for current_id in ids:
-            if not _dataset_ids_equal(current_id, self.dataset_id):
-                raise ValueError('Keys do not match dataset ID')
+            if not _projects_equal(current_id, self.project):
+                raise ValueError('Keys do not match project')
 
         transaction = self.current_transaction
 
         entity_pbs = _extended_lookup(
             connection=self.connection,
-            dataset_id=self.dataset_id,
+            project=self.project,
             key_pbs=[k.to_protobuf() for k in keys],
             missing=missing,
             deferred=deferred,
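get_multi() now validates key projects against the client's; a brief sketch (the kind and the second project ID are hypothetical):

    keys = [client.key('Task', 1), client.key('Task', 2)]
    missing = []
    entities = client.get_multi(keys, missing=missing)
    # Found entities are returned; stand-ins for keys with no stored
    # entity are appended to ``missing``.

    other = datastore.Client(project='another-project')
    client.get_multi([other.key('Task', 1)])
    # ValueError: Keys do not match project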
@@ -414,7 +414,7 @@ def allocate_ids(self, incomplete_key, num_ids):
         incomplete_key_pbs = [incomplete_key_pb] * num_ids
 
         conn = self.connection
-        allocated_key_pbs = conn.allocate_ids(incomplete_key.dataset_id,
+        allocated_key_pbs = conn.allocate_ids(incomplete_key.project,
                                               incomplete_key_pbs)
         allocated_ids = [allocated_key_pb.path_element[-1].id
                          for allocated_key_pb in allocated_key_pbs]
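allocate_ids() likewise reads the project off the incomplete key; a short sketch with a hypothetical kind:

    incomplete = client.key('Task')            # partial key: kind but no ID
    keys = client.allocate_ids(incomplete, 3)  # three complete keys, same project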
@@ -424,39 +424,33 @@ def key(self, *path_args, **kwargs):
     def key(self, *path_args, **kwargs):
         """Proxy to :class:`gcloud.datastore.key.Key`.
 
-        Passes our ``dataset_id``.
+        Passes our ``project``.
         """
-        if 'dataset_id' in kwargs:
-            raise TypeError('Cannot pass dataset_id')
-        kwargs['dataset_id'] = self.dataset_id
+        if 'project' in kwargs:
+            raise TypeError('Cannot pass project')
+        kwargs['project'] = self.project
         if 'namespace' not in kwargs:
             kwargs['namespace'] = self.namespace
         return Key(*path_args, **kwargs)
 
     def batch(self):
-        """Proxy to :class:`gcloud.datastore.batch.Batch`.
-
-        Passes our ``dataset_id``.
-        """
+        """Proxy to :class:`gcloud.datastore.batch.Batch`."""
         return Batch(self)
 
     def transaction(self):
-        """Proxy to :class:`gcloud.datastore.transaction.Transaction`.
-
-        Passes our ``dataset_id``.
-        """
+        """Proxy to :class:`gcloud.datastore.transaction.Transaction`."""
         return Transaction(self)
 
     def query(self, **kwargs):
         """Proxy to :class:`gcloud.datastore.query.Query`.
 
-        Passes our ``dataset_id``.
+        Passes our ``project``.
         """
         if 'client' in kwargs:
             raise TypeError('Cannot pass client')
-        if 'dataset_id' in kwargs:
-            raise TypeError('Cannot pass dataset_id')
-        kwargs['dataset_id'] = self.dataset_id
+        if 'project' in kwargs:
+            raise TypeError('Cannot pass project')
+        kwargs['project'] = self.project
         if 'namespace' not in kwargs:
             kwargs['namespace'] = self.namespace
         return Query(self, **kwargs)
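Finally, a sketch of the proxy factories after the rename; the client now injects ``project`` where it used to inject ``dataset_id`` (values hypothetical):

    client = datastore.Client(project='my-project', namespace='demo')

    key = client.key('Task', 1234)     # inherits client.project and namespace
    query = client.query(kind='Task')  # likewise

    # Passing the injected arguments explicitly is rejected:
    client.key('Task', 1, project='other')  # TypeError: Cannot pass project
    client.query(client=client)             # TypeError: Cannot pass client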