diff --git a/0.18.1/.buildinfo b/0.18.1/.buildinfo
new file mode 100644
index 000000000000..2f2287011325
--- /dev/null
+++ b/0.18.1/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: 2311b0f3f0e958e7927415b2ae696345
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/0.18.1/_modules/gcloud/bigquery/_helpers.html b/0.18.1/_modules/gcloud/bigquery/_helpers.html
new file mode 100644
index 000000000000..dc6adcafaa28
--- /dev/null
+++ b/0.18.1/_modules/gcloud/bigquery/_helpers.html
@@ -0,0 +1,399 @@
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared elper functions for BigQuery API classes."""
+
+fromgcloud._helpersimport_datetime_from_microseconds
+
+
+def_not_null(value,field):
+ """Check whether 'value' should be coerced to 'field' type."""
+ returnvalueisnotNoneorfield.mode!='NULLABLE'
+
+
+def_int_from_json(value,field):
+ """Coerce 'value' to an int, if set or not nullable."""
+ if_not_null(value,field):
+ returnint(value)
+
+
+def_float_from_json(value,field):
+ """Coerce 'value' to a float, if set or not nullable."""
+ if_not_null(value,field):
+ returnfloat(value)
+
+
+def_bool_from_json(value,field):
+ """Coerce 'value' to a bool, if set or not nullable."""
+ if_not_null(value,field):
+ returnvalue.lower()in['t','true','1']
+
+
+def_datetime_from_json(value,field):
+ """Coerce 'value' to a datetime, if set or not nullable."""
+ if_not_null(value,field):
+ # value will be a float in seconds, to microsecond precision, in UTC.
+ return_datetime_from_microseconds(1e6*float(value))
+
+
+def_record_from_json(value,field):
+ """Coerce 'value' to a mapping, if set or not nullable."""
+ if_not_null(value,field):
+ record={}
+ forsubfield,cellinzip(field.fields,value['f']):
+ converter=_CELLDATA_FROM_JSON[subfield.field_type]
+ iffield.mode=='REPEATED':
+ value=[converter(item,field)foritemincell['v']]
+ else:
+ value=converter(cell['v'],field)
+ record[subfield.name]=value
+ returnrecord
+
+
+def_string_from_json(value,_):
+ """NOOP string -> string coercion"""
+ returnvalue
+
+
+_CELLDATA_FROM_JSON={
+ 'INTEGER':_int_from_json,
+ 'FLOAT':_float_from_json,
+ 'BOOLEAN':_bool_from_json,
+ 'TIMESTAMP':_datetime_from_json,
+ 'RECORD':_record_from_json,
+ 'STRING':_string_from_json,
+}
+
+
+def_rows_from_json(rows,schema):
+ """Convert JSON row data to rows w/ appropriate types."""
+ rows_data=[]
+ forrowinrows:
+ row_data=[]
+ forfield,cellinzip(schema,row['f']):
+ converter=_CELLDATA_FROM_JSON[field.field_type]
+ iffield.mode=='REPEATED':
+ row_data.append([converter(item,field)
+ foritemincell['v']])
+ else:
+ row_data.append(converter(cell['v'],field))
+ rows_data.append(tuple(row_data))
+ returnrows_data
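+
+
+# Editor's sketch (hedged, not part of the released module): ``_Field`` is a
+# hypothetical stand-in for :class:`gcloud.bigquery.table.SchemaField`,
+# supplying only the attributes the converters above consult.
+from collections import namedtuple  # illustrative only
+
+_Field = namedtuple('_Field', ['name', 'field_type', 'mode', 'fields'])
+
+# One NULLABLE INTEGER column; BigQuery returns cell values as JSON strings.
+assert _rows_from_json(
+    [{'f': [{'v': '32'}]}],
+    [_Field('age', 'INTEGER', 'NULLABLE', ())],
+) == [(32,)]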
+
+
+class _ConfigurationProperty(object):
+    """Base property implementation.
+
+    Values will be stored on a `_configuration` helper attribute of the
+    property's job instance.
+
+    :type name: string
+    :param name: name of the property
+    """
+
+    def __init__(self, name):
+        self.name = name
+        self._backing_name = '_%s' % (self.name,)
+
+    def __get__(self, instance, owner):
+        """Descriptor protocol: accessor"""
+        if instance is None:
+            return self
+        return getattr(instance._configuration, self._backing_name)
+
+    def _validate(self, value):
+        """Subclasses override to impose validation policy."""
+        pass
+
+    def __set__(self, instance, value):
+        """Descriptor protocol: mutator"""
+        self._validate(value)
+        setattr(instance._configuration, self._backing_name, value)
+
+    def __delete__(self, instance):
+        """Descriptor protocol: deleter"""
+        delattr(instance._configuration, self._backing_name)
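+
+
+# Editor's sketch (hedged, not part of the released module): how the
+# descriptor round-trips values through the owner's ``_configuration``
+# object.  ``_DemoConfig`` and ``_DemoJob`` are hypothetical names.
+class _DemoConfig(object):
+    _timeout = None
+
+
+class _DemoJob(object):
+    timeout = _ConfigurationProperty('timeout')
+
+    def __init__(self):
+        self._configuration = _DemoConfig()
+
+
+_job = _DemoJob()
+_job.timeout = 30          # stored as _job._configuration._timeout
+assert _job.timeout == 30  # read back through the descriptor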
+
+
+class _TypedProperty(_ConfigurationProperty):
+    """Property implementation: validates based on value type.
+
+    :type name: string
+    :param name: name of the property
+
+    :type property_type: type or sequence of types
+    :param property_type: type to be validated
+    """
+    def __init__(self, name, property_type):
+        super(_TypedProperty, self).__init__(name)
+        self.property_type = property_type
+
+    def _validate(self, value):
+        """Ensure that 'value' is of the appropriate type.
+
+        :raises: ValueError on a type mismatch.
+        """
+        if not isinstance(value, self.property_type):
+            raise ValueError('Required type: %s' % (self.property_type,))
+
+
+class _EnumProperty(_ConfigurationProperty):
+    """Pseudo-enumeration class.
+
+    Subclasses must define ``ALLOWED`` as a class-level constant: it must
+    be a sequence of strings.
+
+    :type name: string
+    :param name: name of the property
+    """
+    def _validate(self, value):
+        """Check that ``value`` is one of the allowed values.
+
+        :raises: ValueError if value is not allowed.
+        """
+        if value not in self.ALLOWED:
+            raise ValueError('Pass one of: %s' % ', '.join(self.ALLOWED))
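+
+
+# Editor's sketch (hedged, not part of the released module): a minimal
+# ``_EnumProperty`` subclass.  ``_DemoDisposition`` is hypothetical; the
+# real subclasses (e.g. ``CreateDisposition``) live in gcloud.bigquery.job.
+class _DemoDisposition(_EnumProperty):
+    ALLOWED = ('CREATE_IF_NEEDED', 'CREATE_NEVER')
+
+
+_DemoDisposition('create_disposition')._validate('CREATE_NEVER')  # passes
+# _validate('BOGUS') would raise
+# ValueError('Pass one of: CREATE_IF_NEEDED, CREATE_NEVER').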
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google BigQuery API."""
+
+
+from gcloud.client import JSONClient
+from gcloud.bigquery.connection import Connection
+from gcloud.bigquery.dataset import Dataset
+from gcloud.bigquery.job import CopyJob
+from gcloud.bigquery.job import ExtractTableToStorageJob
+from gcloud.bigquery.job import LoadTableFromStorageJob
+from gcloud.bigquery.job import QueryJob
+from gcloud.bigquery.query import QueryResults
+
+
+class Client(JSONClient):
+ """Client to bundle configuration needed for API requests.
+
+ :type project: str
+ :param project: the project which the client acts on behalf of. Will be
+ passed when creating a dataset / job. If not passed,
+ falls back to the default inferred from the environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ """
+
+ _connection_class=Connection
+
+
[docs]deflist_datasets(self,include_all=False,max_results=None,
+ page_token=None):
+ """List datasets for the project associated with this client.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/datasets/list
+
+ :type include_all: boolean
+ :param include_all: True if results include hidden datasets.
+
+ :type max_results: int
+ :param max_results: maximum number of datasets to return, If not
+ passed, defaults to a value set by the API.
+
+ :type page_token: str
+ :param page_token: opaque marker for the next "page" of datasets. If
+ not passed, the API will return the first page of
+ datasets.
+
+ :rtype: tuple, (list, str)
+ :returns: list of :class:`gcloud.bigquery.dataset.Dataset`, plus a
+ "next page token" string: if the token is not None,
+ indicates that more datasets can be retrieved with another
+ call (pass that value as ``page_token``).
+ """
+ params={}
+
+ ifinclude_all:
+ params['all']=True
+
+ ifmax_resultsisnotNone:
+ params['maxResults']=max_results
+
+ ifpage_tokenisnotNone:
+ params['pageToken']=page_token
+
+ path='/projects/%s/datasets'%(self.project,)
+ resp=self.connection.api_request(method='GET',path=path,
+ query_params=params)
+ datasets=[Dataset.from_api_repr(resource,self)
+ forresourceinresp.get('datasets',())]
+ returndatasets,resp.get('nextPageToken')
+
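+    # Editor's sketch (hedged): paging through all datasets with the tuple
+    # returned above.  Client construction is illustrative.
+    #
+    #     client = Client(project='my-project')
+    #     token = None
+    #     while True:
+    #         datasets, token = client.list_datasets(page_token=token)
+    #         for dataset in datasets:
+    #             print(dataset.name)
+    #         if token is None:
+    #             break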
+
+    def dataset(self, dataset_name):
+        """Construct a dataset bound to this client.
+
+        :type dataset_name: str
+        :param dataset_name: Name of the dataset.
+
+        :rtype: :class:`gcloud.bigquery.dataset.Dataset`
+        :returns: a new ``Dataset`` instance
+        """
+        return Dataset(dataset_name, client=self)
+
+
+    def job_from_resource(self, resource):
+        """Detect correct job type from resource and instantiate.
+
+        :type resource: dict
+        :param resource: one job resource from API response
+
+        :rtype: One of:
+                :class:`gcloud.bigquery.job.LoadTableFromStorageJob`,
+                :class:`gcloud.bigquery.job.CopyJob`,
+                :class:`gcloud.bigquery.job.ExtractTableToStorageJob`,
+                :class:`gcloud.bigquery.job.QueryJob`
+        :returns: the job instance, constructed via the resource
+        """
+        config = resource['configuration']
+        if 'load' in config:
+            return LoadTableFromStorageJob.from_api_repr(resource, self)
+        elif 'copy' in config:
+            return CopyJob.from_api_repr(resource, self)
+        elif 'extract' in config:
+            return ExtractTableToStorageJob.from_api_repr(resource, self)
+        elif 'query' in config:
+            return QueryJob.from_api_repr(resource, self)
+        raise ValueError('Cannot parse job resource')
+
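+    # Editor's sketch (hedged): the dispatch above keys off which
+    # configuration sub-object the API populated, e.g.:
+    #
+    #     resource = {
+    #         'jobReference': {'projectId': 'my-project', 'jobId': 'job-1'},
+    #         'configuration': {'copy': {...}},
+    #     }
+    #     job = client.job_from_resource(resource)  # -> CopyJob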
+
+    def list_jobs(self, max_results=None, page_token=None, all_users=None,
+                  state_filter=None):
+        """List jobs for the project associated with this client.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs/list
+
+        :type max_results: int
+        :param max_results: maximum number of jobs to return. If not
+                            passed, defaults to a value set by the API.
+
+        :type page_token: str
+        :param page_token: opaque marker for the next "page" of jobs. If
+                           not passed, the API will return the first page of
+                           jobs.
+
+        :type all_users: boolean
+        :param all_users: if true, include jobs owned by all users in the
+                          project.
+
+        :type state_filter: str
+        :param state_filter: if passed, include only jobs matching the given
+                             state. One of
+
+                             * ``"done"``
+                             * ``"pending"``
+                             * ``"running"``
+
+        :rtype: tuple, (list, str)
+        :returns: list of job instances, plus a "next page token" string:
+                  if the token is not ``None``, indicates that more jobs can
+                  be retrieved with another call (passing that value as
+                  ``page_token``).
+        """
+        params = {'projection': 'full'}
+
+        if max_results is not None:
+            params['maxResults'] = max_results
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        if all_users is not None:
+            params['allUsers'] = all_users
+
+        if state_filter is not None:
+            params['stateFilter'] = state_filter
+
+        path = '/projects/%s/jobs' % (self.project,)
+        resp = self.connection.api_request(method='GET', path=path,
+                                           query_params=params)
+        jobs = [self.job_from_resource(resource)
+                for resource in resp.get('jobs', ())]
+        return jobs, resp.get('nextPageToken')
+
+
+    def load_table_from_storage(self, job_name, destination, *source_uris):
+        """Construct a job for loading data into a table from Cloud Storage.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load
+
+        :type job_name: str
+        :param job_name: Name of the job.
+
+        :type destination: :class:`gcloud.bigquery.table.Table`
+        :param destination: Table into which data is to be loaded.
+
+        :type source_uris: sequence of string
+        :param source_uris: URIs of data files to be loaded; in format
+                            ``gs://<bucket_name>/<object_name_or_glob>``.
+
+        :rtype: :class:`gcloud.bigquery.job.LoadTableFromStorageJob`
+        :returns: a new ``LoadTableFromStorageJob`` instance
+        """
+        return LoadTableFromStorageJob(job_name, destination, source_uris,
+                                       client=self)
+
+
+    def copy_table(self, job_name, destination, *sources):
+        """Construct a job for copying one or more tables into another table.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy
+
+        :type job_name: str
+        :param job_name: Name of the job.
+
+        :type destination: :class:`gcloud.bigquery.table.Table`
+        :param destination: Table into which data is to be copied.
+
+        :type sources: sequence of :class:`gcloud.bigquery.table.Table`
+        :param sources: tables to be copied.
+
+        :rtype: :class:`gcloud.bigquery.job.CopyJob`
+        :returns: a new ``CopyJob`` instance
+        """
+        return CopyJob(job_name, destination, sources, client=self)
+
+
+    def extract_table_to_storage(self, job_name, source, *destination_uris):
+        """Construct a job for extracting a table into Cloud Storage files.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract
+
+        :type job_name: str
+        :param job_name: Name of the job.
+
+        :type source: :class:`gcloud.bigquery.table.Table`
+        :param source: table to be extracted.
+
+        :type destination_uris: sequence of string
+        :param destination_uris: URIs of Cloud Storage file(s) into which
+                                 table data is to be extracted; in format
+                                 ``gs://<bucket_name>/<object_name_or_glob>``.
+
+        :rtype: :class:`gcloud.bigquery.job.ExtractTableToStorageJob`
+        :returns: a new ``ExtractTableToStorageJob`` instance
+        """
+        return ExtractTableToStorageJob(job_name, source, destination_uris,
+                                        client=self)
+
+
+    def run_async_query(self, job_name, query):
+        """Construct a job for running a SQL query asynchronously.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
+
+        :type job_name: str
+        :param job_name: Name of the job.
+
+        :type query: str
+        :param query: SQL query to be executed
+
+        :rtype: :class:`gcloud.bigquery.job.QueryJob`
+        :returns: a new ``QueryJob`` instance
+        """
+        return QueryJob(job_name, query, client=self)
+
+
+    def run_sync_query(self, query):
+        """Run a SQL query synchronously.
+
+        :type query: str
+        :param query: SQL query to be executed
+
+        :rtype: :class:`gcloud.bigquery.query.QueryResults`
+        :returns: a new ``QueryResults`` instance
+        """
+        return QueryResults(query, client=self)
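+
+
+# Editor's sketch (hedged): running the two query flavors built above.
+# Project and SQL text are illustrative.
+#
+#     client = Client(project='my-project')
+#     results = client.run_sync_query('SELECT 1')
+#     results.run()                    # executes the query
+#     job = client.run_async_query('job-1', 'SELECT 1')
+#     job.begin()                      # starts the job server-side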
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud bigquery connections."""
+
+from gcloud import connection as base_connection
+
+
+class Connection(base_connection.JSONConnection):
+    """A connection to Google Cloud BigQuery via the JSON REST API."""
+
+    API_BASE_URL = 'https://www.googleapis.com'
+    """The base of the API call URL."""
+
+    API_VERSION = 'v2'
+    """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = '{api_base_url}/bigquery/{api_version}{path}'
+    """A template for the URL of a particular API call."""
+
+    SCOPE = ('https://www.googleapis.com/auth/bigquery',
+             'https://www.googleapis.com/auth/cloud-platform')
+    """The scopes required for authenticating as a Cloud BigQuery consumer."""
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Datasets."""
+import six
+
+from gcloud._helpers import _datetime_from_microseconds
+from gcloud.exceptions import NotFound
+from gcloud.bigquery.table import Table
+
+
+
+class AccessGrant(object):
+    """Represent grant of an access role to an entity.
+
+    Every entry in the access list will have exactly one of
+    ``userByEmail``, ``groupByEmail``, ``domain``, ``specialGroup`` or
+    ``view`` set. And if anything but ``view`` is set, it'll also have a
+    ``role`` specified. ``role`` is omitted for a ``view``, since
+    ``view``\ s are always read-only.
+
+    See https://cloud.google.com/bigquery/docs/reference/v2/datasets.
+
+    :type role: string
+    :param role: Role granted to the entity. One of
+
+                 * ``'OWNER'``
+                 * ``'WRITER'``
+                 * ``'READER'``
+
+                 May also be ``None`` if the ``entity_type`` is ``view``.
+
+    :type entity_type: string
+    :param entity_type: Type of entity being granted the role. One of
+                        :attr:`ENTITY_TYPES`.
+
+    :type entity_id: string
+    :param entity_id: ID of entity being granted the role.
+
+    :raises: :class:`ValueError` if the ``entity_type`` is not among
+             :attr:`ENTITY_TYPES`, or if a ``view`` has ``role`` set, or if
+             a non-``view`` **does not** have a ``role`` set.
+    """
+
+    ENTITY_TYPES = frozenset(['userByEmail', 'groupByEmail', 'domain',
+                              'specialGroup', 'view'])
+    """Allowed entity types."""
+
+    def __init__(self, role, entity_type, entity_id):
+        if entity_type not in self.ENTITY_TYPES:
+            message = 'Entity type %r not among: %s' % (
+                entity_type, ', '.join(self.ENTITY_TYPES))
+            raise ValueError(message)
+        if entity_type == 'view':
+            if role is not None:
+                raise ValueError('Role must be None for a view. Received '
+                                 'role: %r' % (role,))
+        else:
+            if role is None:
+                raise ValueError('Role must be set for entity '
+                                 'type %r' % (entity_type,))
+
+        self.role = role
+        self.entity_type = entity_type
+        self.entity_id = entity_id
+
+    def __eq__(self, other):
+        return (
+            self.role == other.role and
+            self.entity_type == other.entity_type and
+            self.entity_id == other.entity_id)
+
+    def __repr__(self):
+        return '<AccessGrant: role=%s, %s=%s>' % (
+            self.role, self.entity_type, self.entity_id)
+
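+
+# Editor's sketch (hedged, not part of the released module): constructing
+# grants.  E-mail address and view ID are illustrative.
+_grants = [
+    AccessGrant('OWNER', 'userByEmail', 'owner@example.com'),
+    AccessGrant(None, 'view', 'my-project:my_dataset.my_view'),
+]
+assert repr(_grants[0]) == (
+    '<AccessGrant: role=OWNER, userByEmail=owner@example.com>')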
+
+
+class Dataset(object):
+    """Datasets are containers for tables.
+
+    See:
+    https://cloud.google.com/bigquery/docs/reference/v2/datasets
+
+    :type name: string
+    :param name: the name of the dataset
+
+    :type client: :class:`gcloud.bigquery.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the dataset (which requires a project).
+
+    :type access_grants: list of :class:`AccessGrant`
+    :param access_grants: roles granted to entities for this dataset
+    """
+
+    _access_grants = None
+
+    def __init__(self, name, client, access_grants=()):
+        self.name = name
+        self._client = client
+        self._properties = {}
+        # Let the @property do validation.
+        self.access_grants = access_grants
+
+    @property
+    def project(self):
+        """Project bound to the dataset.
+
+        :rtype: string
+        :returns: the project (derived from the client).
+        """
+        return self._client.project
+
+    @property
+    def path(self):
+        """URL path for the dataset's APIs.
+
+        :rtype: string
+        :returns: the path based on project and dataset name.
+        """
+        return '/projects/%s/datasets/%s' % (self.project, self.name)
+
+    @property
+    def access_grants(self):
+        """Dataset's access grants.
+
+        :rtype: list of :class:`AccessGrant`
+        :returns: roles granted to entities for this dataset
+        """
+        return list(self._access_grants)
+
+    @access_grants.setter
+    def access_grants(self, value):
+        """Update dataset's access grants
+
+        :type value: list of :class:`AccessGrant`
+        :param value: roles granted to entities for this dataset
+
+        :raises: TypeError if 'value' is not a sequence, or ValueError if
+                 any item in the sequence is not an AccessGrant
+        """
+        if not all(isinstance(field, AccessGrant) for field in value):
+            raise ValueError('Values must be AccessGrant instances')
+        self._access_grants = tuple(value)
+
+    @property
+    def created(self):
+        """Datetime at which the dataset was created.
+
+        :rtype: ``datetime.datetime``, or ``NoneType``
+        :returns: the creation time (None until set from the server).
+        """
+        creation_time = self._properties.get('creationTime')
+        if creation_time is not None:
+            # creation_time will be in milliseconds.
+            return _datetime_from_microseconds(1000.0 * creation_time)
+
+    @property
+    def dataset_id(self):
+        """ID for the dataset resource.
+
+        :rtype: string, or ``NoneType``
+        :returns: the ID (None until set from the server).
+        """
+        return self._properties.get('id')
+
+    @property
+    def etag(self):
+        """ETag for the dataset resource.
+
+        :rtype: string, or ``NoneType``
+        :returns: the ETag (None until set from the server).
+        """
+        return self._properties.get('etag')
+
+    @property
+    def modified(self):
+        """Datetime at which the dataset was last modified.
+
+        :rtype: ``datetime.datetime``, or ``NoneType``
+        :returns: the modification time (None until set from the server).
+        """
+        modified_time = self._properties.get('lastModifiedTime')
+        if modified_time is not None:
+            # modified_time will be in milliseconds.
+            return _datetime_from_microseconds(1000.0 * modified_time)
+
+    @property
+    def self_link(self):
+        """URL for the dataset resource.
+
+        :rtype: string, or ``NoneType``
+        :returns: the URL (None until set from the server).
+        """
+        return self._properties.get('selfLink')
+
+    @property
+    def default_table_expiration_ms(self):
+        """Default expiration time for tables in the dataset.
+
+        :rtype: integer, or ``NoneType``
+        :returns: The time in milliseconds, or None (the default).
+        """
+        return self._properties.get('defaultTableExpirationMs')
+
+    @default_table_expiration_ms.setter
+    def default_table_expiration_ms(self, value):
+        """Update default expiration time for tables in the dataset.
+
+        :type value: integer, or ``NoneType``
+        :param value: new default time, in milliseconds
+
+        :raises: ValueError for invalid value types.
+        """
+        if not isinstance(value, six.integer_types) and value is not None:
+            raise ValueError("Pass an integer, or None")
+        self._properties['defaultTableExpirationMs'] = value
+
+    @property
+    def description(self):
+        """Description of the dataset.
+
+        :rtype: string, or ``NoneType``
+        :returns: The description as set by the user, or None (the default).
+        """
+        return self._properties.get('description')
+
+    @description.setter
+    def description(self, value):
+        """Update description of the dataset.
+
+        :type value: string, or ``NoneType``
+        :param value: new description
+
+        :raises: ValueError for invalid value types.
+        """
+        if not isinstance(value, six.string_types) and value is not None:
+            raise ValueError("Pass a string, or None")
+        self._properties['description'] = value
+
+    @property
+    def friendly_name(self):
+        """Title of the dataset.
+
+        :rtype: string, or ``NoneType``
+        :returns: The name as set by the user, or None (the default).
+        """
+        return self._properties.get('friendlyName')
+
+    @friendly_name.setter
+    def friendly_name(self, value):
+        """Update title of the dataset.
+
+        :type value: string, or ``NoneType``
+        :param value: new title
+
+        :raises: ValueError for invalid value types.
+        """
+        if not isinstance(value, six.string_types) and value is not None:
+            raise ValueError("Pass a string, or None")
+        self._properties['friendlyName'] = value
+
+    @property
+    def location(self):
+        """Location in which the dataset is hosted.
+
+        :rtype: string, or ``NoneType``
+        :returns: The location as set by the user, or None (the default).
+        """
+        return self._properties.get('location')
+
+    @location.setter
+    def location(self, value):
+        """Update location in which the dataset is hosted.
+
+        :type value: string, or ``NoneType``
+        :param value: new location
+
+        :raises: ValueError for invalid value types.
+        """
+        if not isinstance(value, six.string_types) and value is not None:
+            raise ValueError("Pass a string, or None")
+        self._properties['location'] = value
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+        """Factory: construct a dataset given its API representation
+
+        :type resource: dict
+        :param resource: dataset resource representation returned from the API
+
+        :type client: :class:`gcloud.bigquery.client.Client`
+        :param client: Client which holds credentials and project
+                       configuration for the dataset.
+
+        :rtype: :class:`gcloud.bigquery.dataset.Dataset`
+        :returns: Dataset parsed from ``resource``.
+        """
+        if ('datasetReference' not in resource or
+                'datasetId' not in resource['datasetReference']):
+            raise KeyError('Resource lacks required identity information: '
+                           '["datasetReference"]["datasetId"]')
+        name = resource['datasetReference']['datasetId']
+        dataset = cls(name, client=client)
+        dataset._set_properties(resource)
+        return dataset
+
+    def _require_client(self, client):
+        """Check client or verify over-ride.
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current dataset.
+
+        :rtype: :class:`gcloud.bigquery.client.Client`
+        :returns: The client passed in or the currently bound client.
+        """
+        if client is None:
+            client = self._client
+        return client
+
+    @staticmethod
+    def _parse_access_grants(access):
+        """Parse a resource fragment into a set of access grants.
+
+        ``role`` augments the entity type and is present **unless** the
+        entity type is ``view``.
+
+        :type access: list of mappings
+        :param access: each mapping represents a single access grant
+
+        :rtype: list of :class:`AccessGrant`
+        :returns: a list of parsed grants
+        :raises: :class:`ValueError` if a grant in ``access`` has more keys
+                 than ``role`` and one additional key.
+        """
+        result = []
+        for grant in access:
+            grant = grant.copy()
+            role = grant.pop('role', None)
+            entity_type, entity_id = grant.popitem()
+            if len(grant) != 0:
+                raise ValueError('Grant has unexpected keys remaining.', grant)
+            result.append(
+                AccessGrant(role, entity_type, entity_id))
+        return result
+
+    def _set_properties(self, api_response):
+        """Update properties from resource in body of ``api_response``
+
+        :type api_response: httplib2.Response
+        :param api_response: response returned from an API call
+        """
+        self._properties.clear()
+        cleaned = api_response.copy()
+        access = cleaned.pop('access', ())
+        self.access_grants = self._parse_access_grants(access)
+        if 'creationTime' in cleaned:
+            cleaned['creationTime'] = float(cleaned['creationTime'])
+        if 'lastModifiedTime' in cleaned:
+            cleaned['lastModifiedTime'] = float(cleaned['lastModifiedTime'])
+        if 'defaultTableExpirationMs' in cleaned:
+            cleaned['defaultTableExpirationMs'] = int(
+                cleaned['defaultTableExpirationMs'])
+        self._properties.update(cleaned)
+
+    def _build_access_resource(self):
+        """Generate a resource fragment for dataset's access grants."""
+        result = []
+        for grant in self.access_grants:
+            info = {grant.entity_type: grant.entity_id}
+            if grant.role is not None:
+                info['role'] = grant.role
+            result.append(info)
+        return result
+
+    def _build_resource(self):
+        """Generate a resource for ``create`` or ``update``."""
+        resource = {
+            'datasetReference': {
+                'projectId': self.project, 'datasetId': self.name},
+        }
+        if self.default_table_expiration_ms is not None:
+            value = self.default_table_expiration_ms
+            resource['defaultTableExpirationMs'] = value
+
+        if self.description is not None:
+            resource['description'] = self.description
+
+        if self.friendly_name is not None:
+            resource['friendlyName'] = self.friendly_name
+
+        if self.location is not None:
+            resource['location'] = self.location
+
+        if len(self.access_grants) > 0:
+            resource['access'] = self._build_access_resource()
+
+        return resource
+
+
+    def create(self, client=None):
+        """API call: create the dataset via a POST request
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/datasets/insert
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current dataset.
+        """
+        client = self._require_client(client)
+        path = '/projects/%s/datasets' % (self.project,)
+        api_response = client.connection.api_request(
+            method='POST', path=path, data=self._build_resource())
+        self._set_properties(api_response)
+
+
+    def exists(self, client=None):
+        """API call: test for the existence of the dataset via a GET request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/v2/datasets/get
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current dataset.
+
+        :rtype: bool
+        :returns: Boolean indicating existence of the dataset.
+        """
+        client = self._require_client(client)
+
+        try:
+            client.connection.api_request(method='GET', path=self.path,
+                                          query_params={'fields': 'id'})
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def reload(self, client=None):
+        """API call: refresh dataset properties via a GET request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/v2/datasets/get
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current dataset.
+        """
+        client = self._require_client(client)
+
+        api_response = client.connection.api_request(
+            method='GET', path=self.path)
+        self._set_properties(api_response)
+
+
+    def patch(self, client=None, **kw):
+        """API call: update individual dataset properties via a PATCH request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/v2/datasets/patch
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current dataset.
+
+        :type kw: ``dict``
+        :param kw: properties to be patched.
+
+        :raises: ValueError for invalid value types.
+        """
+        client = self._require_client(client)
+
+        partial = {}
+
+        if 'default_table_expiration_ms' in kw:
+            value = kw['default_table_expiration_ms']
+            if not isinstance(value, six.integer_types) and value is not None:
+                raise ValueError("Pass an integer, or None")
+            partial['defaultTableExpirationMs'] = value
+
+        if 'description' in kw:
+            partial['description'] = kw['description']
+
+        if 'friendly_name' in kw:
+            partial['friendlyName'] = kw['friendly_name']
+
+        if 'location' in kw:
+            partial['location'] = kw['location']
+
+        api_response = client.connection.api_request(
+            method='PATCH', path=self.path, data=partial)
+        self._set_properties(api_response)
+
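+    # Editor's sketch (hedged): a typical create-then-patch round trip.
+    # Names are illustrative.
+    #
+    #     dataset = client.dataset('my_dataset')
+    #     dataset.description = 'Raw events'
+    #     dataset.create()                       # POST  .../datasets
+    #     dataset.patch(friendly_name='Events')  # PATCH .../datasets/my_dataset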
+
+    def update(self, client=None):
+        """API call: update dataset properties via a PUT request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/v2/datasets/update
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current dataset.
+        """
+        client = self._require_client(client)
+        api_response = client.connection.api_request(
+            method='PUT', path=self.path, data=self._build_resource())
+        self._set_properties(api_response)
+
+
+    def delete(self, client=None):
+        """API call: delete the dataset via a DELETE request
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/datasets/delete
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current dataset.
+        """
+        client = self._require_client(client)
+        client.connection.api_request(method='DELETE', path=self.path)
+
+
+    def list_tables(self, max_results=None, page_token=None):
+        """List tables for the dataset associated with this client.
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/tables/list
+
+        :type max_results: int
+        :param max_results: maximum number of tables to return. If not
+                            passed, defaults to a value set by the API.
+
+        :type page_token: string
+        :param page_token: opaque marker for the next "page" of tables. If
+                           not passed, the API will return the first page of
+                           tables.
+
+        :rtype: tuple, (list, str)
+        :returns: list of :class:`gcloud.bigquery.table.Table`, plus a
+                  "next page token" string: if not ``None``, indicates that
+                  more tables can be retrieved with another call (pass that
+                  value as ``page_token``).
+        """
+        params = {}
+
+        if max_results is not None:
+            params['maxResults'] = max_results
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/datasets/%s/tables' % (self.project, self.name)
+        connection = self._client.connection
+        resp = connection.api_request(method='GET', path=path,
+                                      query_params=params)
+        tables = [Table.from_api_repr(resource, self)
+                  for resource in resp.get('tables', ())]
+        return tables, resp.get('nextPageToken')
+
+
+    def table(self, name, schema=()):
+        """Construct a table bound to this dataset.
+
+        :type name: string
+        :param name: Name of the table.
+
+        :type schema: list of :class:`gcloud.bigquery.table.SchemaField`
+        :param schema: The table's schema
+
+        :rtype: :class:`gcloud.bigquery.table.Table`
+        :returns: a new ``Table`` instance
+        """
+        return Table(name, dataset=self, schema=schema)
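+
+
+# Editor's sketch (hedged): building a table with a schema through the
+# dataset factory above.  The assumed SchemaField signature follows
+# gcloud.bigquery.table; names are illustrative.
+#
+#     from gcloud.bigquery.table import SchemaField
+#     dataset = client.dataset('my_dataset')
+#     table = dataset.table(
+#         'events',
+#         schema=[SchemaField('name', 'STRING', mode='REQUIRED'),
+#                 SchemaField('age', 'INTEGER', mode='NULLABLE')])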
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Jobs."""
+
+import six
+
+from gcloud.exceptions import NotFound
+from gcloud._helpers import _datetime_from_microseconds
+from gcloud.bigquery.dataset import Dataset
+from gcloud.bigquery.table import SchemaField
+from gcloud.bigquery.table import Table
+from gcloud.bigquery.table import _build_schema_resource
+from gcloud.bigquery.table import _parse_schema_resource
+from gcloud.bigquery._helpers import _EnumProperty
+from gcloud.bigquery._helpers import _TypedProperty
+
+
+
+class UDFResource(object):
+    """Describe a single user-defined function (UDF) resource.
+
+    :type udf_type: str
+    :param udf_type: the type of the resource ('inlineCode' or 'resourceUri')
+
+    :type value: str
+    :param value: the inline code or resource URI
+
+    See
+    https://cloud.google.com/bigquery/user-defined-functions#api
+    """
+    def __init__(self, udf_type, value):
+        self.udf_type = udf_type
+        self.value = value
+
+    def __eq__(self, other):
+        return (
+            self.udf_type == other.udf_type and
+            self.value == other.value)
+
+
+def _build_udf_resources(resources):
+    """
+    :type resources: sequence of :class:`UDFResource`
+    :param resources: fields to be appended
+
+    :rtype: list of mappings
+    :returns: mappings describing userDefinedFunctionResources for the query.
+    """
+    udfs = []
+    for resource in resources:
+        udf = {resource.udf_type: resource.value}
+        udfs.append(udf)
+    return udfs
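+
+
+# Editor's sketch (hedged, not part of the released module): the resource
+# fragment produced for an inline UDF.  The JavaScript body is illustrative.
+assert _build_udf_resources(
+    [UDFResource('inlineCode', 'function noop(row, emit) {}')]
+) == [{'inlineCode': 'function noop(row, emit) {}'}]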
+
+
+
+class UDFResourcesProperty(object):
+    """Custom property type for :class:`QueryJob`.
+
+    Also used by :class:`~gcloud.bigquery.query.Query`.
+    """
+    def __get__(self, instance, owner):
+        """Descriptor protocol: accessor"""
+        if instance is None:
+            return self
+        return list(instance._udf_resources)
+
+    def __set__(self, instance, value):
+        """Descriptor protocol: mutator"""
+        if not all(isinstance(u, UDFResource) for u in value):
+            raise ValueError("udf items must be UDFResource")
+        instance._udf_resources = tuple(value)
+
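+
+# Editor's reconstruction (hedged): the ``_EnumProperty`` subclasses used by
+# the job classes below appear to have been dropped when this page was
+# extracted.  ALLOWED values follow the BigQuery v2 job configuration docs
+# and may not match the released module exactly.
+class Compression(_EnumProperty):
+    GZIP = 'GZIP'
+    NONE = 'NONE'
+    ALLOWED = (GZIP, NONE)
+
+
+class CreateDisposition(_EnumProperty):
+    CREATE_IF_NEEDED = 'CREATE_IF_NEEDED'
+    CREATE_NEVER = 'CREATE_NEVER'
+    ALLOWED = (CREATE_IF_NEEDED, CREATE_NEVER)
+
+
+class DestinationFormat(_EnumProperty):
+    CSV = 'CSV'
+    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
+    AVRO = 'AVRO'
+    ALLOWED = (CSV, NEWLINE_DELIMITED_JSON, AVRO)
+
+
+class Encoding(_EnumProperty):
+    UTF_8 = 'UTF-8'
+    ISO_8859_1 = 'ISO-8859-1'
+    ALLOWED = (UTF_8, ISO_8859_1)
+
+
+class SourceFormat(_EnumProperty):
+    CSV = 'CSV'
+    DATASTORE_BACKUP = 'DATASTORE_BACKUP'
+    NEWLINE_DELIMITED_JSON = 'NEWLINE_DELIMITED_JSON'
+    ALLOWED = (CSV, DATASTORE_BACKUP, NEWLINE_DELIMITED_JSON)
+
+
+class WriteDisposition(_EnumProperty):
+    WRITE_APPEND = 'WRITE_APPEND'
+    WRITE_TRUNCATE = 'WRITE_TRUNCATE'
+    WRITE_EMPTY = 'WRITE_EMPTY'
+    ALLOWED = (WRITE_APPEND, WRITE_TRUNCATE, WRITE_EMPTY)
+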
+
+class _BaseJob(object):
+    """Base class for jobs.
+
+    :type client: :class:`gcloud.bigquery.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the dataset (which requires a project).
+    """
+    def __init__(self, client):
+        self._client = client
+        self._properties = {}
+
+    @property
+    def project(self):
+        """Project bound to the job.
+
+        :rtype: string
+        :returns: the project (derived from the client).
+        """
+        return self._client.project
+
+    def _require_client(self, client):
+        """Check client or verify over-ride.
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current job.
+
+        :rtype: :class:`gcloud.bigquery.client.Client`
+        :returns: The client passed in or the currently bound client.
+        """
+        if client is None:
+            client = self._client
+        return client
+
+
+class _AsyncJob(_BaseJob):
+    """Base class for asynchronous jobs.
+
+    :type name: string
+    :param name: the name of the job
+
+    :type client: :class:`gcloud.bigquery.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the dataset (which requires a project).
+    """
+    def __init__(self, name, client):
+        super(_AsyncJob, self).__init__(client)
+        self.name = name
+
+    @property
+    def job_type(self):
+        """Type of job
+
+        :rtype: string
+        :returns: one of 'load', 'copy', 'extract', 'query'
+        """
+        return self._JOB_TYPE
+
+    @property
+    def path(self):
+        """URL path for the job's APIs.
+
+        :rtype: string
+        :returns: the path based on project and job name.
+        """
+        return '/projects/%s/jobs/%s' % (self.project, self.name)
+
+    @property
+    def etag(self):
+        """ETag for the job resource.
+
+        :rtype: string, or ``NoneType``
+        :returns: the ETag (None until set from the server).
+        """
+        return self._properties.get('etag')
+
+    @property
+    def self_link(self):
+        """URL for the job resource.
+
+        :rtype: string, or ``NoneType``
+        :returns: the URL (None until set from the server).
+        """
+        return self._properties.get('selfLink')
+
+    @property
+    def user_email(self):
+        """E-mail address of user who submitted the job.
+
+        :rtype: string, or ``NoneType``
+        :returns: the e-mail address (None until set from the server).
+        """
+        return self._properties.get('user_email')
+
+    @property
+    def created(self):
+        """Datetime at which the job was created.
+
+        :rtype: ``datetime.datetime``, or ``NoneType``
+        :returns: the creation time (None until set from the server).
+        """
+        statistics = self._properties.get('statistics')
+        if statistics is not None:
+            millis = statistics.get('creationTime')
+            if millis is not None:
+                return _datetime_from_microseconds(millis * 1000.0)
+
+    @property
+    def started(self):
+        """Datetime at which the job was started.
+
+        :rtype: ``datetime.datetime``, or ``NoneType``
+        :returns: the start time (None until set from the server).
+        """
+        statistics = self._properties.get('statistics')
+        if statistics is not None:
+            millis = statistics.get('startTime')
+            if millis is not None:
+                return _datetime_from_microseconds(millis * 1000.0)
+
+    @property
+    def ended(self):
+        """Datetime at which the job finished.
+
+        :rtype: ``datetime.datetime``, or ``NoneType``
+        :returns: the end time (None until set from the server).
+        """
+        statistics = self._properties.get('statistics')
+        if statistics is not None:
+            millis = statistics.get('endTime')
+            if millis is not None:
+                return _datetime_from_microseconds(millis * 1000.0)
+
+    @property
+    def error_result(self):
+        """Error information about the job as a whole.
+
+        :rtype: mapping, or ``NoneType``
+        :returns: the error information (None until set from the server).
+        """
+        status = self._properties.get('status')
+        if status is not None:
+            return status.get('errorResult')
+
+    @property
+    def errors(self):
+        """Information about individual errors generated by the job.
+
+        :rtype: list of mappings, or ``NoneType``
+        :returns: the error information (None until set from the server).
+        """
+        status = self._properties.get('status')
+        if status is not None:
+            return status.get('errors')
+
+    @property
+    def state(self):
+        """Status of the job.
+
+        :rtype: string, or ``NoneType``
+        :returns: the state (None until set from the server).
+        """
+        status = self._properties.get('status')
+        if status is not None:
+            return status.get('state')
+
+    def _scrub_local_properties(self, cleaned):
+        """Helper: handle subclass properties in cleaned."""
+        pass
+
+    def _set_properties(self, api_response):
+        """Update properties from resource in body of ``api_response``
+
+        :type api_response: httplib2.Response
+        :param api_response: response returned from an API call
+        """
+        cleaned = api_response.copy()
+        self._scrub_local_properties(cleaned)
+
+        statistics = cleaned.get('statistics', {})
+        if 'creationTime' in statistics:
+            statistics['creationTime'] = float(statistics['creationTime'])
+        if 'startTime' in statistics:
+            statistics['startTime'] = float(statistics['startTime'])
+        if 'endTime' in statistics:
+            statistics['endTime'] = float(statistics['endTime'])
+
+        self._properties.clear()
+        self._properties.update(cleaned)
+
+    @classmethod
+    def _get_resource_config(cls, resource):
+        """Helper for :meth:`from_api_repr`
+
+        :type resource: dict
+        :param resource: resource for the job
+
+        :rtype: tuple
+        :returns: tuple (string, dict), where the first element is the
+                  job name and the second contains job-specific configuration.
+        :raises: :class:`KeyError` if the resource has no identifier, or
+                 is missing the appropriate configuration.
+        """
+        if ('jobReference' not in resource or
+                'jobId' not in resource['jobReference']):
+            raise KeyError('Resource lacks required identity information: '
+                           '["jobReference"]["jobId"]')
+        name = resource['jobReference']['jobId']
+        if ('configuration' not in resource or
+                cls._JOB_TYPE not in resource['configuration']):
+            raise KeyError('Resource lacks required configuration: '
+                           '["configuration"]["%s"]' % cls._JOB_TYPE)
+        config = resource['configuration'][cls._JOB_TYPE]
+        return name, config
+
+    def begin(self, client=None):
+        """API call: begin the job via a POST request
+
+        See:
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs/insert
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current job.
+        """
+        client = self._require_client(client)
+        path = '/projects/%s/jobs' % (self.project,)
+        api_response = client.connection.api_request(
+            method='POST', path=path, data=self._build_resource())
+        self._set_properties(api_response)
+
+    def exists(self, client=None):
+        """API call: test for the existence of the job via a GET request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs/get
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current job.
+
+        :rtype: bool
+        :returns: Boolean indicating existence of the job.
+        """
+        client = self._require_client(client)
+
+        try:
+            client.connection.api_request(method='GET', path=self.path,
+                                          query_params={'fields': 'id'})
+        except NotFound:
+            return False
+        else:
+            return True
+
+    def reload(self, client=None):
+        """API call: refresh job properties via a GET request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs/get
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current job.
+        """
+        client = self._require_client(client)
+
+        api_response = client.connection.api_request(
+            method='GET', path=self.path)
+        self._set_properties(api_response)
+
+    def cancel(self, client=None):
+        """API call: cancel job via a POST request
+
+        See
+        https://cloud.google.com/bigquery/docs/reference/v2/jobs/cancel
+
+        :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+        :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current job.
+        """
+        client = self._require_client(client)
+
+        api_response = client.connection.api_request(
+            method='POST', path='%s/cancel' % (self.path,))
+        self._set_properties(api_response)
+
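+# Editor's sketch (hedged): a typical polling loop over the lifecycle
+# methods above.  The sleep interval is illustrative.
+#
+#     import time
+#     job.begin()
+#     while True:
+#         job.reload()           # refreshes job.state from the server
+#         if job.state == 'DONE':
+#             break
+#         time.sleep(1)
+#     if job.error_result:
+#         raise RuntimeError(job.error_result)
+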
+
+class _LoadConfiguration(object):
+    """User-settable configuration options for load jobs.
+
+    Values which are ``None`` -> server defaults.
+    """
+    _allow_jagged_rows = None
+    _allow_quoted_newlines = None
+    _create_disposition = None
+    _encoding = None
+    _field_delimiter = None
+    _ignore_unknown_values = None
+    _max_bad_records = None
+    _quote_character = None
+    _skip_leading_rows = None
+    _source_format = None
+    _write_disposition = None
+
+
+
+class LoadTableFromStorageJob(_AsyncJob):
+    """Asynchronous job for loading data into a table from Cloud Storage.
+
+    :type name: string
+    :param name: the name of the job
+
+    :type destination: :class:`gcloud.bigquery.table.Table`
+    :param destination: Table into which data is to be loaded.
+
+    :type source_uris: sequence of string
+    :param source_uris: URIs of one or more data files to be loaded, in
+                        format ``gs://<bucket_name>/<object_name_or_glob>``.
+
+    :type client: :class:`gcloud.bigquery.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the dataset (which requires a project).
+
+    :type schema: list of :class:`gcloud.bigquery.table.SchemaField`
+    :param schema: The job's schema
+    """
+
+    _schema = None
+    _JOB_TYPE = 'load'
+
+    def __init__(self, name, destination, source_uris, client, schema=()):
+        super(LoadTableFromStorageJob, self).__init__(name, client)
+        self.destination = destination
+        self.source_uris = source_uris
+        # Let the @property do validation.
+        self.schema = schema
+        self._configuration = _LoadConfiguration()
+
+    @property
+    def schema(self):
+        """Table's schema.
+
+        :rtype: list of :class:`SchemaField`
+        :returns: fields describing the schema
+        """
+        return list(self._schema)
+
+    @schema.setter
+    def schema(self, value):
+        """Update table's schema
+
+        :type value: list of :class:`SchemaField`
+        :param value: fields describing the schema
+
+        :raises: TypeError if 'value' is not a sequence, or ValueError if
+                 any item in the sequence is not a SchemaField
+        """
+        if not all(isinstance(field, SchemaField) for field in value):
+            raise ValueError('Schema items must be fields')
+        self._schema = tuple(value)
+
+    @property
+    def input_file_bytes(self):
+        """Count of bytes loaded from source files.
+
+        :rtype: integer, or ``NoneType``
+        :returns: the count (None until set from the server).
+        """
+        statistics = self._properties.get('statistics')
+        if statistics is not None:
+            return int(statistics['load']['inputFileBytes'])
+
+    @property
+    def input_files(self):
+        """Count of source files.
+
+        :rtype: integer, or ``NoneType``
+        :returns: the count (None until set from the server).
+        """
+        statistics = self._properties.get('statistics')
+        if statistics is not None:
+            return int(statistics['load']['inputFiles'])
+
+    @property
+    def output_bytes(self):
+        """Count of bytes saved to destination table.
+
+        :rtype: integer, or ``NoneType``
+        :returns: the count (None until set from the server).
+        """
+        statistics = self._properties.get('statistics')
+        if statistics is not None:
+            return int(statistics['load']['outputBytes'])
+
+    @property
+    def output_rows(self):
+        """Count of rows saved to destination table.
+
+        :rtype: integer, or ``NoneType``
+        :returns: the count (None until set from the server).
+        """
+        statistics = self._properties.get('statistics')
+        if statistics is not None:
+            return int(statistics['load']['outputRows'])
+
+    allow_jagged_rows = _TypedProperty('allow_jagged_rows', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowJaggedRows
+    """
+
+    allow_quoted_newlines = _TypedProperty('allow_quoted_newlines', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.allowQuotedNewlines
+    """
+
+    create_disposition = CreateDisposition('create_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.createDisposition
+    """
+
+    encoding = Encoding('encoding')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.encoding
+    """
+
+    field_delimiter = _TypedProperty('field_delimiter', six.string_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.fieldDelimiter
+    """
+
+    ignore_unknown_values = _TypedProperty('ignore_unknown_values', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.ignoreUnknownValues
+    """
+
+    max_bad_records = _TypedProperty('max_bad_records', six.integer_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.maxBadRecords
+    """
+
+    quote_character = _TypedProperty('quote_character', six.string_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.quote
+    """
+
+    skip_leading_rows = _TypedProperty('skip_leading_rows', six.integer_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.skipLeadingRows
+    """
+
+    source_format = SourceFormat('source_format')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.sourceFormat
+    """
+
+    write_disposition = WriteDisposition('write_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load.writeDisposition
+    """
+
+    def _populate_config_resource(self, configuration):
+        """Helper for _build_resource: copy config properties to resource"""
+        if self.allow_jagged_rows is not None:
+            configuration['allowJaggedRows'] = self.allow_jagged_rows
+        if self.allow_quoted_newlines is not None:
+            configuration['allowQuotedNewlines'] = self.allow_quoted_newlines
+        if self.create_disposition is not None:
+            configuration['createDisposition'] = self.create_disposition
+        if self.encoding is not None:
+            configuration['encoding'] = self.encoding
+        if self.field_delimiter is not None:
+            configuration['fieldDelimiter'] = self.field_delimiter
+        if self.ignore_unknown_values is not None:
+            configuration['ignoreUnknownValues'] = self.ignore_unknown_values
+        if self.max_bad_records is not None:
+            configuration['maxBadRecords'] = self.max_bad_records
+        if self.quote_character is not None:
+            configuration['quote'] = self.quote_character
+        if self.skip_leading_rows is not None:
+            configuration['skipLeadingRows'] = self.skip_leading_rows
+        if self.source_format is not None:
+            configuration['sourceFormat'] = self.source_format
+        if self.write_disposition is not None:
+            configuration['writeDisposition'] = self.write_disposition
+
+    def _build_resource(self):
+        """Generate a resource for :meth:`begin`."""
+        resource = {
+            'jobReference': {
+                'projectId': self.project,
+                'jobId': self.name,
+            },
+            'configuration': {
+                self._JOB_TYPE: {
+                    'sourceUris': self.source_uris,
+                    'destinationTable': {
+                        'projectId': self.destination.project,
+                        'datasetId': self.destination.dataset_name,
+                        'tableId': self.destination.name,
+                    },
+                },
+            },
+        }
+        configuration = resource['configuration'][self._JOB_TYPE]
+        self._populate_config_resource(configuration)
+
+        if len(self.schema) > 0:
+            configuration['schema'] = {
+                'fields': _build_schema_resource(self.schema)}
+
+        return resource
+
+    def _scrub_local_properties(self, cleaned):
+        """Helper: handle subclass properties in cleaned."""
+        schema = cleaned.pop('schema', {'fields': ()})
+        self.schema = _parse_schema_resource(schema)
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+        """Factory: construct a job given its API representation
+
+        .. note:
+
+           This method assumes that the project found in the resource matches
+           the client's project.
+
+        :type resource: dict
+        :param resource: dataset job representation returned from the API
+
+        :type client: :class:`gcloud.bigquery.client.Client`
+        :param client: Client which holds credentials and project
+                       configuration for the dataset.
+
+        :rtype: :class:`gcloud.bigquery.job.LoadTableFromStorageJob`
+        :returns: Job parsed from ``resource``.
+        """
+        name, config = cls._get_resource_config(resource)
+        dest_config = config['destinationTable']
+        dataset = Dataset(dest_config['datasetId'], client)
+        destination = Table(dest_config['tableId'], dataset)
+        source_urls = config.get('sourceUris', ())
+        job = cls(name, destination, source_urls, client=client)
+        job._set_properties(resource)
+        return job
+
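+# Editor's sketch (hedged): configuring and starting a load job via
+# Client.load_table_from_storage; bucket and table names are illustrative,
+# and the enum classes are the reconstruction given earlier.
+#
+#     job = client.load_table_from_storage(
+#         'load-1', table, 'gs://my-bucket/events-*.csv')
+#     job.source_format = SourceFormat.CSV
+#     job.skip_leading_rows = 1
+#     job.write_disposition = WriteDisposition.WRITE_TRUNCATE
+#     job.begin()
+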
+
+class _CopyConfiguration(object):
+    """User-settable configuration options for copy jobs.
+
+    Values which are ``None`` -> server defaults.
+    """
+    _create_disposition = None
+    _write_disposition = None
+
+
+
+class CopyJob(_AsyncJob):
+    """Asynchronous job: copy data into a table from other tables.
+
+    :type name: string
+    :param name: the name of the job
+
+    :type destination: :class:`gcloud.bigquery.table.Table`
+    :param destination: Table into which data is to be copied.
+
+    :type sources: list of :class:`gcloud.bigquery.table.Table`
+    :param sources: Tables from which data is to be copied.
+
+    :type client: :class:`gcloud.bigquery.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the dataset (which requires a project).
+    """
+
+    _JOB_TYPE = 'copy'
+
+    def __init__(self, name, destination, sources, client):
+        super(CopyJob, self).__init__(name, client)
+        self.destination = destination
+        self.sources = sources
+        self._configuration = _CopyConfiguration()
+
+    create_disposition = CreateDisposition('create_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.createDisposition
+    """
+
+    write_disposition = WriteDisposition('write_disposition')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy.writeDisposition
+    """
+
+    def _populate_config_resource(self, configuration):
+        """Helper for _build_resource: copy config properties to resource"""
+        if self.create_disposition is not None:
+            configuration['createDisposition'] = self.create_disposition
+        if self.write_disposition is not None:
+            configuration['writeDisposition'] = self.write_disposition
+
+    def _build_resource(self):
+        """Generate a resource for :meth:`begin`."""
+
+        source_refs = [{
+            'projectId': table.project,
+            'datasetId': table.dataset_name,
+            'tableId': table.name,
+        } for table in self.sources]
+
+        resource = {
+            'jobReference': {
+                'projectId': self.project,
+                'jobId': self.name,
+            },
+            'configuration': {
+                self._JOB_TYPE: {
+                    'sourceTables': source_refs,
+                    'destinationTable': {
+                        'projectId': self.destination.project,
+                        'datasetId': self.destination.dataset_name,
+                        'tableId': self.destination.name,
+                    },
+                },
+            },
+        }
+        configuration = resource['configuration'][self._JOB_TYPE]
+        self._populate_config_resource(configuration)
+
+        return resource
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+        """Factory: construct a job given its API representation
+
+        .. note:
+
+           This method assumes that the project found in the resource matches
+           the client's project.
+
+        :type resource: dict
+        :param resource: dataset job representation returned from the API
+
+        :type client: :class:`gcloud.bigquery.client.Client`
+        :param client: Client which holds credentials and project
+                       configuration for the dataset.
+
+        :rtype: :class:`gcloud.bigquery.job.CopyJob`
+        :returns: Job parsed from ``resource``.
+        """
+        name, config = cls._get_resource_config(resource)
+        dest_config = config['destinationTable']
+        dataset = Dataset(dest_config['datasetId'], client)
+        destination = Table(dest_config['tableId'], dataset)
+        sources = []
+        for source_config in config['sourceTables']:
+            dataset = Dataset(source_config['datasetId'], client)
+            sources.append(Table(source_config['tableId'], dataset))
+        job = cls(name, destination, sources, client=client)
+        job._set_properties(resource)
+        return job
+
+
+class _ExtractConfiguration(object):
+    """User-settable configuration options for extract jobs.
+
+    Values which are ``None`` -> server defaults.
+    """
+    _compression = None
+    _destination_format = None
+    _field_delimiter = None
+    _print_header = None
+
+
+
+class ExtractTableToStorageJob(_AsyncJob):
+    """Asynchronous job: extract data from a table into Cloud Storage.
+
+    :type name: string
+    :param name: the name of the job
+
+    :type source: :class:`gcloud.bigquery.table.Table`
+    :param source: Table from which data is to be extracted.
+
+    :type destination_uris: list of string
+    :param destination_uris: URIs describing Cloud Storage blobs into which
+                             extracted data will be written, in format
+                             ``gs://<bucket_name>/<object_name_or_glob>``.
+
+    :type client: :class:`gcloud.bigquery.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the dataset (which requires a project).
+    """
+    _JOB_TYPE = 'extract'
+
+    def __init__(self, name, source, destination_uris, client):
+        super(ExtractTableToStorageJob, self).__init__(name, client)
+        self.source = source
+        self.destination_uris = destination_uris
+        self._configuration = _ExtractConfiguration()
+
+    compression = Compression('compression')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.compression
+    """
+
+    destination_format = DestinationFormat('destination_format')
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.destinationFormat
+    """
+
+    field_delimiter = _TypedProperty('field_delimiter', six.string_types)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.fieldDelimiter
+    """
+
+    print_header = _TypedProperty('print_header', bool)
+    """See:
+    https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.printHeader
+    """
+
+ def_populate_config_resource(self,configuration):
+ """Helper for _build_resource: copy config properties to resource"""
+ ifself.compressionisnotNone:
+ configuration['compression']=self.compression
+ ifself.destination_formatisnotNone:
+ configuration['destinationFormat']=self.destination_format
+ ifself.field_delimiterisnotNone:
+ configuration['fieldDelimiter']=self.field_delimiter
+ ifself.print_headerisnotNone:
+ configuration['printHeader']=self.print_header
+
+ def_build_resource(self):
+ """Generate a resource for :meth:`begin`."""
+
+ source_ref={
+ 'projectId':self.source.project,
+ 'datasetId':self.source.dataset_name,
+ 'tableId':self.source.name,
+ }
+
+ resource={
+ 'jobReference':{
+ 'projectId':self.project,
+ 'jobId':self.name,
+ },
+ 'configuration':{
+ self._JOB_TYPE:{
+ 'sourceTable':source_ref,
+ 'destinationUris':self.destination_uris,
+ },
+ },
+ }
+ configuration=resource['configuration'][self._JOB_TYPE]
+ self._populate_config_resource(configuration)
+
+ returnresource
+
+ @classmethod
+ def from_api_repr(cls, resource, client):
+ """Factory: construct a job given its API representation
+
+ .. note::
+
+ This method assumes that the project found in the resource matches
+ the client's project.
+
+ :type resource: dict
+ :param resource: dataset job representation returned from the API
+
+ :type client: :class:`gcloud.bigquery.client.Client`
+ :param client: Client which holds credentials and project
+ configuration for the dataset.
+
+ :rtype: :class:`gcloud.bigquery.job.ExtractTableToStorageJob`
+ :returns: Job parsed from ``resource``.
+ """
+ name, config = cls._get_resource_config(resource)
+ source_config = config['sourceTable']
+ dataset = Dataset(source_config['datasetId'], client)
+ source = Table(source_config['tableId'], dataset)
+ destination_uris = config['destinationUris']
+ job = cls(name, source, destination_uris, client=client)
+ job._set_properties(resource)
+ return job
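+# Example (illustrative sketch): exporting a table to Cloud Storage,
+# assuming the ``Client.extract_table_to_storage`` factory from this
+# release; ``client``, ``table`` and the bucket URI are placeholders.
+#
+#   job = client.extract_table_to_storage(
+#       'extract-job-1', table, 'gs://my-bucket/person_ages-*.csv')
+#   job.destination_format = 'CSV'
+#   job.print_header = True
+#   job.begin()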
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Queries."""
+
+importsix
+
+fromgcloud.bigquery._helpersimport_TypedProperty
+fromgcloud.bigquery._helpersimport_rows_from_json
+fromgcloud.bigquery.datasetimportDataset
+fromgcloud.bigquery.jobimportQueryJob
+fromgcloud.bigquery.jobimportUDFResourcesProperty
+fromgcloud.bigquery.jobimport_build_udf_resources
+fromgcloud.bigquery.tableimport_parse_schema_resource
+
+
+class _SyncQueryConfiguration(object):
+ """User-settable configuration options for synchronous query jobs.
+
+ Values which are ``None`` -> server defaults.
+ """
+ _default_dataset = None
+ _dry_run = None
+ _max_results = None
+ _timeout_ms = None
+ _preserve_nulls = None
+ _use_query_cache = None
+ _use_legacy_sql = None
+
+
+
+class QueryResults(object):
+ """Synchronous job: query tables.
+
+ :type query: string
+ :param query: SQL query string
+
+ :type client: :class:`gcloud.bigquery.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the dataset (which requires a project).
+
+ :type udf_resources: tuple
+ :param udf_resources: An iterable of
+ :class:`gcloud.bigquery.job.UDFResource`
+ (empty by default)
+ """
+
+ _UDF_KEY='userDefinedFunctionResources'
+
+ def __init__(self, query, client, udf_resources=()):
+     self._client = client
+     self._properties = {}
+     self.query = query
+     self._configuration = _SyncQueryConfiguration()
+     self.udf_resources = udf_resources
+     self._job = None
+
+ @property
+ defproject(self):
+ """Project bound to the job.
+
+ :rtype: string
+ :returns: the project (derived from the client).
+ """
+ returnself._client.project
+
+ def_require_client(self,client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :rtype: :class:`gcloud.bigquery.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+ ifclientisNone:
+ client=self._client
+ returnclient
+
+ @property
+ defcache_hit(self):
+ """Query results served from cache.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#cacheHit
+
+ :rtype: boolean or ``NoneType``
+ :returns: True if the query results were served from cache (None
+ until set by the server).
+ """
+ returnself._properties.get('cacheHit')
+
+ @property
+ defcomplete(self):
+ """Server completed query.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#jobComplete
+
+ :rtype: boolean or ``NoneType``
+ :returns: True if the query completed on the server (None
+ until set by the server).
+ """
+ returnself._properties.get('jobComplete')
+
+ @property
+ deferrors(self):
+ """Errors generated by the query.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#errors
+
+ :rtype: list of mapping, or ``NoneType``
+ :returns: Mappings describing errors generated on the server (None
+ until set by the server).
+ """
+ returnself._properties.get('errors')
+
+ @property
+ defname(self):
+ """Job name, generated by the back-end.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#jobReference
+
+ :rtype: string, or ``NoneType``
+ :returns: the job name (None until set by the server).
+ """
+ returnself._properties.get('jobReference',{}).get('jobId')
+
+ @property
+ defjob(self):
+ """Job instance used to run the query.
+
+ :rtype: :class:`gcloud.bigquery.job.QueryJob`, or ``NoneType``
+ :returns: Job instance used to run the query (None until
+ ``jobReference`` property is set by the server).
+ """
+ ifself._jobisNone:
+ job_ref=self._properties.get('jobReference')
+ ifjob_refisnotNone:
+ self._job=QueryJob(job_ref['jobId'],self.query,
+ self._client)
+ returnself._job
+
+ @property
+ defpage_token(self):
+ """Token for fetching next bach of results.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#pageToken
+
+ :rtype: string, or ``NoneType``
+ :returns: Token generated on the server (None until set by the server).
+ """
+ returnself._properties.get('pageToken')
+
+ @property
+ deftotal_rows(self):
+ """Total number of rows returned by the query
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#totalRows
+
+ :rtype: integer, or ``NoneType``
+ :returns: Count generated on the server (None until set by the server).
+ """
+ returnself._properties.get('totalRows')
+
+ @property
+ deftotal_bytes_processed(self):
+ """Total number of bytes processed by the query
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#totalBytesProcessed
+
+ :rtype: integer, or ``NoneType``
+ :returns: Count generated on the server (None until set by the server).
+ """
+ returnself._properties.get('totalBytesProcessed')
+
+ @property
+ defrows(self):
+ """Query results.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#rows
+
+ :rtype: list of tuples of row values, or ``NoneType``
+ :returns: row data (None until set by the server).
+ """
+ return_rows_from_json(self._properties.get('rows',()),self.schema)
+
+ @property
+ defschema(self):
+ """Schema for query results.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#schema
+
+ :rtype: list of :class:`SchemaField`, or ``NoneType``
+ :returns: fields describing the schema (None until set by the server).
+ """
+ return_parse_schema_resource(self._properties.get('schema',{}))
+
+ default_dataset=_TypedProperty('default_dataset',Dataset)
+ """See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#defaultDataset
+ """
+
+ dry_run=_TypedProperty('dry_run',bool)
+ """See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#dryRun
+ """
+
+ max_results=_TypedProperty('max_results',six.integer_types)
+ """See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#maxResults
+ """
+
+ preserve_nulls=_TypedProperty('preserve_nulls',bool)
+ """See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#preserveNulls
+ """
+
+ timeout_ms=_TypedProperty('timeout_ms',six.integer_types)
+ """See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#timeoutMs
+ """
+
+ udf_resources=UDFResourcesProperty()
+
+ use_query_cache=_TypedProperty('use_query_cache',bool)
+ """See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#useQueryCache
+ """
+
+ use_legacy_sql=_TypedProperty('use_legacy_sql',bool)
+ """See:
+ https://cloud.google.com/bigquery/docs/\
+ reference/v2/jobs/query#useLegacySql
+ """
+
+ def_set_properties(self,api_response):
+ """Update properties from resource in body of ``api_response``
+
+ :type api_response: httplib2.Response
+ :param api_response: response returned from an API call
+ """
+ self._properties.clear()
+ self._properties.update(api_response)
+
+ def_build_resource(self):
+ """Generate a resource for :meth:`begin`."""
+ resource={'query':self.query}
+
+ ifself.default_datasetisnotNone:
+ resource['defaultDataset']={
+ 'projectId':self.project,
+ 'datasetId':self.default_dataset.name,
+ }
+
+ ifself.max_resultsisnotNone:
+ resource['maxResults']=self.max_results
+
+ ifself.preserve_nullsisnotNone:
+ resource['preserveNulls']=self.preserve_nulls
+
+ ifself.timeout_msisnotNone:
+ resource['timeoutMs']=self.timeout_ms
+
+ ifself.use_query_cacheisnotNone:
+ resource['useQueryCache']=self.use_query_cache
+
+ ifself.use_legacy_sqlisnotNone:
+ resource['useLegacySql']=self.use_legacy_sql
+
+ ifself.dry_runisnotNone:
+ resource['dryRun']=self.dry_run
+
+ iflen(self._udf_resources)>0:
+ resource[self._UDF_KEY]=_build_udf_resources(self._udf_resources)
+
+ returnresource
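+ # Example (illustrative): for ``QueryResults('SELECT 1', client)`` with
+ # ``use_legacy_sql = False`` and ``timeout_ms = 10000``, the method
+ # above would build:
+ #
+ #   {'query': 'SELECT 1',
+ #    'timeoutMs': 10000,
+ #    'useLegacySql': False}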
+
+
+ def run(self, client=None):
+ """API call: run the query via a POST request
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/query
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client=self._require_client(client)
+ path='/projects/%s/queries'%(self.project,)
+ api_response=client.connection.api_request(
+ method='POST',path=path,data=self._build_resource())
+ self._set_properties(api_response)
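+ # Example (illustrative): running a synchronous query; ``client`` is a
+ # placeholder, and ``run_sync_query`` is the Client factory assumed to
+ # construct this class in this release.
+ #
+ #   query = client.run_sync_query('SELECT age FROM my_dataset.people')
+ #   query.timeout_ms = 1000
+ #   query.run()
+ #   if query.complete:
+ #       for row in query.rows:
+ #           print(row)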
+
+
+ def fetch_data(self, max_results=None, page_token=None, start_index=None,
+                timeout_ms=None, client=None):
+ """API call: fetch a page of query result data via a GET request
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults
+
+ :type max_results: integer or ``NoneType``
+ :param max_results: maximum number of rows to return.
+
+ :type page_token: string or ``NoneType``
+ :param page_token: token representing a cursor into the table's rows.
+
+ :type start_index: integer or ``NoneType``
+ :param start_index: zero-based index of starting row
+
+ :type timeout_ms: integer or ``NoneType``
+ :param timeout_ms: timeout, in milliseconds, to wait for query to
+ complete
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :rtype: tuple
+ :returns: ``(row_data, total_rows, page_token)``, where ``row_data``
+ is a list of tuples, one per result row, containing only
+ the values; ``total_rows`` is a count of the total number
+ of rows in the table; and ``page_token`` is an opaque
+ string which can be used to fetch the next batch of rows
+ (``None`` if no further batches can be fetched).
+ :raises: ValueError if the query has not yet been executed.
+ """
+ ifself.nameisNone:
+ raiseValueError("Query not yet executed: call 'run()'")
+
+ client=self._require_client(client)
+ params={}
+
+ ifmax_resultsisnotNone:
+ params['maxResults']=max_results
+
+ ifpage_tokenisnotNone:
+ params['pageToken']=page_token
+
+ ifstart_indexisnotNone:
+ params['startIndex']=start_index
+
+ iftimeout_msisnotNone:
+ params['timeoutMs']=timeout_ms
+
+ path='/projects/%s/queries/%s'%(self.project,self.name)
+ response=client.connection.api_request(method='GET',
+ path=path,
+ query_params=params)
+ self._set_properties(response)
+
+ total_rows=response.get('totalRows')
+ iftotal_rowsisnotNone:
+ total_rows=int(total_rows)
+ page_token=response.get('pageToken')
+ rows_data=_rows_from_json(response.get('rows',()),self.schema)
+
+ returnrows_data,total_rows,page_token
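+# Example (illustrative): paging through results after ``run()``; keep
+# fetching while the server returns a page token.
+#
+#   rows, total_rows, token = query.fetch_data(max_results=100)
+#   while token is not None:
+#       more_rows, _, token = query.fetch_data(max_results=100,
+#                                              page_token=token)
+#       rows.extend(more_rows)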
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Datasets."""
+
+importdatetime
+importjson
+importos
+
+importsix
+
+fromgcloud._helpersimport_datetime_from_microseconds
+fromgcloud._helpersimport_microseconds_from_datetime
+fromgcloud._helpersimport_millis_from_datetime
+fromgcloud.exceptionsimportNotFound
+fromgcloud.streaming.http_wrapperimportRequest
+fromgcloud.streaming.http_wrapperimportmake_api_request
+fromgcloud.streaming.transferimportRESUMABLE_UPLOAD
+fromgcloud.streaming.transferimportUpload
+fromgcloud.bigquery._helpersimport_rows_from_json
+
+
+_MARKER=object()
+
+
+
+class SchemaField(object):
+ """Describe a single field within a table schema.
+
+ :type name: str
+ :param name: the name of the field
+
+ :type field_type: str
+ :param field_type: the type of the field (one of 'STRING', 'INTEGER',
+ 'FLOAT', 'BOOLEAN', 'TIMESTAMP' or 'RECORD')
+
+ :type mode: str
+ :param mode: the mode of the field (one of 'NULLABLE', 'REQUIRED',
+ or 'REPEATED')
+
+ :type description: str
+ :param description: optional description for the field
+
+ :type fields: list of :class:`SchemaField`, or None
+ :param fields: subfields (requires ``field_type`` of 'RECORD').
+ """
+ def __init__(self, name, field_type, mode='NULLABLE', description=None,
+              fields=None):
+     self.name = name
+     self.field_type = field_type
+     self.mode = mode
+     self.description = description
+     self.fields = fields
+
+ def __eq__(self, other):
+     return (
+         self.name == other.name and
+         self.field_type.lower() == other.field_type.lower() and
+         self.mode == other.mode and
+         self.description == other.description and
+         self.fields == other.fields)
+
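+# Example (illustrative): a schema with a nested, repeated RECORD field,
+# using only the constructor arguments documented above.
+#
+#   full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
+#   age = SchemaField('age', 'INTEGER', mode='REQUIRED')
+#   phone = SchemaField('phone', 'RECORD', mode='REPEATED', fields=[
+#       SchemaField('type', 'STRING'),
+#       SchemaField('number', 'STRING'),
+#   ])
+#   schema = [full_name, age, phone]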
+
+
+class Table(object):
+ """Tables represent a set of rows whose values correspond to a schema.
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/tables
+
+ :type name: str
+ :param name: the name of the table
+
+ :type dataset: :class:`gcloud.bigquery.dataset.Dataset`
+ :param dataset: The dataset which contains the table.
+
+ :type schema: list of :class:`SchemaField`
+ :param schema: The table's schema
+ """
+
+ _schema=None
+
+ def __init__(self, name, dataset, schema=()):
+     self.name = name
+     self._dataset = dataset
+     self._properties = {}
+     # Let the @property do validation.
+     self.schema = schema
+
+ @property
+ defproject(self):
+ """Project bound to the table.
+
+ :rtype: str
+ :returns: the project (derived from the dataset).
+ """
+ returnself._dataset.project
+
+ @property
+ defdataset_name(self):
+ """Name of dataset containing the table.
+
+ :rtype: str
+ :returns: the ID (derived from the dataset).
+ """
+ returnself._dataset.name
+
+ @property
+ defpath(self):
+ """URL path for the table's APIs.
+
+ :rtype: str
+ :returns: the path based on project and dataset name.
+ """
+ return'%s/tables/%s'%(self._dataset.path,self.name)
+
+ @property
+ defschema(self):
+ """Table's schema.
+
+ :rtype: list of :class:`SchemaField`
+ :returns: fields describing the schema
+ """
+ returnlist(self._schema)
+
+ @schema.setter
+ defschema(self,value):
+ """Update table's schema
+
+ :type value: list of :class:`SchemaField`
+ :param value: fields describing the schema
+
+ :raises: TypeError if 'value' is not a sequence, or ValueError if
+ any item in the sequence is not a SchemaField
+ """
+ ifnotall(isinstance(field,SchemaField)forfieldinvalue):
+ raiseValueError('Schema items must be fields')
+ self._schema=tuple(value)
+
+ @property
+ defcreated(self):
+ """Datetime at which the table was created.
+
+ :rtype: ``datetime.datetime``, or ``NoneType``
+ :returns: the creation time (None until set from the server).
+ """
+ creation_time=self._properties.get('creationTime')
+ ifcreation_timeisnotNone:
+ # creation_time will be in milliseconds.
+ return_datetime_from_microseconds(1000.0*creation_time)
+
+ @property
+ defetag(self):
+ """ETag for the table resource.
+
+ :rtype: str, or ``NoneType``
+ :returns: the ETag (None until set from the server).
+ """
+ returnself._properties.get('etag')
+
+ @property
+ defmodified(self):
+ """Datetime at which the table was last modified.
+
+ :rtype: ``datetime.datetime``, or ``NoneType``
+ :returns: the modification time (None until set from the server).
+ """
+ modified_time=self._properties.get('lastModifiedTime')
+ ifmodified_timeisnotNone:
+ # modified_time will be in milliseconds.
+ return_datetime_from_microseconds(1000.0*modified_time)
+
+ @property
+ defnum_bytes(self):
+ """The size of the table in bytes.
+
+ :rtype: integer, or ``NoneType``
+ :returns: the byte count (None until set from the server).
+ """
+ num_bytes_as_str=self._properties.get('numBytes')
+ ifnum_bytes_as_strisnotNone:
+ returnint(num_bytes_as_str)
+
+ @property
+ defnum_rows(self):
+ """The number of rows in the table.
+
+ :rtype: integer, or ``NoneType``
+ :returns: the row count (None until set from the server).
+ """
+ num_rows_as_str=self._properties.get('numRows')
+ ifnum_rows_as_strisnotNone:
+ returnint(num_rows_as_str)
+
+ @property
+ defself_link(self):
+ """URL for the table resource.
+
+ :rtype: str, or ``NoneType``
+ :returns: the URL (None until set from the server).
+ """
+ returnself._properties.get('selfLink')
+
+ @property
+ deftable_id(self):
+ """ID for the table resource.
+
+ :rtype: str, or ``NoneType``
+ :returns: the ID (None until set from the server).
+ """
+ returnself._properties.get('id')
+
+ @property
+ deftable_type(self):
+ """The type of the table.
+
+ Possible values are "TABLE" or "VIEW".
+
+ :rtype: str, or ``NoneType``
+ :returns: the type (None until set from the server).
+ """
+ returnself._properties.get('type')
+
+ @property
+ defdescription(self):
+ """Description of the table.
+
+ :rtype: str, or ``NoneType``
+ :returns: The description as set by the user, or None (the default).
+ """
+ returnself._properties.get('description')
+
+ @description.setter
+ defdescription(self,value):
+ """Update description of the table.
+
+ :type value: str, or ``NoneType``
+ :param value: new description
+
+ :raises: ValueError for invalid value types.
+ """
+ ifnotisinstance(value,six.string_types)andvalueisnotNone:
+ raiseValueError("Pass a string, or None")
+ self._properties['description']=value
+
+ @property
+ defexpires(self):
+ """Datetime at which the table will be removed.
+
+ :rtype: ``datetime.datetime``, or ``NoneType``
+ :returns: the expiration time, or None
+ """
+ expiration_time=self._properties.get('expirationTime')
+ ifexpiration_timeisnotNone:
+ # expiration_time will be in milliseconds.
+ return_datetime_from_microseconds(1000.0*expiration_time)
+
+ @expires.setter
+ defexpires(self,value):
+ """Update datetime at which the table will be removed.
+
+ :type value: ``datetime.datetime``, or ``NoneType``
+ :param value: the new expiration time, or None
+ """
+ ifnotisinstance(value,datetime.datetime)andvalueisnotNone:
+ raiseValueError("Pass a datetime, or None")
+ self._properties['expirationTime']=_millis_from_datetime(value)
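+ # Example (illustrative): the expiration is stored as milliseconds since
+ # the epoch, so a timezone-aware UTC datetime is the safest input here;
+ # ``UTC`` is assumed to be the tzinfo helper from ``gcloud._helpers``.
+ #
+ #   import datetime
+ #   from gcloud._helpers import UTC
+ #   table.expires = (datetime.datetime.now(UTC) +
+ #                    datetime.timedelta(days=7))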
+
+ @property
+ deffriendly_name(self):
+ """Title of the table.
+
+ :rtype: str, or ``NoneType``
+ :returns: The name as set by the user, or None (the default).
+ """
+ returnself._properties.get('friendlyName')
+
+ @friendly_name.setter
+ deffriendly_name(self,value):
+ """Update title of the table.
+
+ :type value: str, or ``NoneType``
+ :param value: new title
+
+ :raises: ValueError for invalid value types.
+ """
+ ifnotisinstance(value,six.string_types)andvalueisnotNone:
+ raiseValueError("Pass a string, or None")
+ self._properties['friendlyName']=value
+
+ @property
+ deflocation(self):
+ """Location in which the table is hosted.
+
+ :rtype: str, or ``NoneType``
+ :returns: The location as set by the user, or None (the default).
+ """
+ returnself._properties.get('location')
+
+ @location.setter
+ deflocation(self,value):
+ """Update location in which the table is hosted.
+
+ :type value: str, or ``NoneType``
+ :param value: new location
+
+ :raises: ValueError for invalid value types.
+ """
+ ifnotisinstance(value,six.string_types)andvalueisnotNone:
+ raiseValueError("Pass a string, or None")
+ self._properties['location']=value
+
+ @property
+ defview_query(self):
+ """SQL query defining the table as a view.
+
+ :rtype: str, or ``NoneType``
+ :returns: The query as set by the user, or None (the default).
+ """
+ view=self._properties.get('view')
+ ifviewisnotNone:
+ returnview.get('query')
+
+ @view_query.setter
+ defview_query(self,value):
+ """Update SQL query defining the table as a view.
+
+ :type value: str
+ :param value: new query
+
+ :raises: ValueError for invalid value types.
+ """
+ ifnotisinstance(value,six.string_types):
+ raiseValueError("Pass a string")
+ self._properties['view']={'query':value}
+
+ @view_query.deleter
+ defview_query(self):
+ """Delete SQL query defining the table as a view."""
+ self._properties.pop('view',None)
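+ # Example (illustrative): defining a table as a view; once ``view_query``
+ # is set, ``_build_resource`` emits a ``view`` section instead of a
+ # ``schema``. ``dataset`` is a placeholder.
+ #
+ #   view = dataset.table('person_ages_view')
+ #   view.view_query = 'SELECT full_name, age FROM my_dataset.person_ages'
+ #   view.create()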
+
+ @classmethod
+ def from_api_repr(cls, resource, dataset):
+ """Factory: construct a table given its API representation
+
+ :type resource: dict
+ :param resource: table resource representation returned from the API
+
+ :type dataset: :class:`gcloud.bigquery.dataset.Dataset`
+ :param dataset: The dataset containing the table.
+
+ :rtype: :class:`gcloud.bigquery.table.Table`
+ :returns: Table parsed from ``resource``.
+ """
+ if ('tableReference' not in resource or
+         'tableId' not in resource['tableReference']):
+     raise KeyError('Resource lacks required identity information: '
+                    '["tableReference"]["tableId"]')
+ table_name = resource['tableReference']['tableId']
+ table = cls(table_name, dataset=dataset)
+ table._set_properties(resource)
+ return table
+
+ def_require_client(self,client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :rtype: :class:`gcloud.bigquery.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+ ifclientisNone:
+ client=self._dataset._client
+ returnclient
+
+ def_set_properties(self,api_response):
+ """Update properties from resource in body of ``api_response``
+
+ :type api_response: httplib2.Response
+ :param api_response: response returned from an API call
+ """
+ self._properties.clear()
+ cleaned=api_response.copy()
+ schema=cleaned.pop('schema',{'fields':()})
+ self.schema=_parse_schema_resource(schema)
+ if'creationTime'incleaned:
+ cleaned['creationTime']=float(cleaned['creationTime'])
+ if'lastModifiedTime'incleaned:
+ cleaned['lastModifiedTime']=float(cleaned['lastModifiedTime'])
+ if'expirationTime'incleaned:
+ cleaned['expirationTime']=float(cleaned['expirationTime'])
+ self._properties.update(cleaned)
+
+ def_build_resource(self):
+ """Generate a resource for ``create`` or ``update``."""
+ resource={
+ 'tableReference':{
+ 'projectId':self._dataset.project,
+ 'datasetId':self._dataset.name,
+ 'tableId':self.name},
+ }
+ ifself.descriptionisnotNone:
+ resource['description']=self.description
+
+ ifself.expiresisnotNone:
+ value=_millis_from_datetime(self.expires)
+ resource['expirationTime']=value
+
+ ifself.friendly_nameisnotNone:
+ resource['friendlyName']=self.friendly_name
+
+ ifself.locationisnotNone:
+ resource['location']=self.location
+
+ ifself.view_queryisnotNone:
+ view=resource['view']={}
+ view['query']=self.view_query
+ elifself._schema:
+ resource['schema']={
+ 'fields':_build_schema_resource(self._schema)
+ }
+ else:
+ raiseValueError("Set either 'view_query' or 'schema'.")
+
+ returnresource
+
+
+ def create(self, client=None):
+ """API call: create the table via a POST request
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/insert
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client=self._require_client(client)
+ path='/projects/%s/datasets/%s/tables'%(
+ self._dataset.project,self._dataset.name)
+ api_response=client.connection.api_request(
+ method='POST',path=path,data=self._build_resource())
+ self._set_properties(api_response)
+
+
+ def exists(self, client=None):
+ """API call: test for the existence of the table via a GET request
+
+ See
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/get
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the table.
+ """
+ client=self._require_client(client)
+
+ try:
+ client.connection.api_request(method='GET',path=self.path,
+ query_params={'fields':'id'})
+ exceptNotFound:
+ returnFalse
+ else:
+ returnTrue
+
+
+ def reload(self, client=None):
+ """API call: refresh table properties via a GET request
+
+ See
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/get
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client=self._require_client(client)
+
+ api_response=client.connection.api_request(
+ method='GET',path=self.path)
+ self._set_properties(api_response)
+
+
+ def patch(self,
+           client=None,
+           friendly_name=_MARKER,
+           description=_MARKER,
+           location=_MARKER,
+           expires=_MARKER,
+           view_query=_MARKER,
+           schema=_MARKER):
+ """API call: update individual table properties via a PATCH request
+
+ See
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/patch
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :type friendly_name: str or ``NoneType``
+ :param friendly_name: new user-friendly name (title) for the table.
+
+ :type description: str or ``NoneType``
+ :param description: new description for the table.
+
+ :type location: str or ``NoneType``
+ :param location: new location in which the table is hosted.
+
+ :type expires: :class:`datetime.datetime` or ``NoneType``
+ :param expires: point in time at which the table expires.
+
+ :type view_query: str
+ :param view_query: SQL query defining the table as a view
+
+ :type schema: list of :class:`SchemaField`
+ :param schema: fields describing the schema
+
+ :raises: ValueError for invalid value types.
+ """
+ client=self._require_client(client)
+
+ partial={}
+
+ ifexpiresisnot_MARKER:
+ if(notisinstance(expires,datetime.datetime)and
+ expiresisnotNone):
+ raiseValueError("Pass a datetime, or None")
+ partial['expirationTime']=_millis_from_datetime(expires)
+
+ ifdescriptionisnot_MARKER:
+ partial['description']=description
+
+ iffriendly_nameisnot_MARKER:
+ partial['friendlyName']=friendly_name
+
+ iflocationisnot_MARKER:
+ partial['location']=location
+
+ ifview_queryisnot_MARKER:
+ ifview_queryisNone:
+ partial['view']=None
+ else:
+ partial['view']={'query':view_query}
+
+ ifschemaisnot_MARKER:
+ ifschemaisNone:
+ partial['schema']=None
+ else:
+ partial['schema']={
+ 'fields':_build_schema_resource(schema)}
+
+ api_response=client.connection.api_request(
+ method='PATCH',path=self.path,data=partial)
+ self._set_properties(api_response)
+
+
+ def update(self, client=None):
+ """API call: update table properties via a PUT request
+
+ See
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/update
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client=self._require_client(client)
+ api_response=client.connection.api_request(
+ method='PUT',path=self.path,data=self._build_resource())
+ self._set_properties(api_response)
+
+
+ def delete(self, client=None):
+ """API call: delete the table via a DELETE request
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/tables/delete
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+ """
+ client=self._require_client(client)
+ client.connection.api_request(method='DELETE',path=self.path)
+
+
+ def fetch_data(self, max_results=None, page_token=None, client=None):
+ """API call: fetch the table data via a GET request
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list
+
+ .. note::
+
+ This method assumes that its instance's ``schema`` attribute is
+ up-to-date with the schema as defined on the back-end: if the
+ two schemas are not identical, the values returned may be
+ incomplete. To ensure that the local copy of the schema is
+ up-to-date, call the table's ``reload`` method.
+
+ :type max_results: integer or ``NoneType``
+ :param max_results: maximum number of rows to return.
+
+ :type page_token: str or ``NoneType``
+ :param page_token: token representing a cursor into the table's rows.
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :rtype: tuple
+ :returns: ``(row_data, total_rows, page_token)``, where ``row_data``
+ is a list of tuples, one per result row, containing only
+ the values; ``total_rows`` is a count of the total number
+ of rows in the table; and ``page_token`` is an opaque
+ string which can be used to fetch the next batch of rows
+ (``None`` if no further batches can be fetched).
+ """
+ client=self._require_client(client)
+ params={}
+
+ ifmax_resultsisnotNone:
+ params['maxResults']=max_results
+
+ ifpage_tokenisnotNone:
+ params['pageToken']=page_token
+
+ response=client.connection.api_request(method='GET',
+ path='%s/data'%self.path,
+ query_params=params)
+ total_rows=response.get('totalRows')
+ iftotal_rowsisnotNone:
+ total_rows=int(total_rows)
+ page_token=response.get('pageToken')
+ rows_data=_rows_from_json(response.get('rows',()),self._schema)
+
+ returnrows_data,total_rows,page_token
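+ # Example (illustrative): reading a table page by page; per the note
+ # above, ``reload()`` first so the local schema matches the server's.
+ #
+ #   table.reload()
+ #   rows, total_rows, token = table.fetch_data(max_results=500)
+ #   while token is not None:
+ #       more_rows, _, token = table.fetch_data(max_results=500,
+ #                                              page_token=token)
+ #       rows.extend(more_rows)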
+
+
+ def insert_data(self,
+                 rows,
+                 row_ids=None,
+                 skip_invalid_rows=None,
+                 ignore_unknown_values=None,
+                 template_suffix=None,
+                 client=None):
+ """API call: insert table data via a POST request
+
+ See:
+ https://cloud.google.com/bigquery/docs/reference/v2/tabledata/insertAll
+
+ :type rows: list of tuples
+ :param rows: Row data to be inserted. Each tuple should contain data
+ for each schema field on the current table and in the
+ same order as the schema fields.
+
+ :type row_ids: list of string
+ :param row_ids: Unique ids, one per row being inserted. If not
+ passed, no de-duplication occurs.
+
+ :type skip_invalid_rows: boolean or ``NoneType``
+ :param skip_invalid_rows: skip rows w/ invalid data?
+
+ :type ignore_unknown_values: boolean or ``NoneType``
+ :param ignore_unknown_values: ignore columns beyond schema?
+
+ :type template_suffix: str or ``NoneType``
+ :param template_suffix: treat ``name`` as a template table and provide
+ a suffix. BigQuery will create the table
+ ``<name> + <template_suffix>`` based on the
+ schema of the template table. See:
+ https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables
+
+ :type client: :class:`gcloud.bigquery.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current dataset.
+
+ :rtype: list of mappings
+ :returns: One mapping per row with insert errors: the "index" key
+ identifies the row, and the "errors" key contains a list
+ of the mappings describing one or more problems with the
+ row.
+ """
+ client=self._require_client(client)
+ rows_info=[]
+ data={'rows':rows_info}
+
+ forindex,rowinenumerate(rows):
+ row_info={}
+
+ forfield,valueinzip(self._schema,row):
+ iffield.field_type=='TIMESTAMP'andvalueisnotNone:
+ # BigQuery stores TIMESTAMP data internally as a
+ # UNIX timestamp with microsecond precision.
+ # Specifies the number of seconds since the epoch.
+ value=_microseconds_from_datetime(value)*1e-6
+ row_info[field.name]=value
+
+ info={'json':row_info}
+ ifrow_idsisnotNone:
+ info['insertId']=row_ids[index]
+
+ rows_info.append(info)
+
+ ifskip_invalid_rowsisnotNone:
+ data['skipInvalidRows']=skip_invalid_rows
+
+ ifignore_unknown_valuesisnotNone:
+ data['ignoreUnknownValues']=ignore_unknown_values
+
+ iftemplate_suffixisnotNone:
+ data['templateSuffix']=template_suffix
+
+ response=client.connection.api_request(
+ method='POST',
+ path='%s/insertAll'%self.path,
+ data=data)
+ errors=[]
+
+ forerrorinresponse.get('insertErrors',()):
+ errors.append({'index':int(error['index']),
+ 'errors':error['errors']})
+
+ returnerrors
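+# Example (illustrative): streaming rows with de-duplication ids and
+# checking the per-row errors returned above.
+#
+#   rows = [('Phred Phlyntstone', 32), ('Wylma Phlyntstone', 29)]
+#   errors = table.insert_data(rows, row_ids=['id-0', 'id-1'])
+#   for error in errors:
+#       print('Row %d failed: %s' % (error['index'], error['errors']))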
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Parent client for calling the Google Cloud Bigtable API.
+
+This is the base from which all interactions with the API occur.
+
+In the hierarchy of API concepts
+
+* a :class:`Client` owns an :class:`.Instance`
+* a :class:`.Instance` owns a :class:`Table <gcloud.bigtable.table.Table>`
+* a :class:`Table <gcloud.bigtable.table.Table>` owns a
+ :class:`ColumnFamily <.column_family.ColumnFamily>`
+* a :class:`Table <gcloud.bigtable.table.Table>` owns a :class:`Row <.row.Row>`
+ (and all the cells in the row)
+"""
+
+
+frompkg_resourcesimportget_distribution
+
+fromgrpc.betaimportimplementations
+
+fromgcloud.bigtable._generatedimport(
+ bigtable_instance_admin_pb2asinstance_admin_v2_pb2)
+# V2 table admin service
+fromgcloud.bigtable._generatedimport(
+ bigtable_table_admin_pb2astable_admin_v2_pb2)
+# V2 data service
+fromgcloud.bigtable._generatedimport(
+ bigtable_pb2asdata_v2_pb2)
+
+fromgcloud.bigtable._generatedimport(
+ operations_grpc_pb2asoperations_grpc_v2_pb2)
+
+fromgcloud.bigtable.clusterimportDEFAULT_SERVE_NODES
+fromgcloud.bigtable.instanceimportInstance
+fromgcloud.bigtable.instanceimport_EXISTING_INSTANCE_LOCATION_ID
+fromgcloud.clientimport_ClientFactoryMixin
+fromgcloud.clientimport_ClientProjectMixin
+fromgcloud.credentialsimportget_credentials
+
+
+TABLE_STUB_FACTORY_V2=(
+ table_admin_v2_pb2.beta_create_BigtableTableAdmin_stub)
+TABLE_ADMIN_HOST_V2='bigtableadmin.googleapis.com'
+"""Table Admin API request host."""
+TABLE_ADMIN_PORT_V2=443
+"""Table Admin API request port."""
+
+INSTANCE_STUB_FACTORY_V2=(
+ instance_admin_v2_pb2.beta_create_BigtableInstanceAdmin_stub)
+INSTANCE_ADMIN_HOST_V2='bigtableadmin.googleapis.com'
+"""Cluster Admin API request host."""
+INSTANCE_ADMIN_PORT_V2=443
+"""Cluster Admin API request port."""
+
+DATA_STUB_FACTORY_V2=data_v2_pb2.beta_create_Bigtable_stub
+DATA_API_HOST_V2='bigtable.googleapis.com'
+"""Data API request host."""
+DATA_API_PORT_V2=443
+"""Data API request port."""
+
+OPERATIONS_STUB_FACTORY_V2=operations_grpc_v2_pb2.beta_create_Operations_stub
+OPERATIONS_API_HOST_V2=INSTANCE_ADMIN_HOST_V2
+OPERATIONS_API_PORT_V2=INSTANCE_ADMIN_PORT_V2
+
+ADMIN_SCOPE='https://www.googleapis.com/auth/bigtable.admin'
+"""Scope for interacting with the Cluster Admin and Table Admin APIs."""
+DATA_SCOPE='https://www.googleapis.com/auth/bigtable.data'
+"""Scope for reading and writing table data."""
+READ_ONLY_SCOPE='https://www.googleapis.com/auth/bigtable.data.readonly'
+"""Scope for reading table data."""
+
+DEFAULT_TIMEOUT_SECONDS=10
+"""The default timeout to use for API requests."""
+
+DEFAULT_USER_AGENT='gcloud-python/{0}'.format(
+ get_distribution('gcloud').version)
+"""The default user agent for API requests."""
+
+
+
+class Client(_ClientFactoryMixin, _ClientProjectMixin):
+ """Client for interacting with Google Cloud Bigtable API.
+
+ .. note::
+
+ Since the Cloud Bigtable API requires the gRPC transport, no
+ ``http`` argument is accepted by this class.
+
+ :type project: :class:`str` or :func:`unicode <unicode>`
+ :param project: (Optional) The ID of the project which owns the
+ instances, tables and data. If not provided, will
+ attempt to determine from the environment.
+
+ :type credentials:
+ :class:`OAuth2Credentials <oauth2client.client.OAuth2Credentials>` or
+ :data:`NoneType <types.NoneType>`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ client. If not provided, defaults to the Google
+ Application Default Credentials.
+
+ :type read_only: bool
+ :param read_only: (Optional) Boolean indicating if the data scope should be
+ for reading only (or for writing as well). Defaults to
+ :data:`False`.
+
+ :type admin: bool
+ :param admin: (Optional) Boolean indicating if the client will be used to
+ interact with the Instance Admin or Table Admin APIs. This
+ requires the :const:`ADMIN_SCOPE`. Defaults to :data:`False`.
+
+ :type user_agent: str
+ :param user_agent: (Optional) The user agent to be used with API request.
+ Defaults to :const:`DEFAULT_USER_AGENT`.
+
+ :type timeout_seconds: int
+ :param timeout_seconds: Number of seconds for request time-out. If not
+ passed, defaults to
+ :const:`DEFAULT_TIMEOUT_SECONDS`.
+
+ :raises: :class:`ValueError <exceptions.ValueError>` if both ``read_only``
+ and ``admin`` are :data:`True`
+ """
+
+ def__init__(self,project=None,credentials=None,
+ read_only=False,admin=False,user_agent=DEFAULT_USER_AGENT,
+ timeout_seconds=DEFAULT_TIMEOUT_SECONDS):
+ _ClientProjectMixin.__init__(self, project=project)
+ if credentials is None:
+     credentials = get_credentials()
+
+ if read_only and admin:
+     raise ValueError('A read-only client cannot also perform '
+                      'administrative actions.')
+
+ scopes=[]
+ ifread_only:
+ scopes.append(READ_ONLY_SCOPE)
+ else:
+ scopes.append(DATA_SCOPE)
+
+ ifadmin:
+ scopes.append(ADMIN_SCOPE)
+
+ self._admin=bool(admin)
+ try:
+ credentials=credentials.create_scoped(scopes)
+ exceptAttributeError:
+ pass
+ self._credentials=credentials
+ self.user_agent=user_agent
+ self.timeout_seconds=timeout_seconds
+
+ # These will be set in start().
+ self._data_stub_internal=None
+ self._instance_stub_internal=None
+ self._operations_stub_internal=None
+ self._table_stub_internal=None
+
+
+ def copy(self):
+ """Make a copy of this client.
+
+ Copies the local data stored as simple types but does not copy the
+ current state of any open connections with the Cloud Bigtable API.
+
+ :rtype: :class:`.Client`
+ :returns: A copy of the current client.
+ """
+ credentials=self._credentials
+ copied_creds=credentials.create_scoped(credentials.scopes)
+ returnself.__class__(
+ self.project,
+ copied_creds,
+ READ_ONLY_SCOPEincopied_creds.scopes,
+ self._admin,
+ self.user_agent,
+ self.timeout_seconds,
+ )
+
+ @property
+ defcredentials(self):
+ """Getter for client's credentials.
+
+ :rtype:
+ :class:`OAuth2Credentials <oauth2client.client.OAuth2Credentials>`
+ :returns: The credentials stored on the client.
+ """
+ returnself._credentials
+
+ @property
+ defproject_name(self):
+ """Project name to be used with Instance Admin API.
+
+ .. note::
+
+ This property will not change if ``project`` does not, but the
+ return value is not cached.
+
+ The project name is of the form
+
+ ``"projects/{project}"``
+
+ :rtype: str
+ :returns: The project name to be used with the Cloud Bigtable Admin
+ API RPC service.
+ """
+ return'projects/'+self.project
+
+ @property
+ def_data_stub(self):
+ """Getter for the gRPC stub used for the Data API.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the current
+ client has not been :meth:`start`-ed.
+ """
+ ifself._data_stub_internalisNone:
+ raiseValueError('Client has not been started.')
+ returnself._data_stub_internal
+
+ @property
+ def_instance_stub(self):
+ """Getter for the gRPC stub used for the Instance Admin API.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the current
+ client is not an admin client or if it has not been
+ :meth:`start`-ed.
+ """
+ ifnotself._admin:
+ raiseValueError('Client is not an admin client.')
+ ifself._instance_stub_internalisNone:
+ raiseValueError('Client has not been started.')
+ returnself._instance_stub_internal
+
+ @property
+ def_operations_stub(self):
+ """Getter for the gRPC stub used for the Operations API.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the current
+ client is not an admin client or if it has not been
+ :meth:`start`-ed.
+ """
+ ifnotself._admin:
+ raiseValueError('Client is not an admin client.')
+ ifself._operations_stub_internalisNone:
+ raiseValueError('Client has not been started.')
+ returnself._operations_stub_internal
+
+ @property
+ def_table_stub(self):
+ """Getter for the gRPC stub used for the Table Admin API.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the current
+ client is not an admin client or if it has not been
+ :meth:`start`-ed.
+ """
+ ifnotself._admin:
+ raiseValueError('Client is not an admin client.')
+ ifself._table_stub_internalisNone:
+ raiseValueError('Client has not been started.')
+ returnself._table_stub_internal
+
+ def_make_data_stub(self):
+ """Creates gRPC stub to make requests to the Data API.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ """
+ return_make_stub(self,DATA_STUB_FACTORY_V2,
+ DATA_API_HOST_V2,DATA_API_PORT_V2)
+
+ def_make_instance_stub(self):
+ """Creates gRPC stub to make requests to the Instance Admin API.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ """
+ return_make_stub(self,INSTANCE_STUB_FACTORY_V2,
+ INSTANCE_ADMIN_HOST_V2,INSTANCE_ADMIN_PORT_V2)
+
+ def_make_operations_stub(self):
+ """Creates gRPC stub to make requests to the Operations API.
+
+ These are for long-running operations of the Instance Admin API,
+ hence the host and port matching.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ """
+ return_make_stub(self,OPERATIONS_STUB_FACTORY_V2,
+ OPERATIONS_API_HOST_V2,OPERATIONS_API_PORT_V2)
+
+ def_make_table_stub(self):
+ """Creates gRPC stub to make requests to the Table Admin API.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: A gRPC stub object.
+ """
+ return_make_stub(self,TABLE_STUB_FACTORY_V2,
+ TABLE_ADMIN_HOST_V2,TABLE_ADMIN_PORT_V2)
+
+
+ def is_started(self):
+ """Check if the client has been started.
+
+ :rtype: bool
+ :returns: Boolean indicating if the client has been started.
+ """
+ returnself._data_stub_internalisnotNone
+
+
+ def start(self):
+ """Prepare the client to make requests.
+
+ Activates gRPC contexts for making requests to the Bigtable
+ Service(s).
+ """
+ ifself.is_started():
+ return
+
+ # NOTE: We __enter__ the stubs more-or-less permanently. This is
+ # because only after entering the context managers is the
+ # connection created. We don't want to immediately close
+ # those connections since the client will make many
+ # requests with it over HTTP/2.
+ self._data_stub_internal=self._make_data_stub()
+ self._data_stub_internal.__enter__()
+ ifself._admin:
+ self._instance_stub_internal=self._make_instance_stub()
+ self._operations_stub_internal=self._make_operations_stub()
+ self._table_stub_internal=self._make_table_stub()
+
+ self._instance_stub_internal.__enter__()
+ self._operations_stub_internal.__enter__()
+ self._table_stub_internal.__enter__()
+
+ def__enter__(self):
+ """Starts the client as a context manager."""
+ self.start()
+ returnself
+
+
+ def stop(self):
+ """Closes all the open gRPC clients."""
+ ifnotself.is_started():
+ return
+
+ # When exit-ing, we pass None as the exception type, value and
+ # traceback to __exit__.
+ self._data_stub_internal.__exit__(None,None,None)
+ ifself._admin:
+ self._instance_stub_internal.__exit__(None,None,None)
+ self._operations_stub_internal.__exit__(None,None,None)
+ self._table_stub_internal.__exit__(None,None,None)
+
+ self._data_stub_internal=None
+ self._instance_stub_internal=None
+ self._operations_stub_internal=None
+ self._table_stub_internal=None
+
+ def__exit__(self,exc_type,exc_val,exc_t):
+ """Stops the client as a context manager."""
+ self.stop()
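+ # Example (illustrative): the client works as a context manager, so the
+ # gRPC stubs opened by ``start()`` are closed by ``stop()`` on exit.
+ #
+ #   with Client(project='my-project', admin=True) as client:
+ #       instances, failed_locations = client.list_instances()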
+
+
+ def instance(self, instance_id, location=_EXISTING_INSTANCE_LOCATION_ID,
+              display_name=None, serve_nodes=DEFAULT_SERVE_NODES):
+ """Factory to create a instance associated with this client.
+
+ :type instance_id: str
+ :param instance_id: The ID of the instance.
+
+ :type location: string
+ :param location: location name, in form
+ ``projects/<project>/locations/<location>``; used to
+ set up the instance's cluster.
+
+ :type display_name: str
+ :param display_name: (Optional) The display name for the instance in
+ the Cloud Console UI. (Must be between 4 and 30
+ characters.) If this value is not set in the
+ constructor, will fall back to the instance ID.
+
+ :type serve_nodes: int
+ :param serve_nodes: (Optional) The number of nodes in the instance's
+ cluster; used to set up the instance's cluster.
+
+ :rtype: :class:`.Instance`
+ :returns: an instance owned by this client.
+ """
+ returnInstance(instance_id,self,location,
+ display_name=display_name,serve_nodes=serve_nodes)
+
+
+ def list_instances(self):
+ """List instances owned by the project.
+
+ :rtype: tuple
+ :returns: A pair of results, the first is a list of
+ :class:`.Instance` objects returned and the second is a
+ list of strings (the failed locations in the request).
+ """
+ request_pb=instance_admin_v2_pb2.ListInstancesRequest(
+ parent=self.project_name)
+
+ response=self._instance_stub.ListInstances(
+ request_pb,self.timeout_seconds)
+
+ instances=[Instance.from_pb(instance_pb,self)
+ forinstance_pbinresponse.instances]
+ returninstances,response.failed_locations
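+ # Example (illustrative): listing instances with a started admin client;
+ # ``display_name`` is assumed to be an attribute of ``Instance``.
+ #
+ #   client.start()
+ #   instances, failed_locations = client.list_instances()
+ #   for instance in instances:
+ #       print(instance.display_name)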
+
+
+class_MetadataPlugin(object):
+ """Callable class to transform metadata for gRPC requests.
+
+ :type client: :class:`.client.Client`
+ :param client: The client that owns the instance.
+ Provides authorization and user agent.
+ """
+
+ def__init__(self,client):
+ self._credentials=client.credentials
+ self._user_agent=client.user_agent
+
+ def__call__(self,unused_context,callback):
+ """Adds authorization header to request metadata."""
+ access_token=self._credentials.get_access_token().access_token
+ headers=[
+ ('Authorization','Bearer '+access_token),
+ ('User-agent',self._user_agent),
+ ]
+ callback(headers,None)
+
+
+def_make_stub(client,stub_factory,host,port):
+ """Makes a stub for an RPC service.
+
+ Uses / depends on the beta implementation of gRPC.
+
+ :type client: :class:`.client.Client`
+ :param client: The client that owns the instance.
+ Provides authorization and user agent.
+
+ :type stub_factory: callable
+ :param stub_factory: A factory which will create a gRPC stub for
+ a given service.
+
+ :type host: str
+ :param host: The host for the service.
+
+ :type port: int
+ :param port: The port for the service.
+
+ :rtype: :class:`grpc.beta._stub._AutoIntermediary`
+ :returns: The stub object used to make gRPC requests to a given API.
+ """
+ # Leaving the first argument to ssl_channel_credentials() as None
+ # loads root certificates from `grpc/_adapter/credentials/roots.pem`.
+ transport_creds=implementations.ssl_channel_credentials(None,None,None)
+ custom_metadata_plugin=_MetadataPlugin(client)
+ auth_creds=implementations.metadata_call_credentials(
+ custom_metadata_plugin,name='google_creds')
+ channel_creds=implementations.composite_channel_credentials(
+ transport_creds,auth_creds)
+ channel=implementations.secure_channel(host,port,channel_creds)
+ returnstub_factory(channel)
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""User friendly container for Google Cloud Bigtable Cluster."""
+
+
+importre
+
+fromgoogle.longrunningimportoperations_pb2
+
+fromgcloud.bigtable._generatedimport(
+ instance_pb2asdata_v2_pb2)
+fromgcloud.bigtable._generatedimport(
+ bigtable_instance_admin_pb2asmessages_v2_pb2)
+
+
+_CLUSTER_NAME_RE=re.compile(r'^projects/(?P<project>[^/]+)/'
+ r'instances/(?P<instance>[^/]+)/clusters/'
+ r'(?P<cluster_id>[a-z][-a-z0-9]*)$')
+_OPERATION_NAME_RE=re.compile(r'^operations/'
+ r'projects/([^/]+)/'
+ r'instances/([^/]+)/'
+ r'clusters/([a-z][-a-z0-9]*)/'
+ r'operations/(?P<operation_id>\d+)$')
+_TYPE_URL_MAP={
+}
+
+DEFAULT_SERVE_NODES=3
+"""Default number of nodes to use when creating a cluster."""
+
+
+def_prepare_create_request(cluster):
+ """Creates a protobuf request for a CreateCluster request.
+
+ :type cluster: :class:`Cluster`
+ :param cluster: The cluster to be created.
+
+ :rtype: :class:`.messages_v2_pb2.CreateClusterRequest`
+ :returns: The CreateCluster request object containing the cluster info.
+ """
+ returnmessages_v2_pb2.CreateClusterRequest(
+ parent=cluster._instance.name,
+ cluster_id=cluster.cluster_id,
+ cluster=data_v2_pb2.Cluster(
+ serve_nodes=cluster.serve_nodes,
+ ),
+ )
+
+
+def_parse_pb_any_to_native(any_val,expected_type=None):
+ """Convert a serialized "google.protobuf.Any" value to actual type.
+
+ :type any_val: :class:`google.protobuf.any_pb2.Any`
+ :param any_val: A serialized protobuf value container.
+
+ :type expected_type: str
+ :param expected_type: (Optional) The type URL we expect ``any_val``
+ to have.
+
+ :rtype: object
+ :returns: The de-serialized object.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the
+ ``expected_type`` does not match the ``type_url`` on the input.
+ """
+ ifexpected_typeisnotNoneandexpected_type!=any_val.type_url:
+ raiseValueError('Expected type: %s, Received: %s'%(
+ expected_type,any_val.type_url))
+ container_class=_TYPE_URL_MAP[any_val.type_url]
+ returncontainer_class.FromString(any_val.value)
+
+
+def_process_operation(operation_pb):
+ """Processes a create protobuf response.
+
+ :type operation_pb: :class:`google.longrunning.operations_pb2.Operation`
+ :param operation_pb: The long-running operation response from a
+ Create/Update/Undelete cluster request.
+
+ :rtype: tuple
+ :returns: integer ID of the operation (``operation_id``).
+ :raises: :class:`ValueError <exceptions.ValueError>` if the operation name
+ doesn't match the :data:`_OPERATION_NAME_RE` regex.
+ """
+ match=_OPERATION_NAME_RE.match(operation_pb.name)
+ ifmatchisNone:
+ raiseValueError('Operation name was not in the expected '
+ 'format after a cluster modification.',
+ operation_pb.name)
+ operation_id=int(match.group('operation_id'))
+
+ returnoperation_id
+
+
+
+class Operation(object):
+ """Representation of a Google API Long-Running Operation.
+
+ In particular, these will be the result of operations on
+ clusters using the Cloud Bigtable API.
+
+ :type op_type: str
+ :param op_type: The type of operation being performed. Expect
+ ``create``, ``update`` or ``undelete``.
+
+ :type op_id: int
+ :param op_id: The ID of the operation.
+
+ :type cluster: :class:`Cluster`
+ :param cluster: The cluster that created the operation.
+ """
+
+ def__init__(self,op_type,op_id,cluster=None):
+ self.op_type=op_type
+ self.op_id=op_id
+ self._cluster=cluster
+ self._complete=False
+
+ def__eq__(self,other):
+ ifnotisinstance(other,self.__class__):
+ returnFalse
+ return(other.op_type==self.op_typeand
+ other.op_id==self.op_idand
+ other._cluster==self._clusterand
+ other._complete==self._complete)
+
+ def__ne__(self,other):
+ returnnotself.__eq__(other)
+
+
+ def finished(self):
+ """Check if the operation has finished.
+
+ :rtype: bool
+ :returns: A boolean indicating if the current operation has completed.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the operation
+ has already completed.
+ """
+ ifself._complete:
+ raiseValueError('The operation has completed.')
+
+ operation_name=('operations/'+self._cluster.name+
+ '/operations/%d'%(self.op_id,))
+ request_pb=operations_pb2.GetOperationRequest(name=operation_name)
+ # We expect a `google.longrunning.operations_pb2.Operation`.
+ client=self._cluster._instance._client
+ operation_pb=client._operations_stub.GetOperation(
+ request_pb,client.timeout_seconds)
+
+ ifoperation_pb.done:
+ self._complete=True
+ returnTrue
+ else:
+ returnFalse
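+ # Example (illustrative): polling a long-running operation returned by
+ # ``Cluster.create()`` until the server reports it done.
+ #
+ #   import time
+ #   operation = cluster.create()
+ #   while not operation.finished():
+ #       time.sleep(5)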
+
+
+
+class Cluster(object):
+ """Representation of a Google Cloud Bigtable Cluster.
+
+ We can use a :class:`Cluster` to:
+
+ * :meth:`reload` itself
+ * :meth:`create` itself
+ * :meth:`update` itself
+ * :meth:`delete` itself
+ * :meth:`undelete` itself
+
+ .. note::
+
+ For now, we leave out the ``default_storage_type`` (an enum)
+ which if not sent will end up as :data:`.data_v2_pb2.STORAGE_SSD`.
+
+ :type cluster_id: str
+ :param cluster_id: The ID of the cluster.
+
+ :type instance: :class:`.instance.Instance`
+ :param instance: The instance where the cluster resides.
+
+ :type serve_nodes: int
+ :param serve_nodes: (Optional) The number of nodes in the cluster.
+ Defaults to :data:`DEFAULT_SERVE_NODES`.
+ """
+
+ def__init__(self,cluster_id,instance,
+ serve_nodes=DEFAULT_SERVE_NODES):
+ self.cluster_id=cluster_id
+ self._instance=instance
+ self.serve_nodes=serve_nodes
+ self.location=None
+
+ def_update_from_pb(self,cluster_pb):
+ """Refresh self from the server-provided protobuf.
+
+ Helper for :meth:`from_pb` and :meth:`reload`.
+ """
+ ifnotcluster_pb.serve_nodes:# Simple field (int32)
+ raiseValueError('Cluster protobuf does not contain serve_nodes')
+ self.serve_nodes=cluster_pb.serve_nodes
+ self.location=cluster_pb.location
+
+ @classmethod
+ def from_pb(cls, cluster_pb, instance):
+ """Creates a cluster instance from a protobuf.
+
+ :type cluster_pb: :class:`instance_pb2.Cluster`
+ :param cluster_pb: A cluster protobuf object.
+
+ :type instance: :class:`.instance.Instance`
+ :param instance: The instance that owns the cluster.
+
+ :rtype: :class:`Cluster`
+ :returns: The cluster parsed from the protobuf response.
+ :raises:
+ :class:`ValueError <exceptions.ValueError>` if the cluster
+ name does not match
+ ``projects/{project}/instances/{instance}/clusters/{cluster_id}``
+ or if the parsed project ID does not match the project ID
+ on the client.
+ """
+ match=_CLUSTER_NAME_RE.match(cluster_pb.name)
+ ifmatchisNone:
+ raiseValueError('Cluster protobuf name was not in the '
+ 'expected format.',cluster_pb.name)
+ ifmatch.group('project')!=instance._client.project:
+ raiseValueError('Project ID on cluster does not match the '
+ 'project ID on the client')
+ ifmatch.group('instance')!=instance.instance_id:
+ raiseValueError('Instance ID on cluster does not match the '
+ 'instance ID on the client')
+
+ result=cls(match.group('cluster_id'),instance)
+ result._update_from_pb(cluster_pb)
+ returnresult
+
+
+    def copy(self):
+        """Make a copy of this cluster.
+
+        Copies the local data stored as simple types and copies the client
+        attached to this instance.
+
+        :rtype: :class:`.Cluster`
+        :returns: A copy of the current cluster.
+        """
+        new_instance = self._instance.copy()
+        return self.__class__(self.cluster_id, new_instance,
+                              serve_nodes=self.serve_nodes)
+
+    @property
+    def name(self):
+        """Cluster name used in requests.
+
+        .. note::
+
+            This property will not change if ``_instance`` and ``cluster_id``
+            do not, but the return value is not cached.
+
+        The cluster name is of the form
+
+            ``"projects/{project}/instances/{instance}/clusters/{cluster_id}"``
+
+        :rtype: str
+        :returns: The cluster name.
+        """
+        return self._instance.name + '/clusters/' + self.cluster_id
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        # NOTE: This does not compare the configuration values, such as
+        #       the serve_nodes. Instead, it only compares
+        #       identifying values instance, cluster ID and client. This is
+        #       intentional, since the same cluster can be in different states
+        #       if not synchronized. Clusters with similar instance/cluster
+        #       settings but different clients can't be used in the same way.
+        return (other.cluster_id == self.cluster_id and
+                other._instance == self._instance)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+    def reload(self):
+        """Reload the metadata for this cluster."""
+        request_pb = messages_v2_pb2.GetClusterRequest(name=self.name)
+        # We expect a `._generated.instance_pb2.Cluster`.
+        cluster_pb = self._instance._client._instance_stub.GetCluster(
+            request_pb, self._instance._client.timeout_seconds)
+
+        # NOTE: _update_from_pb does not check that the project, instance and
+        #       cluster ID on the response match the request.
+        self._update_from_pb(cluster_pb)
+
+    def create(self):
+        """Create this cluster.
+
+        .. note::
+
+            Uses the ``project``, ``instance`` and ``cluster_id`` on the
+            current :class:`Cluster` in addition to the ``serve_nodes``.
+            To change them before creating, reset the values via
+
+            .. code:: python
+
+                cluster.serve_nodes = 8
+                cluster.cluster_id = 'i-changed-my-mind'
+
+            before calling :meth:`create`.
+
+        :rtype: :class:`Operation`
+        :returns: The long-running operation corresponding to the
+                  create operation.
+        """
+        request_pb = _prepare_create_request(self)
+        # We expect a `google.longrunning.operations_pb2.Operation`.
+        operation_pb = self._instance._client._instance_stub.CreateCluster(
+            request_pb, self._instance._client.timeout_seconds)
+
+        op_id = _process_operation(operation_pb)
+        return Operation('create', op_id, cluster=self)
+
+
+    def update(self):
+        """Update this cluster.
+
+        .. note::
+
+            Updates the ``serve_nodes``. If you'd like to
+            change them before updating, reset the values via
+
+            .. code:: python
+
+                cluster.serve_nodes = 8
+
+            before calling :meth:`update`.
+
+        :rtype: :class:`Operation`
+        :returns: The long-running operation corresponding to the
+                  update operation.
+        """
+        request_pb = data_v2_pb2.Cluster(
+            name=self.name,
+            serve_nodes=self.serve_nodes,
+        )
+        # Ignore expected `._generated.instance_pb2.Cluster`.
+        operation_pb = self._instance._client._instance_stub.UpdateCluster(
+            request_pb, self._instance._client.timeout_seconds)
+
+        op_id = _process_operation(operation_pb)
+        return Operation('update', op_id, cluster=self)
+
+
+    def delete(self):
+        """Delete this cluster.
+
+        Marks a cluster and all of its tables for permanent deletion in 7 days.
+
+        Immediately upon completion of the request:
+
+        * Billing will cease for all of the cluster's reserved resources.
+        * The cluster's ``delete_time`` field will be set 7 days in the future.
+
+        Soon afterward:
+
+        * All tables within the cluster will become unavailable.
+
+        Prior to the cluster's ``delete_time``:
+
+        * The cluster can be recovered with a call to ``UndeleteCluster``.
+        * All other attempts to modify or delete the cluster will be rejected.
+
+        At the cluster's ``delete_time``:
+
+        * The cluster and **all of its tables** will immediately and
+          irrevocably disappear from the API, and their data will be
+          permanently deleted.
+        """
+        request_pb = messages_v2_pb2.DeleteClusterRequest(name=self.name)
+        # We expect a `google.protobuf.empty_pb2.Empty`
+        self._instance._client._instance_stub.DeleteCluster(
+            request_pb, self._instance._client.timeout_seconds)
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""User friendly container for Google Cloud Bigtable Column Family."""
+
+
+import datetime
+
+from google.protobuf import duration_pb2
+
+from gcloud._helpers import _total_seconds
+from gcloud.bigtable._generated import (
+    table_pb2 as table_v2_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_table_admin_pb2 as table_admin_v2_pb2)
+
+
+def _timedelta_to_duration_pb(timedelta_val):
+    """Convert a Python timedelta object to a duration protobuf.
+
+    .. note::
+
+        The Python timedelta has a granularity of microseconds while
+        the protobuf duration type has a granularity of nanoseconds.
+
+    :type timedelta_val: :class:`datetime.timedelta`
+    :param timedelta_val: A timedelta object.
+
+    :rtype: :class:`google.protobuf.duration_pb2.Duration`
+    :returns: A duration object equivalent to the time delta.
+    """
+    seconds_decimal = _total_seconds(timedelta_val)
+    # Truncate the parts other than the integer.
+    seconds = int(seconds_decimal)
+    if seconds_decimal < 0:
+        signed_micros = timedelta_val.microseconds - 10 ** 6
+    else:
+        signed_micros = timedelta_val.microseconds
+    # Convert microseconds to nanoseconds.
+    nanos = 1000 * signed_micros
+    return duration_pb2.Duration(seconds=seconds, nanos=nanos)
+
+
+def _duration_pb_to_timedelta(duration_pb):
+    """Convert a duration protobuf to a Python timedelta object.
+
+    .. note::
+
+        The Python timedelta has a granularity of microseconds while
+        the protobuf duration type has a granularity of nanoseconds.
+
+    :type duration_pb: :class:`google.protobuf.duration_pb2.Duration`
+    :param duration_pb: A protobuf duration object.
+
+    :rtype: :class:`datetime.timedelta`
+    :returns: The converted timedelta object.
+    """
+    return datetime.timedelta(
+        seconds=duration_pb.seconds,
+        microseconds=(duration_pb.nanos / 1000.0),
+    )
+
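+# Illustrative round trip (not part of the original module) between the two
+# helpers above, using only standard-library inputs:
+#
+#     import datetime
+#
+#     delta = datetime.timedelta(seconds=2, microseconds=500)
+#     duration_pb = _timedelta_to_duration_pb(delta)
+#     # duration_pb now has seconds=2 and nanos=500000.
+#     assert _duration_pb_to_timedelta(duration_pb) == delta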
+
+
+class GarbageCollectionRule(object):
+    """Garbage collection rule for column families within a table.
+
+    Cells in the column family (within a table) fitting the rule will be
+    deleted during garbage collection.
+
+    .. note::
+
+        This class is a do-nothing base class for all GC rules.
+
+    .. note::
+
+        A string ``gc_expression`` can also be used with API requests, but
+        that value would be superseded by a ``gc_rule``. As a result, we
+        don't support that feature and instead support GC rules via the
+        native classes here.
+    """
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+
+class MaxVersionsGCRule(GarbageCollectionRule):
+    """Garbage collection limiting the number of versions of a cell.
+
+    :type max_num_versions: int
+    :param max_num_versions: The maximum number of versions.
+    """
+
+    def __init__(self, max_num_versions):
+        self.max_num_versions = max_num_versions
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.max_num_versions == self.max_num_versions
+
+    def to_pb(self):
+        """Converts the garbage collection rule to a protobuf.
+
+        :rtype: :class:`.table_v2_pb2.GcRule`
+        :returns: The converted current object.
+        """
+        return table_v2_pb2.GcRule(max_num_versions=self.max_num_versions)
+
+
+
+class MaxAgeGCRule(GarbageCollectionRule):
+    """Garbage collection limiting the age of a cell.
+
+    :type max_age: :class:`datetime.timedelta`
+    :param max_age: The maximum age allowed for a cell in the table.
+    """
+
+    def __init__(self, max_age):
+        self.max_age = max_age
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.max_age == self.max_age
+
+    def to_pb(self):
+        """Converts the garbage collection rule to a protobuf.
+
+        :rtype: :class:`.table_v2_pb2.GcRule`
+        :returns: The converted current object.
+        """
+        max_age = _timedelta_to_duration_pb(self.max_age)
+        return table_v2_pb2.GcRule(max_age=max_age)
+
+
+
+class GCRuleUnion(GarbageCollectionRule):
+    """Union of garbage collection rules.
+
+    :type rules: list
+    :param rules: List of :class:`GarbageCollectionRule`.
+    """
+
+    def __init__(self, rules):
+        self.rules = rules
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.rules == self.rules
+
+    def to_pb(self):
+        """Converts the union into a single GC rule as a protobuf.
+
+        :rtype: :class:`.table_v2_pb2.GcRule`
+        :returns: The converted current object.
+        """
+        union = table_v2_pb2.GcRule.Union(
+            rules=[rule.to_pb() for rule in self.rules])
+        return table_v2_pb2.GcRule(union=union)
+
+
+
+class GCRuleIntersection(GarbageCollectionRule):
+    """Intersection of garbage collection rules.
+
+    :type rules: list
+    :param rules: List of :class:`GarbageCollectionRule`.
+    """
+
+    def __init__(self, rules):
+        self.rules = rules
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.rules == self.rules
+
+    def to_pb(self):
+        """Converts the intersection into a single GC rule as a protobuf.
+
+        :rtype: :class:`.table_v2_pb2.GcRule`
+        :returns: The converted current object.
+        """
+        intersection = table_v2_pb2.GcRule.Intersection(
+            rules=[rule.to_pb() for rule in self.rules])
+        return table_v2_pb2.GcRule(intersection=intersection)
+
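+# Illustrative composition (not part of the original module): garbage-collect
+# cells that are both more than a week old and beyond the three most recent
+# versions, using only the classes defined above.
+#
+#     import datetime
+#
+#     week_old = MaxAgeGCRule(datetime.timedelta(days=7))
+#     three_versions = MaxVersionsGCRule(3)
+#     gc_rule = GCRuleIntersection(rules=[week_old, three_versions])
+#     gc_rule_pb = gc_rule.to_pb()  # a table_v2_pb2.GcRule protobuf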
+
+
+class ColumnFamily(object):
+    """Representation of a Google Cloud Bigtable Column Family.
+
+    We can use a :class:`ColumnFamily` to:
+
+    * :meth:`create` itself
+    * :meth:`update` itself
+    * :meth:`delete` itself
+
+    :type column_family_id: str
+    :param column_family_id: The ID of the column family. Must be of the
+                             form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+    :type table: :class:`Table <gcloud.bigtable.table.Table>`
+    :param table: The table that owns the column family.
+
+    :type gc_rule: :class:`GarbageCollectionRule`
+    :param gc_rule: (Optional) The garbage collection settings for this
+                    column family.
+    """
+
+    def __init__(self, column_family_id, table, gc_rule=None):
+        self.column_family_id = column_family_id
+        self._table = table
+        self.gc_rule = gc_rule
+
+    @property
+    def name(self):
+        """Column family name used in requests.
+
+        .. note::
+
+            This property will not change if ``column_family_id`` does not,
+            but the return value is not cached.
+
+        The column family name is of the form
+
+            ``"projects/../instances/../tables/../columnFamilies/.."``
+
+        :rtype: str
+        :returns: The column family name.
+        """
+        return self._table.name + '/columnFamilies/' + self.column_family_id
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (other.column_family_id == self.column_family_id and
+                other._table == self._table and
+                other.gc_rule == self.gc_rule)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def to_pb(self):
+        """Converts the column family to a protobuf.
+
+        :rtype: :class:`.table_v2_pb2.ColumnFamily`
+        :returns: The converted current object.
+        """
+        if self.gc_rule is None:
+            return table_v2_pb2.ColumnFamily()
+        else:
+            return table_v2_pb2.ColumnFamily(gc_rule=self.gc_rule.to_pb())
+
+
+    def create(self):
+        """Create this column family."""
+        column_family = self.to_pb()
+        request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(
+            name=self._table.name)
+        request_pb.modifications.add(
+            id=self.column_family_id,
+            create=column_family,
+        )
+        client = self._table._instance._client
+        # We expect a `.table_v2_pb2.ColumnFamily`. We ignore it since the only
+        # data it contains are the GC rule and the column family ID already
+        # stored on this instance.
+        client._table_stub.ModifyColumnFamilies(request_pb,
+                                                client.timeout_seconds)
+
+    def update(self):
+        """Update this column family.
+
+        .. note::
+
+            Only the GC rule can be updated. By changing the column family ID,
+            you will simply be referring to a different column family.
+        """
+        column_family = self.to_pb()
+        request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(
+            name=self._table.name)
+        request_pb.modifications.add(
+            id=self.column_family_id,
+            update=column_family)
+        client = self._table._instance._client
+        # We expect a `.table_v2_pb2.ColumnFamily`. We ignore it since the only
+        # data it contains are the GC rule and the column family ID already
+        # stored on this instance.
+        client._table_stub.ModifyColumnFamilies(request_pb,
+                                                client.timeout_seconds)
+
+    def delete(self):
+        """Delete this column family."""
+        request_pb = table_admin_v2_pb2.ModifyColumnFamiliesRequest(
+            name=self._table.name)
+        request_pb.modifications.add(
+            id=self.column_family_id,
+            drop=True)
+        client = self._table._instance._client
+        # We expect a `google.protobuf.empty_pb2.Empty`
+        client._table_stub.ModifyColumnFamilies(request_pb,
+                                                client.timeout_seconds)
+
+
+def _gc_rule_from_pb(gc_rule_pb):
+    """Convert a protobuf GC rule to a native object.
+
+    :type gc_rule_pb: :class:`.table_v2_pb2.GcRule`
+    :param gc_rule_pb: The GC rule to convert.
+
+    :rtype: :class:`GarbageCollectionRule` or :data:`NoneType <types.NoneType>`
+    :returns: An instance of one of the native rules defined
+              in :mod:`column_family` or :data:`None` if no values were
+              set on the protobuf passed in.
+    :raises: :class:`ValueError <exceptions.ValueError>` if the rule name
+             is unexpected.
+    """
+    rule_name = gc_rule_pb.WhichOneof('rule')
+    if rule_name is None:
+        return None
+
+    if rule_name == 'max_num_versions':
+        return MaxVersionsGCRule(gc_rule_pb.max_num_versions)
+    elif rule_name == 'max_age':
+        max_age = _duration_pb_to_timedelta(gc_rule_pb.max_age)
+        return MaxAgeGCRule(max_age)
+    elif rule_name == 'union':
+        return GCRuleUnion([_gc_rule_from_pb(rule)
+                            for rule in gc_rule_pb.union.rules])
+    elif rule_name == 'intersection':
+        rules = [_gc_rule_from_pb(rule)
+                 for rule in gc_rule_pb.intersection.rules]
+        return GCRuleIntersection(rules)
+    else:
+        raise ValueError('Unexpected rule name', rule_name)
+
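+# Illustrative round trip (not part of the original module): a native rule
+# converted to a protobuf parses back to an equal native rule.
+#
+#     rule = MaxVersionsGCRule(1)
+#     assert _gc_rule_from_pb(rule.to_pb()) == rule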
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Bigtable HappyBase batch module."""
+
+
+import datetime
+import warnings
+
+import six
+
+from gcloud._helpers import _datetime_from_microseconds
+from gcloud.bigtable.row_filters import TimestampRange
+
+
+_WAL_SENTINEL = object()
+# Assumed granularity of timestamps in Cloud Bigtable.
+_ONE_MILLISECOND = datetime.timedelta(microseconds=1000)
+_WARN = warnings.warn
+_WAL_WARNING = ('The wal argument (Write-Ahead-Log) is not '
+                'supported by Cloud Bigtable.')
+
+
+
+class Batch(object):
+    """Batch class for accumulating mutations.
+
+    .. note::
+
+        When using a batch with ``transaction=False`` as a context manager
+        (i.e. in a ``with`` statement), mutations will still be sent as
+        row mutations even if the context manager exits with an error.
+        This behavior is in place to match the behavior in the HappyBase
+        HBase / Thrift implementation.
+
+    :type table: :class:`Table <gcloud.bigtable.happybase.table.Table>`
+    :param table: The table where mutations will be applied.
+
+    :type timestamp: int
+    :param timestamp: (Optional) Timestamp (in milliseconds since the epoch)
+                      that all mutations will be applied at.
+
+    :type batch_size: int
+    :param batch_size: (Optional) The maximum number of mutations to allow
+                       to accumulate before committing them.
+
+    :type transaction: bool
+    :param transaction: Flag indicating if the mutations should be sent
+                        transactionally or not. If ``transaction=True`` and
+                        an error occurs while a :class:`Batch` is active,
+                        then none of the accumulated mutations will be
+                        committed. If ``batch_size`` is set, the mutation
+                        can't be transactional.
+
+    :type wal: object
+    :param wal: Unused parameter (Boolean for using the HBase Write Ahead Log).
+                Provided for compatibility with HappyBase, but irrelevant for
+                Cloud Bigtable since it does not have a Write Ahead Log.
+
+    :raises: :class:`TypeError <exceptions.TypeError>` if ``batch_size``
+             is set and ``transaction=True``.
+             :class:`ValueError <exceptions.ValueError>` if ``batch_size``
+             is not positive.
+    """
+
+    def __init__(self, table, timestamp=None, batch_size=None,
+                 transaction=False, wal=_WAL_SENTINEL):
+        if wal is not _WAL_SENTINEL:
+            _WARN(_WAL_WARNING)
+
+        if batch_size is not None:
+            if transaction:
+                raise TypeError('When batch_size is set, a Batch cannot be '
+                                'transactional')
+            if batch_size <= 0:
+                raise ValueError('batch_size must be positive')
+
+        self._table = table
+        self._batch_size = batch_size
+        self._timestamp = self._delete_range = None
+
+        # Timestamp is in milliseconds, convert to microseconds.
+        if timestamp is not None:
+            self._timestamp = _datetime_from_microseconds(1000 * timestamp)
+            # For deletes, we get the very next timestamp (assuming timestamp
+            # granularity is milliseconds). This is because HappyBase users
+            # expect HBase deletes to go **up to** and **including** the
+            # timestamp while Cloud Bigtable Time Ranges **exclude** the
+            # final timestamp.
+            next_timestamp = self._timestamp + _ONE_MILLISECOND
+            self._delete_range = TimestampRange(end=next_timestamp)
+
+        self._transaction = transaction
+
+        # Internal state for tracking mutations.
+        self._row_map = {}
+        self._mutation_count = 0
+
+
+    def send(self):
+        """Send / commit the batch of mutations to the server."""
+        for row in self._row_map.values():
+            # commit() does nothing if row hasn't accumulated any mutations.
+            row.commit()
+
+        self._row_map.clear()
+        self._mutation_count = 0
+
+    def _try_send(self):
+        """Send / commit the batch if mutations have exceeded batch size."""
+        if self._batch_size and self._mutation_count >= self._batch_size:
+            self.send()
+
+    def _get_row(self, row_key):
+        """Gets a row that will hold mutations.
+
+        If the row is not already cached on the current batch, a new row will
+        be created.
+
+        :type row_key: str
+        :param row_key: The row key for a row stored in the map.
+
+        :rtype: :class:`Row <gcloud.bigtable.row.Row>`
+        :returns: The newly created or stored row that will hold mutations.
+        """
+        if row_key not in self._row_map:
+            table = self._table._low_level_table
+            self._row_map[row_key] = table.row(row_key)
+
+        return self._row_map[row_key]
+
+
+    def put(self, row, data, wal=_WAL_SENTINEL):
+        """Insert data into a row in the table owned by this batch.
+
+        :type row: str
+        :param row: The row key where the mutation will be "put".
+
+        :type data: dict
+        :param data: Dictionary containing the data to be inserted. The keys
+                     are column names (of the form ``fam:col``) and the values
+                     are strings (bytes) to be stored in those columns.
+
+        :type wal: object
+        :param wal: Unused parameter (to over-ride the default on the
+                    instance). Provided for compatibility with HappyBase, but
+                    irrelevant for Cloud Bigtable since it does not have a
+                    Write Ahead Log.
+        """
+        if wal is not _WAL_SENTINEL:
+            _WARN(_WAL_WARNING)
+
+        row_object = self._get_row(row)
+        # Make sure all the keys are valid before beginning
+        # to add mutations.
+        column_pairs = _get_column_pairs(six.iterkeys(data),
+                                         require_qualifier=True)
+        for column_family_id, column_qualifier in column_pairs:
+            value = data[column_family_id + ':' + column_qualifier]
+            row_object.set_cell(column_family_id, column_qualifier,
+                                value, timestamp=self._timestamp)
+
+        self._mutation_count += len(data)
+        self._try_send()
+
+    def _delete_columns(self, columns, row_object):
+        """Adds delete mutations for a list of columns and column families.
+
+        :type columns: list
+        :param columns: Iterable containing column names (as
+                        strings). Each column name can be either
+
+                        * an entire column family: ``fam`` or ``fam:``
+                        * a single column: ``fam:col``
+
+        :type row_object: :class:`Row <gcloud.bigtable.row.Row>`
+        :param row_object: The row which will hold the delete mutations.
+
+        :raises: :class:`ValueError <exceptions.ValueError>` if the delete
+                 timestamp range is set on the current batch, but a
+                 column family delete is attempted.
+        """
+        column_pairs = _get_column_pairs(columns)
+        for column_family_id, column_qualifier in column_pairs:
+            if column_qualifier is None:
+                if self._delete_range is not None:
+                    raise ValueError('The Cloud Bigtable API does not support '
+                                     'adding a timestamp to '
+                                     '"DeleteFromFamily" mutations')
+                row_object.delete_cells(column_family_id,
+                                        columns=row_object.ALL_COLUMNS)
+            else:
+                row_object.delete_cell(column_family_id,
+                                       column_qualifier,
+                                       time_range=self._delete_range)
+
+
+    def delete(self, row, columns=None, wal=_WAL_SENTINEL):
+        """Delete data from a row in the table owned by this batch.
+
+        :type row: str
+        :param row: The row key where the delete will occur.
+
+        :type columns: list
+        :param columns: (Optional) Iterable containing column names (as
+                        strings). Each column name can be either
+
+                        * an entire column family: ``fam`` or ``fam:``
+                        * a single column: ``fam:col``
+
+                        If not used, will delete the entire row.
+
+        :type wal: object
+        :param wal: Unused parameter (to over-ride the default on the
+                    instance). Provided for compatibility with HappyBase, but
+                    irrelevant for Cloud Bigtable since it does not have a
+                    Write Ahead Log.
+
+        :raises: :class:`ValueError <exceptions.ValueError>` if the delete
+                 timestamp range is set on the current batch, but a full row
+                 delete is attempted.
+        """
+        if wal is not _WAL_SENTINEL:
+            _WARN(_WAL_WARNING)
+
+        row_object = self._get_row(row)
+
+        if columns is None:
+            # Delete entire row.
+            if self._delete_range is not None:
+                raise ValueError('The Cloud Bigtable API does not support '
+                                 'adding a timestamp to "DeleteFromRow" '
+                                 'mutations')
+            row_object.delete()
+            self._mutation_count += 1
+        else:
+            self._delete_columns(columns, row_object)
+            self._mutation_count += len(columns)
+
+        self._try_send()
+
+    def __enter__(self):
+        """Enter context manager, no set-up required."""
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        """Exit context manager, sending any accumulated mutations.
+
+        :type exc_type: type
+        :param exc_type: The type of the exception if one occurred while the
+                         context manager was active. Otherwise, :data:`None`.
+
+        :type exc_value: :class:`Exception <exceptions.Exception>`
+        :param exc_value: An instance of ``exc_type`` if an exception occurred
+                          while the context was active.
+                          Otherwise, :data:`None`.
+
+        :type traceback: ``traceback`` type
+        :param traceback: The traceback where the exception occurred (if one
+                          did occur). Otherwise, :data:`None`.
+        """
+        # If the context manager encountered an exception and the batch is
+        # transactional, we don't commit the mutations.
+        if self._transaction and exc_type is not None:
+            return
+
+        # NOTE: For non-transactional batches, this will even commit mutations
+        #       if an error occurred during the context manager.
+        self.send()
+
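+# Illustrative usage (not part of the original module): accumulating
+# mutations in a transactional batch. The ``table`` object is a hypothetical
+# :class:`Table <gcloud.bigtable.happybase.table.Table>` instance.
+#
+#     with Batch(table, transaction=True) as batch:
+#         batch.put('row-key1', {'fam:col1': 'value1'})
+#         batch.delete('row-key2', columns=['fam:col2'])
+#     # On a clean exit the mutations are sent; if an exception escapes the
+#     # block, a transactional batch discards them instead.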
+
+def _get_column_pairs(columns, require_qualifier=False):
+    """Turns a list of columns or column families into parsed pairs.
+
+    Turns a column family (``fam`` or ``fam:``) into a pair such
+    as ``['fam', None]`` and turns a column (``fam:col``) into
+    ``['fam', 'col']``.
+
+    :type columns: list
+    :param columns: Iterable containing column names (as
+                    strings). Each column name can be either
+
+                    * an entire column family: ``fam`` or ``fam:``
+                    * a single column: ``fam:col``
+
+    :type require_qualifier: bool
+    :param require_qualifier: Boolean indicating if the columns should
+                              all have a qualifier or not.
+
+    :rtype: list
+    :returns: List of pairs, where the first element in each pair is the
+              column family and the second is the column qualifier
+              (or :data:`None`).
+    :raises: :class:`ValueError <exceptions.ValueError>` if any of the columns
+             are not of the expected format.
+             :class:`ValueError <exceptions.ValueError>` if
+             ``require_qualifier`` is :data:`True` and one of the values is
+             for an entire column family.
+    """
+    column_pairs = []
+    for column in columns:
+        if isinstance(column, six.binary_type):
+            column = column.decode('utf-8')
+        # Remove trailing colons (i.e. for standalone column family).
+        if column.endswith(u':'):
+            column = column[:-1]
+        num_colons = column.count(u':')
+        if num_colons == 0:
+            # column is a column family.
+            if require_qualifier:
+                raise ValueError('column does not contain a qualifier',
+                                 column)
+            else:
+                column_pairs.append([column, None])
+        elif num_colons == 1:
+            column_pairs.append(column.split(u':'))
+        else:
+            raise ValueError('Column contains the : separator more than once')
+
+    return column_pairs
+
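+# Illustrative behavior (not part of the original module) of the parser
+# above:
+#
+#     _get_column_pairs(['fam1', 'fam2:', 'fam3:col'])
+#     # -> [['fam1', None], ['fam2', None], ['fam3', 'col']]
+#     _get_column_pairs(['fam1'], require_qualifier=True)  # raises ValueError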
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Bigtable HappyBase connection module."""
+
+
+import datetime
+import warnings
+
+import six
+
+from grpc.beta import interfaces
+from grpc.framework.interfaces.face import face
+
+try:
+    from happybase.hbase.ttypes import AlreadyExists
+except ImportError:
+    from gcloud.exceptions import Conflict as AlreadyExists
+
+from gcloud.bigtable.client import Client
+from gcloud.bigtable.column_family import GCRuleIntersection
+from gcloud.bigtable.column_family import MaxAgeGCRule
+from gcloud.bigtable.column_family import MaxVersionsGCRule
+from gcloud.bigtable.happybase.table import Table
+from gcloud.bigtable.table import Table as _LowLevelTable
+
+
+# Constants reproduced here for HappyBase compatibility, though values
+# are all null.
+COMPAT_MODES = None
+THRIFT_TRANSPORTS = None
+THRIFT_PROTOCOLS = None
+DEFAULT_HOST = None
+DEFAULT_PORT = None
+DEFAULT_TRANSPORT = None
+DEFAULT_COMPAT = None
+DEFAULT_PROTOCOL = None
+
+_LEGACY_ARGS = frozenset(('host', 'port', 'compat', 'transport', 'protocol'))
+_WARN = warnings.warn
+_BASE_DISABLE = 'Cloud Bigtable has no concept of enabled / disabled tables.'
+_DISABLE_DELETE_MSG = ('The disable argument should not be used in '
+                       'delete_table(). ') + _BASE_DISABLE
+_ENABLE_TMPL = 'Connection.enable_table(%r) was called, but ' + _BASE_DISABLE
+_DISABLE_TMPL = 'Connection.disable_table(%r) was called, but ' + _BASE_DISABLE
+_IS_ENABLED_TMPL = ('Connection.is_table_enabled(%r) was called, but ' +
+                    _BASE_DISABLE)
+_COMPACT_TMPL = ('Connection.compact_table(%r, major=%r) was called, but the '
+                 'Cloud Bigtable API handles table compactions automatically '
+                 'and does not expose an API for it.')
+
+
+def _get_instance(timeout=None):
+    """Gets instance for the default project.
+
+    Creates a client with the inferred credentials and project ID from
+    the local environment. Then uses
+    :meth:`.bigtable.client.Client.list_instances` to
+    get the unique instance owned by the project.
+
+    If the request fails for any reason, or if there isn't exactly one instance
+    owned by the project, then this function will fail.
+
+    :type timeout: int
+    :param timeout: (Optional) The socket timeout in milliseconds.
+
+    :rtype: :class:`gcloud.bigtable.instance.Instance`
+    :returns: The unique instance owned by the project inferred from
+              the environment.
+    :raises ValueError: if there is a failed location or any number of
+                        instances other than one.
+    """
+    client_kwargs = {'admin': True}
+    if timeout is not None:
+        client_kwargs['timeout_seconds'] = timeout / 1000.0
+    client = Client(**client_kwargs)
+    try:
+        client.start()
+        instances, failed_locations = client.list_instances()
+    finally:
+        client.stop()
+
+    if len(failed_locations) != 0:
+        raise ValueError('Determining instance via ListInstances encountered '
+                         'failed locations.')
+    if len(instances) == 0:
+        raise ValueError('This client doesn\'t have access to any instances.')
+    if len(instances) > 1:
+        raise ValueError('This client has access to more than one instance. '
+                         'Please directly pass the instance you\'d '
+                         'like to use.')
+    return instances[0]
+
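+# Illustrative usage (not part of the original module): passing an explicit
+# instance to the :class:`Connection` defined below avoids the one-instance
+# lookup performed by ``_get_instance``. ``instance`` is a hypothetical,
+# already-created :class:`Instance <gcloud.bigtable.instance.Instance>`.
+#
+#     connection = Connection(instance=instance)  # copies the instance
+#     table = connection.table('my-table')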
+
+
+class Connection(object):
+    """Connection to Cloud Bigtable backend.
+
+    .. note::
+
+        If you pass an ``instance``, it will be :meth:`.Instance.copy`-ed
+        before being stored on the new connection. This also copies the
+        :class:`Client <gcloud.bigtable.client.Client>` that created the
+        :class:`Instance <gcloud.bigtable.instance.Instance>` instance and the
+        :class:`Credentials <oauth2client.client.Credentials>` stored on the
+        client.
+
+    The arguments ``host``, ``port``, ``compat``, ``transport`` and
+    ``protocol`` are allowed (as keyword arguments) for compatibility with
+    HappyBase. However, they will not be used in any way, and will cause a
+    warning if passed.
+
+    :type timeout: int
+    :param timeout: (Optional) The socket timeout in milliseconds.
+
+    :type autoconnect: bool
+    :param autoconnect: (Optional) Whether the connection should be
+                        :meth:`open`-ed during construction.
+
+    :type table_prefix: str
+    :param table_prefix: (Optional) Prefix used to construct table names.
+
+    :type table_prefix_separator: str
+    :param table_prefix_separator: (Optional) Separator used with
+                                   ``table_prefix``. Defaults to ``_``.
+
+    :type instance: :class:`Instance <gcloud.bigtable.instance.Instance>`
+    :param instance: (Optional) A Cloud Bigtable instance. The instance also
+                     owns a client for making gRPC requests to the Cloud
+                     Bigtable API. If not passed in, defaults to creating a
+                     client with ``admin=True`` and using the ``timeout`` here
+                     for the ``timeout_seconds`` argument to the
+                     :class:`Client <gcloud.bigtable.client.Client>`
+                     constructor. The credentials for the client
+                     will be the implicit ones loaded from the environment.
+                     Then that client is used to retrieve all the instances
+                     owned by the client's project.
+
+    :type kwargs: dict
+    :param kwargs: Remaining keyword arguments. Provided for HappyBase
+                   compatibility.
+    """
+
+    _instance = None
+
+    def __init__(self, timeout=None, autoconnect=True, table_prefix=None,
+                 table_prefix_separator='_', instance=None, **kwargs):
+        self._handle_legacy_args(kwargs)
+        if table_prefix is not None:
+            if not isinstance(table_prefix, six.string_types):
+                raise TypeError('table_prefix must be a string', 'received',
+                                table_prefix, type(table_prefix))
+
+        if not isinstance(table_prefix_separator, six.string_types):
+            raise TypeError('table_prefix_separator must be a string',
+                            'received', table_prefix_separator,
+                            type(table_prefix_separator))
+
+        self.table_prefix = table_prefix
+        self.table_prefix_separator = table_prefix_separator
+
+        if instance is None:
+            self._instance = _get_instance(timeout=timeout)
+        else:
+            if timeout is not None:
+                raise ValueError('Timeout cannot be used when an existing '
+                                 'instance is passed')
+            self._instance = instance.copy()
+
+        if autoconnect:
+            self.open()
+
+        self._initialized = True
+
+    @staticmethod
+    def _handle_legacy_args(arguments_dict):
+        """Check legacy HappyBase arguments and warn if set.
+
+        :type arguments_dict: dict
+        :param arguments_dict: Unused keyword arguments.
+
+        :raises TypeError: if a keyword other than ``host``, ``port``,
+                           ``compat``, ``transport`` or ``protocol`` is used.
+        """
+        common_args = _LEGACY_ARGS.intersection(six.iterkeys(arguments_dict))
+        if common_args:
+            all_args = ', '.join(common_args)
+            message = ('The HappyBase legacy arguments %s were used. These '
+                       'arguments are unused by gcloud.' % (all_args,))
+            _WARN(message)
+        for arg_name in common_args:
+            arguments_dict.pop(arg_name)
+        if arguments_dict:
+            unexpected_names = arguments_dict.keys()
+            raise TypeError('Received unexpected arguments', unexpected_names)
+
+
+    def open(self):
+        """Open the underlying transport to Cloud Bigtable.
+
+        This method opens the underlying HTTP/2 gRPC connection using a
+        :class:`Client <gcloud.bigtable.client.Client>` bound to the
+        :class:`Instance <gcloud.bigtable.instance.Instance>` owned by
+        this connection.
+        """
+        self._instance._client.start()
+
+    def close(self):
+        """Close the underlying transport to Cloud Bigtable.
+
+        This method closes the underlying HTTP/2 gRPC connection using a
+        :class:`Client <gcloud.bigtable.client.Client>` bound to the
+        :class:`Instance <gcloud.bigtable.instance.Instance>` owned by
+        this connection.
+        """
+        self._instance._client.stop()
+
+    def __del__(self):
+        if self._instance is not None:
+            self.close()
+
+    def _table_name(self, name):
+        """Construct a table name by optionally adding a table name prefix.
+
+        :type name: str
+        :param name: The name to have a prefix added to it.
+
+        :rtype: str
+        :returns: The prefixed name, if the current connection has a table
+                  prefix set.
+        """
+        if self.table_prefix is None:
+            return name
+
+        return self.table_prefix + self.table_prefix_separator + name
+
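+    # Illustrative behavior (not part of the original module): with a prefix
+    # set, table names are expanded before any API call. ``instance`` is a
+    # hypothetical, already-created Instance; ``_table_name`` is the private
+    # helper defined just above.
+    #
+    #     connection = Connection(instance=instance, table_prefix='app1')
+    #     connection._table_name('users')  # -> 'app1_users'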
+
+    def table(self, name, use_prefix=True):
+        """Table factory.
+
+        :type name: str
+        :param name: The name of the table to be created.
+
+        :type use_prefix: bool
+        :param use_prefix: Whether to use the table prefix (if any).
+
+        :rtype: :class:`Table <gcloud.bigtable.happybase.table.Table>`
+        :returns: Table instance owned by this connection.
+        """
+        if use_prefix:
+            name = self._table_name(name)
+        return Table(name, self)
+
+    def tables(self):
+        """Return a list of table names available to this connection.
+
+        .. note::
+
+            This lists every table in the instance owned by this connection,
+            **not** every table that a given user may have access to.
+
+        .. note::
+
+            If ``table_prefix`` is set on this connection, only returns the
+            table names which match that prefix.
+
+        :rtype: list
+        :returns: List of string table names.
+        """
+        low_level_table_instances = self._instance.list_tables()
+        table_names = [table_instance.table_id
+                       for table_instance in low_level_table_instances]
+
+        # Filter using prefix, and strip prefix from names
+        if self.table_prefix is not None:
+            prefix = self._table_name('')
+            offset = len(prefix)
+            table_names = [name[offset:] for name in table_names
+                           if name.startswith(prefix)]
+
+        return table_names
+
+
+    def create_table(self, name, families):
+        """Create a table.
+
+        .. warning::
+
+            The only column family options from HappyBase that are able to be
+            used with Cloud Bigtable are ``max_versions`` and ``time_to_live``.
+
+        Values in ``families`` represent column family options. In HappyBase,
+        these are dictionaries, corresponding to the ``ColumnDescriptor``
+        structure in the Thrift API. The accepted keys are:
+
+        * ``max_versions`` (``int``)
+        * ``compression`` (``str``)
+        * ``in_memory`` (``bool``)
+        * ``bloom_filter_type`` (``str``)
+        * ``bloom_filter_vector_size`` (``int``)
+        * ``bloom_filter_nb_hashes`` (``int``)
+        * ``block_cache_enabled`` (``bool``)
+        * ``time_to_live`` (``int``)
+
+        :type name: str
+        :param name: The name of the table to be created.
+
+        :type families: dict
+        :param families: Dictionary with column family names as keys and column
+                         family options as the values. The options can be among
+
+                         * :class:`dict`
+                         * :class:`.GarbageCollectionRule`
+
+        :raises TypeError: If ``families`` is not a dictionary.
+        :raises ValueError: If ``families`` has no entries.
+        :raises AlreadyExists: If creation fails due to an already
+                               existing table.
+        :raises NetworkError: If creation fails for a reason other than
+                              table exists.
+        """
+        if not isinstance(families, dict):
+            raise TypeError('families arg must be a dictionary')
+
+        if not families:
+            raise ValueError('Cannot create table %r (no column '
+                             'families specified)' % (name,))
+
+        # Parse all keys before making any API requests.
+        gc_rule_dict = {}
+        for column_family_name, option in families.items():
+            if isinstance(column_family_name, six.binary_type):
+                column_family_name = column_family_name.decode('utf-8')
+            if column_family_name.endswith(':'):
+                column_family_name = column_family_name[:-1]
+            gc_rule_dict[column_family_name] = _parse_family_option(option)
+
+        # Create table instance and then make API calls.
+        name = self._table_name(name)
+        low_level_table = _LowLevelTable(name, self._instance)
+        column_families = (
+            low_level_table.column_family(column_family_name, gc_rule=gc_rule)
+            for column_family_name, gc_rule in six.iteritems(gc_rule_dict)
+        )
+        try:
+            low_level_table.create(column_families=column_families)
+        except face.NetworkError as network_err:
+            if network_err.code == interfaces.StatusCode.ALREADY_EXISTS:
+                raise AlreadyExists(name)
+            else:
+                raise
+
+
+    def delete_table(self, name, disable=False):
+        """Delete the specified table.
+
+        :type name: str
+        :param name: The name of the table to be deleted. If ``table_prefix``
+                     is set, a prefix will be added to the ``name``.
+
+        :type disable: bool
+        :param disable: Whether to first disable the table if needed. This
+                        is provided for compatibility with HappyBase, but is
+                        not relevant for Cloud Bigtable since it has no concept
+                        of enabled / disabled tables.
+        """
+        if disable:
+            _WARN(_DISABLE_DELETE_MSG)
+
+        name = self._table_name(name)
+        _LowLevelTable(name, self._instance).delete()
+
+    @staticmethod
+    def enable_table(name):
+        """Enable the specified table.
+
+        .. warning::
+
+            Cloud Bigtable has no concept of enabled / disabled tables so this
+            method does nothing. It is provided simply for compatibility.
+
+        :type name: str
+        :param name: The name of the table to be enabled.
+        """
+        _WARN(_ENABLE_TMPL % (name,))
+
+    @staticmethod
+    def disable_table(name):
+        """Disable the specified table.
+
+        .. warning::
+
+            Cloud Bigtable has no concept of enabled / disabled tables so this
+            method does nothing. It is provided simply for compatibility.
+
+        :type name: str
+        :param name: The name of the table to be disabled.
+        """
+        _WARN(_DISABLE_TMPL % (name,))
+
+    @staticmethod
+    def is_table_enabled(name):
+        """Return whether the specified table is enabled.
+
+        .. warning::
+
+            Cloud Bigtable has no concept of enabled / disabled tables so this
+            method always returns :data:`True`. It is provided simply for
+            compatibility.
+
+        :type name: str
+        :param name: The name of the table to check enabled / disabled status.
+
+        :rtype: bool
+        :returns: The value :data:`True` always.
+        """
+        _WARN(_IS_ENABLED_TMPL % (name,))
+        return True
+
+    @staticmethod
+    def compact_table(name, major=False):
+        """Compact the specified table.
+
+        .. warning::
+
+            Cloud Bigtable supports table compactions, but it doesn't expose
+            an API for that feature, so this method does nothing. It is
+            provided simply for compatibility.
+
+        :type name: str
+        :param name: The name of the table to compact.
+
+        :type major: bool
+        :param major: Whether to perform a major compaction.
+        """
+        _WARN(_COMPACT_TMPL % (name, major))
+
+
+def _parse_family_option(option):
+    """Parses a column family option into a garbage collection rule.
+
+    .. note::
+
+        If ``option`` is not a dictionary, the type is not checked.
+        If ``option`` is :data:`None`, there is nothing to do, since this
+        is the correct output.
+
+    :type option: :class:`dict`,
+                  :data:`NoneType <types.NoneType>`,
+                  :class:`.GarbageCollectionRule`
+    :param option: A column family option passed as a dictionary value in
+                   :meth:`Connection.create_table`.
+
+    :rtype: :class:`.GarbageCollectionRule`
+    :returns: A garbage collection rule parsed from the input.
+    """
+    result = option
+    if isinstance(result, dict):
+        if not set(result.keys()) <= set(['max_versions', 'time_to_live']):
+            all_keys = ', '.join(repr(key) for key in result.keys())
+            warning_msg = ('Cloud Bigtable only supports max_versions and '
+                           'time_to_live column family settings. '
+                           'Received: %s' % (all_keys,))
+            _WARN(warning_msg)
+
+        max_num_versions = result.get('max_versions')
+        max_age = None
+        if 'time_to_live' in result:
+            max_age = datetime.timedelta(seconds=result['time_to_live'])
+
+        versions_rule = age_rule = None
+        if max_num_versions is not None:
+            versions_rule = MaxVersionsGCRule(max_num_versions)
+        if max_age is not None:
+            age_rule = MaxAgeGCRule(max_age)
+
+        if versions_rule is None:
+            result = age_rule
+        else:
+            if age_rule is None:
+                result = versions_rule
+            else:
+                result = GCRuleIntersection(rules=[age_rule, versions_rule])
+
+    return result
+
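+# Illustrative behavior (not part of the original module) of the option
+# parser above:
+#
+#     _parse_family_option({'max_versions': 2})
+#     # -> MaxVersionsGCRule(2)
+#     _parse_family_option({'max_versions': 2, 'time_to_live': 86400})
+#     # -> GCRuleIntersection of a one-day MaxAgeGCRule and the
+#     #    MaxVersionsGCRule above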
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Bigtable HappyBase pool module."""
+
+
+import contextlib
+import threading
+
+import six
+
+from gcloud.bigtable.happybase.connection import Connection
+from gcloud.bigtable.happybase.connection import _get_instance
+
+
+_MIN_POOL_SIZE = 1
+"""Minimum allowable size of a connection pool."""
+
+
+
+class NoConnectionsAvailable(RuntimeError):
+    """Exception raised when no connections are available.
+
+    This happens if a timeout was specified when obtaining a connection,
+    and no connection became available within the specified timeout.
+    """
+
+
+
+class ConnectionPool(object):
+    """Thread-safe connection pool.
+
+    .. note::
+
+        All keyword arguments are passed unmodified to the
+        :class:`Connection <.happybase.connection.Connection>` constructor
+        **except** for ``autoconnect``. This is because the ``open`` /
+        ``closed`` status of a connection is managed by the pool. In addition,
+        if ``instance`` is not passed, the default / inferred instance is
+        determined by the pool and then passed to each
+        :class:`Connection <.happybase.connection.Connection>` that is created.
+
+    :type size: int
+    :param size: The maximum number of concurrently open connections.
+
+    :type kwargs: dict
+    :param kwargs: Keyword arguments passed to
+                   :class:`Connection <.happybase.Connection>`
+                   constructor.
+
+    :raises: :class:`TypeError <exceptions.TypeError>` if ``size``
+             is not an integer.
+             :class:`ValueError <exceptions.ValueError>` if ``size``
+             is not positive.
+    """
+    def __init__(self, size, **kwargs):
+        if not isinstance(size, six.integer_types):
+            raise TypeError('Pool size arg must be an integer')
+
+        if size < _MIN_POOL_SIZE:
+            raise ValueError('Pool size must be positive')
+
+        self._lock = threading.Lock()
+        self._queue = six.moves.queue.LifoQueue(maxsize=size)
+        self._thread_connections = threading.local()
+
+        connection_kwargs = kwargs
+        connection_kwargs['autoconnect'] = False
+        if 'instance' not in connection_kwargs:
+            connection_kwargs['instance'] = _get_instance(
+                timeout=kwargs.get('timeout'))
+
+        for _ in six.moves.range(size):
+            connection = Connection(**connection_kwargs)
+            self._queue.put(connection)
+
+    def _acquire_connection(self, timeout=None):
+        """Acquire a connection from the pool.
+
+        :type timeout: int
+        :param timeout: (Optional) Time (in seconds) to wait for a connection
+                        to open.
+
+        :rtype: :class:`Connection <.happybase.Connection>`
+        :returns: An active connection from the queue stored on the pool.
+        :raises: :class:`NoConnectionsAvailable` if ``Queue.get`` fails
+                 before the ``timeout`` (only if a timeout is specified).
+        """
+        try:
+            return self._queue.get(block=True, timeout=timeout)
+        except six.moves.queue.Empty:
+            raise NoConnectionsAvailable('No connection available from pool '
+                                         'within specified timeout')
+
+    @contextlib.contextmanager
+    def connection(self, timeout=None):
+        """Obtain a connection from the pool.
+
+        Must be used as a context manager, for example::
+
+            with pool.connection() as connection:
+                pass  # do something with the connection
+
+        If ``timeout`` is omitted, this method waits forever for a connection
+        to become available from the local queue.
+
+        Yields an active :class:`Connection <.happybase.connection.Connection>`
+        from the pool.
+
+        :type timeout: int
+        :param timeout: (Optional) Time (in seconds) to wait for a connection
+                        to open.
+
+        :raises: :class:`NoConnectionsAvailable` if no connection can be
+                 retrieved from the pool before the ``timeout`` (only if
+                 a timeout is specified).
+        """
+        connection = getattr(self._thread_connections, 'current', None)
+
+        retrieved_new_cnxn = False
+        if connection is None:
+            # In this case we need to actually grab a connection from the
+            # pool. After retrieval, the connection is stored on a thread
+            # local so that nested connection requests from the same
+            # thread can re-use the same connection instance.
+            #
+            # NOTE: This code acquires a lock before assigning to the
+            #       thread local; see
+            #       ('https://emptysqua.re/blog/'
+            #        'another-thing-about-pythons-threadlocals/')
+            retrieved_new_cnxn = True
+            connection = self._acquire_connection(timeout)
+            with self._lock:
+                self._thread_connections.current = connection
+
+        # This is a no-op for connections that have already been opened
+        # since they just call Client.start().
+        connection.open()
+        yield connection
+
+        # Remove thread local reference after the outermost 'with' block
+        # ends. Afterwards the thread no longer owns the connection.
+        if retrieved_new_cnxn:
+            del self._thread_connections.current
+            self._queue.put(connection)
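+
+
+# Illustrative usage (not part of the original module): sharing connections
+# among worker threads. ``instance`` is a hypothetical, already-created
+# :class:`Instance <gcloud.bigtable.instance.Instance>`.
+#
+#     pool = ConnectionPool(size=4, instance=instance)
+#     with pool.connection(timeout=10) as connection:
+#         rows = connection.table('my-table').rows(['key1', 'key2'])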
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Bigtable HappyBase table module."""
+
+
+import struct
+import warnings
+
+import six
+
+from gcloud._helpers import _datetime_from_microseconds
+from gcloud._helpers import _microseconds_from_datetime
+from gcloud._helpers import _to_bytes
+from gcloud._helpers import _total_seconds
+from gcloud.bigtable.column_family import GCRuleIntersection
+from gcloud.bigtable.column_family import MaxAgeGCRule
+from gcloud.bigtable.column_family import MaxVersionsGCRule
+from gcloud.bigtable.happybase.batch import _get_column_pairs
+from gcloud.bigtable.happybase.batch import _WAL_SENTINEL
+from gcloud.bigtable.happybase.batch import Batch
+from gcloud.bigtable.row_filters import CellsColumnLimitFilter
+from gcloud.bigtable.row_filters import ColumnQualifierRegexFilter
+from gcloud.bigtable.row_filters import FamilyNameRegexFilter
+from gcloud.bigtable.row_filters import RowFilterChain
+from gcloud.bigtable.row_filters import RowFilterUnion
+from gcloud.bigtable.row_filters import RowKeyRegexFilter
+from gcloud.bigtable.row_filters import TimestampRange
+from gcloud.bigtable.row_filters import TimestampRangeFilter
+from gcloud.bigtable.table import Table as _LowLevelTable
+
+
+_WARN = warnings.warn
+_PACK_I64 = struct.Struct('>q').pack
+_UNPACK_I64 = struct.Struct('>q').unpack
+_SIMPLE_GC_RULES = (MaxAgeGCRule, MaxVersionsGCRule)
+
+
+
+def make_row(cell_map, include_timestamp):
+    """Make a row dict for a Thrift cell mapping.
+
+    .. warning::
+
+        This method is only provided for HappyBase compatibility, but does not
+        actually work.
+
+    :type cell_map: dict
+    :param cell_map: Dictionary with ``fam:col`` strings as keys and ``TCell``
+                     instances as values.
+
+    :type include_timestamp: bool
+    :param include_timestamp: Flag to indicate if cell timestamps should be
+                              included with the output.
+
+    :raises: :class:`NotImplementedError <exceptions.NotImplementedError>`
+             always
+    """
+    raise NotImplementedError('The Cloud Bigtable API output is not the same '
+                              'as the output from the Thrift server, so this '
+                              'helper can not be implemented.', 'Called with',
+                              cell_map, include_timestamp)
+
+
+def make_ordered_row(sorted_columns, include_timestamp):
+    """Make a row dict for sorted Thrift column results from scans.
+
+    .. warning::
+
+        This method is only provided for HappyBase compatibility, but does not
+        actually work.
+
+    :type sorted_columns: list
+    :param sorted_columns: List of ``TColumn`` instances from Thrift.
+
+    :type include_timestamp: bool
+    :param include_timestamp: Flag to indicate if cell timestamps should be
+                              included with the output.
+
+    :raises: :class:`NotImplementedError <exceptions.NotImplementedError>`
+             always
+    """
+    raise NotImplementedError('The Cloud Bigtable API output is not the same '
+                              'as the output from the Thrift server, so this '
+                              'helper can not be implemented.', 'Called with',
+                              sorted_columns, include_timestamp)
+
+
+
+class Table(object):
+    """Representation of Cloud Bigtable table.
+
+    Used for adding data to and reading data from a Cloud Bigtable table.
+
+    :type name: str
+    :param name: The name of the table.
+
+    :type connection: :class:`Connection <.happybase.connection.Connection>`
+    :param connection: The connection which has access to the table.
+    """
+
+    def __init__(self, name, connection):
+        self.name = name
+        # This remains as legacy for HappyBase, but only the instance
+        # from the connection is needed.
+        self.connection = connection
+        self._low_level_table = None
+        if self.connection is not None:
+            self._low_level_table = _LowLevelTable(self.name,
+                                                   self.connection._instance)
+
+    def __repr__(self):
+        return '<table.Table name=%r>' % (self.name,)
+
+
+    def families(self):
+        """Retrieve the column families for this table.
+
+        :rtype: dict
+        :returns: Mapping from column family name to garbage collection rule
+                  for a column family.
+        """
+        column_family_map = self._low_level_table.list_column_families()
+        result = {}
+        for col_fam, col_fam_obj in six.iteritems(column_family_map):
+            result[col_fam] = _gc_rule_to_dict(col_fam_obj.gc_rule)
+        return result
+
+    def regions(self):
+        """Retrieve the regions for this table.
+
+        .. warning::
+
+            Cloud Bigtable does not give information about how a table is laid
+            out in memory, so this method does not work. It is
+            provided simply for compatibility.
+
+        :raises: :class:`NotImplementedError <exceptions.NotImplementedError>`
+                 always
+        """
+        raise NotImplementedError('The Cloud Bigtable API does not have a '
+                                  'concept of splitting a table into regions.')
+
+
+    def row(self, row, columns=None, timestamp=None, include_timestamp=False):
+        """Retrieve a single row of data.
+
+        Returns the latest cells in each column (or all columns if ``columns``
+        is not specified). If a ``timestamp`` is set, then **latest** becomes
+        **latest** up until ``timestamp``.
+
+        :type row: str
+        :param row: Row key for the row we are reading from.
+
+        :type columns: list
+        :param columns: (Optional) Iterable containing column names (as
+                        strings). Each column name can be either
+
+                        * an entire column family: ``fam`` or ``fam:``
+                        * a single column: ``fam:col``
+
+        :type timestamp: int
+        :param timestamp: (Optional) Timestamp (in milliseconds since the
+                          epoch). If specified, only cells returned before the
+                          timestamp will be returned.
+
+        :type include_timestamp: bool
+        :param include_timestamp: Flag to indicate if cell timestamps should be
+                                  included with the output.
+
+        :rtype: dict
+        :returns: Dictionary containing all the latest column values in
+                  the row.
+        """
+        filters = []
+        if columns is not None:
+            filters.append(_columns_filter_helper(columns))
+        # versions == 1 since we only want the latest.
+        filter_ = _filter_chain_helper(versions=1, timestamp=timestamp,
+                                       filters=filters)
+
+        partial_row_data = self._low_level_table.read_row(
+            row, filter_=filter_)
+        if partial_row_data is None:
+            return {}
+
+        return _partial_row_to_dict(partial_row_data,
+                                    include_timestamp=include_timestamp)
+
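+    # Illustrative usage (not part of the original module): reading the
+    # latest value in two columns of one row. The returned values are
+    # hypothetical.
+    #
+    #     row_dict = table.row('row-key', columns=['fam:col1', 'fam:col2'])
+    #     # e.g. {'fam:col1': 'value1', 'fam:col2': 'value2'}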
+
+    def rows(self, rows, columns=None, timestamp=None,
+             include_timestamp=False):
+        """Retrieve multiple rows of data.
+
+        All optional arguments behave the same in this method as they do in
+        :meth:`row`.
+
+        :type rows: list
+        :param rows: Iterable of the row keys for the rows we are reading from.
+
+        :type columns: list
+        :param columns: (Optional) Iterable containing column names (as
+                        strings). Each column name can be either
+
+                        * an entire column family: ``fam`` or ``fam:``
+                        * a single column: ``fam:col``
+
+        :type timestamp: int
+        :param timestamp: (Optional) Timestamp (in milliseconds since the
+                          epoch). If specified, only cells returned before (or
+                          at) the timestamp will be returned.
+
+        :type include_timestamp: bool
+        :param include_timestamp: Flag to indicate if cell timestamps should be
+                                  included with the output.
+
+        :rtype: list
+        :returns: A list of pairs, where the first is the row key and the
+                  second is a dictionary with the filtered values returned.
+        """
+        if not rows:
+            # Avoid round-trip if the result is empty anyway
+            return []
+
+        filters = []
+        if columns is not None:
+            filters.append(_columns_filter_helper(columns))
+        filters.append(_row_keys_filter_helper(rows))
+        # versions == 1 since we only want the latest.
+        filter_ = _filter_chain_helper(versions=1, timestamp=timestamp,
+                                       filters=filters)
+
+        partial_rows_data = self._low_level_table.read_rows(filter_=filter_)
+        # NOTE: We could use max_loops = 1000 or some similar value to ensure
+        #       that the stream isn't open too long.
+        partial_rows_data.consume_all()
+
+        result = []
+        for row_key in rows:
+            if row_key not in partial_rows_data.rows:
+                continue
+            curr_row_data = partial_rows_data.rows[row_key]
+            curr_row_dict = _partial_row_to_dict(
+                curr_row_data, include_timestamp=include_timestamp)
+            result.append((row_key, curr_row_dict))
+
+        return result
+
+
+    def cells(self, row, column, versions=None, timestamp=None,
+              include_timestamp=False):
+        """Retrieve multiple versions of a single cell from the table.
+
+        :type row: str
+        :param row: Row key for the row we are reading from.
+
+        :type column: str
+        :param column: Column we are reading from; of the form ``fam:col``.
+
+        :type versions: int
+        :param versions: (Optional) The maximum number of cells to return. If
+                         not set, returns all cells found.
+
+        :type timestamp: int
+        :param timestamp: (Optional) Timestamp (in milliseconds since the
+                          epoch). If specified, only cells returned before (or
+                          at) the timestamp will be returned.
+
+        :type include_timestamp: bool
+        :param include_timestamp: Flag to indicate if cell timestamps should be
+                                  included with the output.
+
+        :rtype: list
+        :returns: List of values in the cell (with timestamps if
+                  ``include_timestamp`` is :data:`True`).
+        """
+        filter_ = _filter_chain_helper(column=column, versions=versions,
+                                       timestamp=timestamp)
+        partial_row_data = self._low_level_table.read_row(row, filter_=filter_)
+        if partial_row_data is None:
+            return []
+        else:
+            cells = partial_row_data._cells
+            # We know that `_filter_chain_helper` has already verified that
+            # column will split as such.
+            column_family_id, column_qualifier = column.split(':')
+            # NOTE: We expect the only key in `cells` is `column_family_id`
+            #       and the only key in `cells[column_family_id]` is
+            #       `column_qualifier`. But we don't check that this is true.
+            curr_cells = cells[column_family_id][column_qualifier]
+            return _cells_to_pairs(
+                curr_cells, include_timestamp=include_timestamp)
+
+
[docs]defscan(self,row_start=None,row_stop=None,row_prefix=None,
+ columns=None,timestamp=None,
+ include_timestamp=False,limit=None,**kwargs):
+ """Create a scanner for data in this table.
+
+ This method returns a generator that can be used for looping over the
+ matching rows.
+
+ If ``row_prefix`` is specified, only rows with row keys matching the
+ prefix will be returned. If given, ``row_start`` and ``row_stop``
+ cannot be used.
+
+ .. note::
+
+ Both ``row_start`` and ``row_stop`` can be :data:`None` to specify
+ the start and the end of the table respectively. If both are
+ omitted, a full table scan is done. Note that this usually results
+ in severe performance problems.
+
+ The keyword argument ``filter`` is also supported (beyond column and
+ row range filters supported here). HappyBase / HBase users will have
+ used this as an HBase filter string. (See the `Thrift docs`_ for more
+ details on those filters.) However, Google Cloud Bigtable doesn't
+ support those filter strings so a
+ :class:`~gcloud.bigtable.row.RowFilter` should be used instead.
+
+ .. _Thrift docs: http://hbase.apache.org/0.94/book/thrift.html
+
+ The arguments ``batch_size``, ``scan_batching`` and ``sorted_columns``
+ are allowed (as keyword arguments) for compatibility with
+ HappyBase. However, they will not be used in any way, and will cause a
+ warning if passed. (The ``batch_size`` determines the number of
+ results to retrieve per request. The HBase scanner defaults to reading
+ one record at a time, so this argument allows HappyBase to increase
+ that number. However, the Cloud Bigtable API uses HTTP/2 streaming so
+ there is no concept of a batched scan. The ``sorted_columns`` flag
+ tells HBase to return columns in order, but Cloud Bigtable doesn't
+ have this feature.)
+
+ :type row_start: str
+ :param row_start: (Optional) Row key where the scanner should start
+ (includes ``row_start``). If not specified, reads
+ from the first key. If the table does not contain
+ ``row_start``, it will start from the next key after
+ it that **is** contained in the table.
+
+ :type row_stop: str
+ :param row_stop: (Optional) Row key where the scanner should stop
+ (excludes ``row_stop``). If not specified, reads
+ until the last key. The table does not have to contain
+ ``row_stop``.
+
+ :type row_prefix: str
+ :param row_prefix: (Optional) Prefix to match row keys.
+
+ :type columns: list
+ :param columns: (Optional) Iterable containing column names (as
+ strings). Each column name can be either
+
+ * an entire column family: ``fam`` or ``fam:``
+ * a single column: ``fam:col``
+
+ :type timestamp: int
+ :param timestamp: (Optional) Timestamp (in milliseconds since the
+                   epoch). If specified, only cells with a timestamp at
+                   or before this value will be returned.
+
+ :type include_timestamp: bool
+ :param include_timestamp: Flag to indicate if cell timestamps should be
+ included with the output.
+
+ :type limit: int
+ :param limit: (Optional) Maximum number of rows to return.
+
+ :type kwargs: dict
+ :param kwargs: Remaining keyword arguments. Provided for HappyBase
+ compatibility.
+
+ :raises: :class:`ValueError <exceptions.ValueError>` if ``limit`` is set
+          but non-positive, or if ``row_prefix`` is used with
+          ``row_start`` or ``row_stop``;
+          :class:`TypeError <exceptions.TypeError>` if a string
+          ``filter`` is used.
+ """
+        row_start, row_stop, filter_chain = _scan_filter_helper(
+            row_start, row_stop, row_prefix, columns, timestamp, limit, kwargs)
+
+        partial_rows_data = self._low_level_table.read_rows(
+            start_key=row_start, end_key=row_stop,
+            limit=limit, filter_=filter_chain)
+
+        # Mutable copy of data.
+        rows_dict = partial_rows_data.rows
+        while True:
+            try:
+                partial_rows_data.consume_next()
+                for row_key in sorted(rows_dict):
+                    curr_row_data = rows_dict.pop(row_key)
+                    # NOTE: We expect len(rows_dict) == 0, but don't check it.
+                    curr_row_dict = _partial_row_to_dict(
+                        curr_row_data, include_timestamp=include_timestamp)
+                    yield (row_key, curr_row_dict)
+            except StopIteration:
+                break
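+
+    # Illustrative usage sketch (not from the original source): a prefix
+    # scan over the latest values, assuming a HappyBase-compatible
+    # ``table``; ``process`` is a hypothetical callback::
+    #
+    #     for row_key, row_dict in table.scan(row_prefix=b'acct-',
+    #                                         limit=100):
+    #         process(row_key, row_dict)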
+
+
+    def put(self, row, data, timestamp=None, wal=_WAL_SENTINEL):
+ """Insert data into a row in this table.
+
+ .. note::
+
+ This method will send a request with a single "put" mutation.
+ In many situations, :meth:`batch` is a more appropriate
+ method to manipulate data since it helps combine many mutations
+ into a single request.
+
+ :type row: str
+ :param row: The row key where the mutation will be "put".
+
+ :type data: dict
+ :param data: Dictionary containing the data to be inserted. The keys
+              are column names (of the form ``fam:col``) and the values
+ are strings (bytes) to be stored in those columns.
+
+ :type timestamp: int
+ :param timestamp: (Optional) Timestamp (in milliseconds since the
+ epoch) that the mutation will be applied at.
+
+ :type wal: object
+ :param wal: Unused parameter (to be passed to a created batch).
+ Provided for compatibility with HappyBase, but irrelevant
+ for Cloud Bigtable since it does not have a Write Ahead
+ Log.
+ """
+        with self.batch(timestamp=timestamp, wal=wal) as batch:
+            batch.put(row, data)
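+
+    # Illustrative usage sketch (not from the original source): a single
+    # put, which creates a one-mutation batch under the hood::
+    #
+    #     table.put(b'row-key', {'fam:col1': b'value1',
+    #                            'fam:col2': b'value2'})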
+
+
+    def delete(self, row, columns=None, timestamp=None, wal=_WAL_SENTINEL):
+ """Delete data from a row in this table.
+
+ This method deletes the entire ``row`` if ``columns`` is not
+ specified.
+
+ .. note::
+
+ This method will send a request with a single delete mutation.
+ In many situations, :meth:`batch` is a more appropriate
+ method to manipulate data since it helps combine many mutations
+ into a single request.
+
+ :type row: str
+ :param row: The row key where the delete will occur.
+
+ :type columns: list
+ :param columns: (Optional) Iterable containing column names (as
+ strings). Each column name can be either
+
+ * an entire column family: ``fam`` or ``fam:``
+ * a single column: ``fam:col``
+
+ :type timestamp: int
+ :param timestamp: (Optional) Timestamp (in milliseconds since the
+ epoch) that the mutation will be applied at.
+
+ :type wal: object
+ :param wal: Unused parameter (to be passed to a created batch).
+ Provided for compatibility with HappyBase, but irrelevant
+ for Cloud Bigtable since it does not have a Write Ahead
+ Log.
+ """
+        with self.batch(timestamp=timestamp, wal=wal) as batch:
+            batch.delete(row, columns)
+
+
+    def batch(self, timestamp=None, batch_size=None, transaction=False,
+              wal=_WAL_SENTINEL):
+ """Create a new batch operation for this table.
+
+ This method returns a new
+ :class:`Batch <.happybase.batch.Batch>` instance that can be
+ used for mass data manipulation.
+
+ :type timestamp: int
+ :param timestamp: (Optional) Timestamp (in milliseconds since the
+ epoch) that all mutations will be applied at.
+
+ :type batch_size: int
+ :param batch_size: (Optional) The maximum number of mutations to allow
+ to accumulate before committing them.
+
+ :type transaction: bool
+ :param transaction: Flag indicating if the mutations should be sent
+ transactionally or not. If ``transaction=True`` and
+ an error occurs while a
+ :class:`Batch <.happybase.batch.Batch>` is
+ active, then none of the accumulated mutations will
+ be committed. If ``batch_size`` is set, the
+ mutation can't be transactional.
+
+ :type wal: object
+ :param wal: Unused parameter (to be passed to the created batch).
+ Provided for compatibility with HappyBase, but irrelevant
+ for Cloud Bigtable since it does not have a Write Ahead
+ Log.
+
+ :rtype: :class:`Batch <gcloud.bigtable.happybase.batch.Batch>`
+ :returns: A batch bound to this table.
+ """
+        return Batch(self, timestamp=timestamp, batch_size=batch_size,
+                     transaction=transaction, wal=wal)
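+
+    # Illustrative usage sketch (not from the original source): group
+    # several mutations into one request; the batch commits when the
+    # ``with`` block exits::
+    #
+    #     with table.batch(transaction=True) as batch:
+    #         batch.put(b'row-key1', {'fam:col': b'value1'})
+    #         batch.delete(b'row-key2', columns=['fam:old'])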
+
+
+    def counter_get(self, row, column):
+ """Retrieve the current value of a counter column.
+
+ This method retrieves the current value of a counter column. If the
+ counter column does not exist, this function initializes it to ``0``.
+
+ .. note::
+
+ Application code should **never** store a counter value directly;
+ use the atomic :meth:`counter_inc` and :meth:`counter_dec` methods
+ for that.
+
+ :type row: str
+ :param row: Row key for the row we are getting a counter from.
+
+ :type column: str
+ :param column: Column we are ``get``-ing from; of the form ``fam:col``.
+
+ :rtype: int
+ :returns: Counter value (after initializing / incrementing by 0).
+ """
+        # Don't query directly, but increment with value=0 so that the counter
+        # is correctly initialized if it didn't exist yet.
+        return self.counter_inc(row, column, value=0)
+
+
+    def counter_set(self, row, column, value=0):
+ """Set a counter column to a specific value.
+
+ .. note::
+
+ Be careful using this method. It can be useful for setting the
+ initial value of a counter, but it defeats the purpose of using
+ atomic increment and decrement.
+
+ :type row: str
+ :param row: Row key for the row we are setting a counter in.
+
+ :type column: str
+ :param column: Column we are setting a value in; of
+ the form ``fam:col``.
+
+ :type value: int
+ :param value: Value to set the counter to.
+ """
+        self.put(row, {column: _PACK_I64(value)})
+
+
+    def counter_inc(self, row, column, value=1):
+ """Atomically increment a counter column.
+
+ This method atomically increments a counter column in ``row``.
+ If the counter column does not exist, it is automatically initialized
+ to ``0`` before being incremented.
+
+ :type row: str
+ :param row: Row key for the row we are incrementing a counter in.
+
+ :type column: str
+ :param column: Column we are incrementing a value in; of the
+ form ``fam:col``.
+
+ :type value: int
+ :param value: Amount to increment the counter by. (If negative,
+ this is equivalent to decrement.)
+
+ :rtype: int
+ :returns: Counter value after incrementing.
+ """
+        row = self._low_level_table.row(row, append=True)
+        if isinstance(column, six.binary_type):
+            column = column.decode('utf-8')
+        column_family_id, column_qualifier = column.split(':')
+        row.increment_cell_value(column_family_id, column_qualifier, value)
+        # AppendRow.commit() will return a dictionary:
+        # {
+        #     u'col-fam-id': {
+        #         b'col-name1': [
+        #             (b'cell-val', datetime.datetime(...)),
+        #             ...
+        #         ],
+        #         ...
+        #     },
+        # }
+        modified_cells = row.commit()
+        # Get the cells in the modified column.
+        column_cells = modified_cells[column_family_id][column_qualifier]
+        # Make sure there is exactly one cell in the column.
+        if len(column_cells) != 1:
+            raise ValueError('Expected server to return one modified cell.')
+        column_cell = column_cells[0]
+        # Get the bytes value from the column and convert it to an integer.
+        bytes_value = column_cell[0]
+        int_value, = _UNPACK_I64(bytes_value)
+        return int_value
+
+
+    def counter_dec(self, row, column, value=1):
+ """Atomically decrement a counter column.
+
+ This method atomically decrements a counter column in ``row``.
+ If the counter column does not exist, it is automatically initialized
+ to ``0`` before being decremented.
+
+ :type row: str
+ :param row: Row key for the row we are decrementing a counter in.
+
+ :type column: str
+ :param column: Column we are decrementing a value in; of the
+ form ``fam:col``.
+
+ :type value: int
+ :param value: Amount to decrement the counter by. (If negative,
+ this is equivalent to increment.)
+
+ :rtype: int
+ :returns: Counter value after decrementing.
+ """
+        return self.counter_inc(row, column, -value)
+
+
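+# Illustrative sketch (not part of the original module): how the counter
+# methods above compose, for a HappyBase-compatible ``table`` object.
+def _example_counter_usage(table):
+    table.counter_inc(b'row-key', 'fam:hits')       # 0 -> 1 (auto-init)
+    table.counter_inc(b'row-key', 'fam:hits', 10)   # 1 -> 11
+    table.counter_dec(b'row-key', 'fam:hits', 2)    # 11 -> 9
+    # counter_get increments by 0, so it initializes a missing counter.
+    return table.counter_get(b'row-key', 'fam:hits')
+
+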
+def _gc_rule_to_dict(gc_rule):
+ """Converts garbage collection rule to dictionary if possible.
+
+ This is in place to support dictionary values as was done
+ in HappyBase, which has somewhat different garbage collection rule
+ settings for column families.
+
+ Only does this if the garbage collection rule is:
+
+ * :class:`gcloud.bigtable.column_family.MaxAgeGCRule`
+ * :class:`gcloud.bigtable.column_family.MaxVersionsGCRule`
+ * Composite :class:`gcloud.bigtable.column_family.GCRuleIntersection`
+ with two rules, one each of type
+ :class:`gcloud.bigtable.column_family.MaxAgeGCRule` and
+ :class:`gcloud.bigtable.column_family.MaxVersionsGCRule`
+
+ Otherwise, just returns the input without change.
+
+ :type gc_rule: :data:`NoneType <types.NoneType>`,
+ :class:`.GarbageCollectionRule`
+ :param gc_rule: A garbage collection rule to convert to a dictionary
+ (if possible).
+
+ :rtype: dict or
+ :class:`gcloud.bigtable.column_family.GarbageCollectionRule`
+ :returns: The converted garbage collection rule.
+ """
+    result = gc_rule
+    if gc_rule is None:
+        result = {}
+    elif isinstance(gc_rule, MaxAgeGCRule):
+        result = {'time_to_live': _total_seconds(gc_rule.max_age)}
+    elif isinstance(gc_rule, MaxVersionsGCRule):
+        result = {'max_versions': gc_rule.max_num_versions}
+    elif isinstance(gc_rule, GCRuleIntersection):
+        if len(gc_rule.rules) == 2:
+            rule1, rule2 = gc_rule.rules
+            if (isinstance(rule1, _SIMPLE_GC_RULES) and
+                    isinstance(rule2, _SIMPLE_GC_RULES)):
+                rule1 = _gc_rule_to_dict(rule1)
+                rule2 = _gc_rule_to_dict(rule2)
+                key1, = rule1.keys()
+                key2, = rule2.keys()
+                if key1 != key2:
+                    result = {key1: rule1[key1], key2: rule2[key2]}
+    return result
+
+
+def _next_char(str_val, index):
+ """Gets the next character based on a position in a string.
+
+ :type str_val: str
+ :param str_val: A string containing the character to update.
+
+ :type index: int
+ :param index: An integer index in ``str_val``.
+
+ :rtype: str
+ :returns: The next character after the character at ``index``
+ in ``str_val``.
+ """
+    ord_val = six.indexbytes(str_val, index)
+    return _to_bytes(chr(ord_val + 1), encoding='latin-1')
+
+
+def _string_successor(str_val):
+ """Increment and truncate a byte string.
+
+ Determines shortest string that sorts after the given string when
+ compared using regular string comparison semantics.
+
+ Modeled after implementation in ``gcloud-golang``.
+
+ Increments the last byte that is smaller than ``0xFF``, and
+ drops everything after it. If the string only contains ``0xFF`` bytes,
+ ``''`` is returned.
+
+ :type str_val: str
+ :param str_val: String to increment.
+
+ :rtype: str
+ :returns: The next string in lexical order after ``str_val``.
+ """
+    str_val = _to_bytes(str_val, encoding='latin-1')
+    if str_val == b'':
+        return str_val
+
+    index = len(str_val) - 1
+    while index >= 0:
+        if six.indexbytes(str_val, index) != 0xff:
+            break
+        index -= 1
+
+    if index == -1:
+        return b''
+
+    return str_val[:index] + _next_char(str_val, index)
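+
+
+# Illustrative check (not part of the original module) of the successor
+# computation above.
+def _example_string_successor():
+    assert _string_successor(b'abc') == b'abd'     # bump the last byte
+    assert _string_successor(b'ab\xff') == b'ac'   # drop trailing 0xFF bytes
+    assert _string_successor(b'\xff\xff') == b''   # all 0xFF: empty result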
+
+
+def _convert_to_time_range(timestamp=None):
+ """Create a timestamp range from an HBase / HappyBase timestamp.
+
+ HBase uses timestamp as an argument to specify an exclusive end
+ deadline. Cloud Bigtable also uses exclusive end times, so
+ the behavior matches.
+
+ :type timestamp: int
+ :param timestamp: (Optional) Timestamp (in milliseconds since the
+ epoch). Intended to be used as the end of an HBase
+ time range, which is exclusive.
+
+ :rtype: :class:`gcloud.bigtable.row.TimestampRange`,
+ :data:`NoneType <types.NoneType>`
+ :returns: The timestamp range corresponding to the passed in
+ ``timestamp``.
+ """
+    if timestamp is None:
+        return None
+
+    next_timestamp = _datetime_from_microseconds(1000 * timestamp)
+    return TimestampRange(end=next_timestamp)
+
+
+def _cells_to_pairs(cells, include_timestamp=False):
+ """Converts list of cells to HappyBase format.
+
+ For example::
+
+ >>> import datetime
+ >>> from gcloud.bigtable.row_data import Cell
+ >>> cell1 = Cell(b'val1', datetime.datetime.utcnow())
+ >>> cell2 = Cell(b'val2', datetime.datetime.utcnow())
+ >>> _cells_to_pairs([cell1, cell2])
+ [b'val1', b'val2']
+ >>> _cells_to_pairs([cell1, cell2], include_timestamp=True)
+ [(b'val1', 1456361486255), (b'val2', 1456361491927)]
+
+ :type cells: list
+ :param cells: List of :class:`gcloud.bigtable.row_data.Cell` returned
+ from a read request.
+
+ :type include_timestamp: bool
+ :param include_timestamp: Flag to indicate if cell timestamps should be
+ included with the output.
+
+ :rtype: list
+ :returns: List of values in the cell. If ``include_timestamp=True``, each
+ value will be a pair, with the first part the bytes value in
+ the cell and the second part the number of milliseconds in the
+ timestamp on the cell.
+ """
+    result = []
+    for cell in cells:
+        if include_timestamp:
+            ts_millis = _microseconds_from_datetime(cell.timestamp) // 1000
+            result.append((cell.value, ts_millis))
+        else:
+            result.append(cell.value)
+    return result
+
+
+def _partial_row_to_dict(partial_row_data, include_timestamp=False):
+ """Convert a low-level row data object to a dictionary.
+
+ Assumes only the latest value in each row is needed; this holds
+ because callers of this method use a ``CellsColumnLimitFilter(1)``
+ filter.
+
+ For example::
+
+ >>> import datetime
+ >>> from gcloud.bigtable.row_data import Cell, PartialRowData
+ >>> cell1 = Cell(b'val1', datetime.datetime.utcnow())
+ >>> cell2 = Cell(b'val2', datetime.datetime.utcnow())
+ >>> row_data = PartialRowData(b'row-key')
+ >>> _partial_row_to_dict(row_data)
+ {}
+ >>> row_data._cells[u'fam1'] = {b'col1': [cell1], b'col2': [cell2]}
+ >>> _partial_row_to_dict(row_data)
+ {b'fam1:col2': b'val2', b'fam1:col1': b'val1'}
+ >>> _partial_row_to_dict(row_data, include_timestamp=True)
+ {b'fam1:col2': (b'val2', 1456361724480),
+ b'fam1:col1': (b'val1', 1456361721135)}
+
+ :type partial_row_data: :class:`.row_data.PartialRowData`
+ :param partial_row_data: Row data consumed from a stream.
+
+ :type include_timestamp: bool
+ :param include_timestamp: Flag to indicate if cell timestamps should be
+ included with the output.
+
+ :rtype: dict
+ :returns: The row data converted to a dictionary.
+ """
+    result = {}
+    for column, cells in six.iteritems(partial_row_data.to_dict()):
+        cell_vals = _cells_to_pairs(cells,
+                                    include_timestamp=include_timestamp)
+        # NOTE: We assume there is exactly 1 version since we used that in
+        #       our filter, but we don't check this.
+        result[column] = cell_vals[0]
+    return result
+
+
+def _filter_chain_helper(column=None, versions=None, timestamp=None,
+                         filters=None):
+ """Create filter chain to limit a results set.
+
+ :type column: str
+ :param column: (Optional) The column (``fam:col``) to be selected
+ with the filter.
+
+ :type versions: int
+ :param versions: (Optional) The maximum number of cells to return.
+
+ :type timestamp: int
+ :param timestamp: (Optional) Timestamp (in milliseconds since the
+                   epoch). If specified, only cells with a timestamp at
+                   or before this value will be matched.
+
+ :type filters: list
+ :param filters: (Optional) List of existing filters to be extended.
+
+ :rtype: :class:`RowFilter <gcloud.bigtable.row.RowFilter>`
+ :returns: The chained filter created, or just a single filter if only
+ one was needed.
+ :raises: :class:`ValueError <exceptions.ValueError>` if there are no
+ filters to chain.
+ """
+    if filters is None:
+        filters = []
+
+    if column is not None:
+        if isinstance(column, six.binary_type):
+            column = column.decode('utf-8')
+        column_family_id, column_qualifier = column.split(':')
+        fam_filter = FamilyNameRegexFilter(column_family_id)
+        qual_filter = ColumnQualifierRegexFilter(column_qualifier)
+        filters.extend([fam_filter, qual_filter])
+    if versions is not None:
+        filters.append(CellsColumnLimitFilter(versions))
+    time_range = _convert_to_time_range(timestamp=timestamp)
+    if time_range is not None:
+        filters.append(TimestampRangeFilter(time_range))
+
+    num_filters = len(filters)
+    if num_filters == 0:
+        raise ValueError('Must have at least one filter.')
+    elif num_filters == 1:
+        return filters[0]
+    else:
+        return RowFilterChain(filters=filters)
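+
+
+# Illustrative sketch (not part of the original module): build the "latest
+# value only" filter the read helpers above rely on, limited to one column
+# and bounded by an arbitrary example timestamp (in milliseconds).
+def _example_latest_version_filter():
+    return _filter_chain_helper(column='fam:col', versions=1,
+                                timestamp=1456361486255)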
+
+
+def _scan_filter_helper(row_start, row_stop, row_prefix, columns,
+                        timestamp, limit, kwargs):
+ """Helper for :meth:`scan`: build up a filter chain."""
+    filter_ = kwargs.pop('filter', None)
+    legacy_args = []
+    for kw_name in ('batch_size', 'scan_batching', 'sorted_columns'):
+        if kw_name in kwargs:
+            legacy_args.append(kw_name)
+            kwargs.pop(kw_name)
+    if legacy_args:
+        legacy_args = ', '.join(legacy_args)
+        message = ('The HappyBase legacy arguments %s were used. These '
+                   'arguments are unused by gcloud.' % (legacy_args,))
+        _WARN(message)
+    if kwargs:
+        raise TypeError('Received unexpected arguments', kwargs.keys())
+
+    if limit is not None and limit < 1:
+        raise ValueError('limit must be positive')
+    if row_prefix is not None:
+        if row_start is not None or row_stop is not None:
+            raise ValueError('row_prefix cannot be combined with '
+                             'row_start or row_stop')
+        row_start = row_prefix
+        row_stop = _string_successor(row_prefix)
+
+    filters = []
+    if isinstance(filter_, six.string_types):
+        raise TypeError('Specifying filters as a string is not supported '
+                        'by Cloud Bigtable. Use a '
+                        'gcloud.bigtable.row.RowFilter instead.')
+    elif filter_ is not None:
+        filters.append(filter_)
+
+    if columns is not None:
+        filters.append(_columns_filter_helper(columns))
+
+    # versions == 1 since we only want the latest.
+    filter_ = _filter_chain_helper(versions=1, timestamp=timestamp,
+                                   filters=filters)
+    return row_start, row_stop, filter_
+
+
+def _columns_filter_helper(columns):
+ """Creates a union filter for a list of columns.
+
+ :type columns: list
+ :param columns: Iterable containing column names (as strings). Each column
+ name can be either
+
+ * an entire column family: ``fam`` or ``fam:``
+ * a single column: ``fam:col``
+
+ :rtype: :class:`RowFilter <gcloud.bigtable.row.RowFilter>`
+ :returns: The union filter created containing all of the matched columns.
+ :raises: :class:`ValueError <exceptions.ValueError>` if there are no
+ filters to union.
+ """
+    filters = []
+    for column_family_id, column_qualifier in _get_column_pairs(columns):
+        fam_filter = FamilyNameRegexFilter(column_family_id)
+        if column_qualifier is not None:
+            qual_filter = ColumnQualifierRegexFilter(column_qualifier)
+            combined_filter = RowFilterChain(
+                filters=[fam_filter, qual_filter])
+            filters.append(combined_filter)
+        else:
+            filters.append(fam_filter)
+
+    num_filters = len(filters)
+    if num_filters == 0:
+        raise ValueError('Must have at least one filter.')
+    elif num_filters == 1:
+        return filters[0]
+    else:
+        return RowFilterUnion(filters=filters)
+
+
+def _row_keys_filter_helper(row_keys):
+ """Creates a union filter for a list of rows.
+
+ :type row_keys: list
+ :param row_keys: Iterable containing row keys (as strings).
+
+ :rtype: :class:`RowFilter <gcloud.bigtable.row.RowFilter>`
+ :returns: The union filter created containing all of the row keys.
+ :raises: :class:`ValueError <exceptions.ValueError>` if there are no
+ filters to union.
+ """
+    filters = []
+    for row_key in row_keys:
+        filters.append(RowKeyRegexFilter(row_key))
+
+    num_filters = len(filters)
+    if num_filters == 0:
+        raise ValueError('Must have at least one filter.')
+    elif num_filters == 1:
+        return filters[0]
+    else:
+        return RowFilterUnion(filters=filters)
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""User friendly container for Google Cloud Bigtable Instance."""
+
+
+import re
+
+from google.longrunning import operations_pb2
+
+from gcloud._helpers import _pb_timestamp_to_datetime
+from gcloud.bigtable._generated import (
+    instance_pb2 as data_v2_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_instance_admin_pb2 as messages_v2_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_table_admin_pb2 as table_messages_v2_pb2)
+from gcloud.bigtable.cluster import Cluster
+from gcloud.bigtable.cluster import DEFAULT_SERVE_NODES
+from gcloud.bigtable.table import Table
+
+
+_EXISTING_INSTANCE_LOCATION_ID = 'see-existing-cluster'
+_INSTANCE_NAME_RE = re.compile(r'^projects/(?P<project>[^/]+)/'
+                               r'instances/(?P<instance_id>[a-z][-a-z0-9]*)$')
+_OPERATION_NAME_RE = re.compile(r'^operations/projects/([^/]+)/'
+                                r'instances/([a-z][-a-z0-9]*)/'
+                                r'locations/(?P<location_id>[a-z][-a-z0-9]*)/'
+                                r'operations/(?P<operation_id>\d+)$')
+_TYPE_URL_BASE = 'type.googleapis.com/google.bigtable.'
+_ADMIN_TYPE_URL_BASE = _TYPE_URL_BASE + 'admin.v2.'
+_INSTANCE_CREATE_METADATA = _ADMIN_TYPE_URL_BASE + 'CreateInstanceMetadata'
+_TYPE_URL_MAP = {
+    _INSTANCE_CREATE_METADATA: messages_v2_pb2.CreateInstanceMetadata,
+}
+
+
+def _prepare_create_request(instance):
+ """Creates a protobuf request for a CreateInstance request.
+
+ :type instance: :class:`Instance`
+ :param instance: The instance to be created.
+
+ :rtype: :class:`.messages_v2_pb2.CreateInstanceRequest`
+ :returns: The CreateInstance request object containing the instance info.
+ """
+    parent_name = ('projects/' + instance._client.project)
+    message = messages_v2_pb2.CreateInstanceRequest(
+        parent=parent_name,
+        instance_id=instance.instance_id,
+        instance=data_v2_pb2.Instance(
+            display_name=instance.display_name,
+        ),
+    )
+    cluster = message.clusters[instance.instance_id]
+    cluster.name = instance.name + '/clusters/' + instance.instance_id
+    cluster.location = (
+        parent_name + '/locations/' + instance._cluster_location_id)
+    cluster.serve_nodes = instance._cluster_serve_nodes
+    return message
+
+
+def _parse_pb_any_to_native(any_val, expected_type=None):
+ """Convert a serialized "google.protobuf.Any" value to actual type.
+
+ :type any_val: :class:`google.protobuf.any_pb2.Any`
+ :param any_val: A serialized protobuf value container.
+
+ :type expected_type: str
+ :param expected_type: (Optional) The type URL we expect ``any_val``
+ to have.
+
+ :rtype: object
+ :returns: The de-serialized object.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the
+ ``expected_type`` does not match the ``type_url`` on the input.
+ """
+    if expected_type is not None and expected_type != any_val.type_url:
+        raise ValueError('Expected type: %s, Received: %s' % (
+            expected_type, any_val.type_url))
+    container_class = _TYPE_URL_MAP[any_val.type_url]
+    return container_class.FromString(any_val.value)
+
+
+def _process_operation(operation_pb):
+ """Processes a create protobuf response.
+
+ :type operation_pb: :class:`google.longrunning.operations_pb2.Operation`
+ :param operation_pb: The long-running operation response from a
+ Create/Update/Undelete instance request.
+
+ :rtype: (int, str, datetime)
+ :returns: (operation_id, location_id, operation_begin).
+ :raises: :class:`ValueError <exceptions.ValueError>` if the operation name
+ doesn't match the :data:`_OPERATION_NAME_RE` regex.
+ """
+    match = _OPERATION_NAME_RE.match(operation_pb.name)
+    if match is None:
+        raise ValueError('Operation name was not in the expected '
+                         'format after instance creation.',
+                         operation_pb.name)
+    location_id = match.group('location_id')
+    operation_id = int(match.group('operation_id'))
+
+    request_metadata = _parse_pb_any_to_native(operation_pb.metadata)
+    operation_begin = _pb_timestamp_to_datetime(
+        request_metadata.request_time)
+
+    return operation_id, location_id, operation_begin
+
+
+
+class Operation(object):
+ """Representation of a Google API Long-Running Operation.
+
+ In particular, these will be the result of operations on
+ instances using the Cloud Bigtable API.
+
+ :type op_type: str
+ :param op_type: The type of operation being performed. Expect
+ ``create``, ``update`` or ``undelete``.
+
+ :type op_id: int
+ :param op_id: The ID of the operation.
+
+ :type begin: :class:`datetime.datetime`
+ :param begin: The time when the operation was started.
+
+ :type location_id: str
+ :param location_id: ID of the location in which the operation is running
+
+ :type instance: :class:`Instance`
+ :param instance: The instance that created the operation.
+ """
+
+    def __init__(self, op_type, op_id, begin, location_id, instance=None):
+        self.op_type = op_type
+        self.op_id = op_id
+        self.begin = begin
+        self.location_id = location_id
+        self._instance = instance
+        self._complete = False
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (other.op_type == self.op_type and
+                other.op_id == self.op_id and
+                other.begin == self.begin and
+                other.location_id == self.location_id and
+                other._instance == self._instance and
+                other._complete == self._complete)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+    def finished(self):
+ """Check if the operation has finished.
+
+ :rtype: bool
+ :returns: A boolean indicating if the current operation has completed.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the operation
+ has already completed.
+ """
+        if self._complete:
+            raise ValueError('The operation has completed.')
+
+        operation_name = (
+            'operations/%s/locations/%s/operations/%d' %
+            (self._instance.name, self.location_id, self.op_id))
+        request_pb = operations_pb2.GetOperationRequest(name=operation_name)
+        # We expect a `google.longrunning.operations_pb2.Operation`.
+        operation_pb = self._instance._client._operations_stub.GetOperation(
+            request_pb, self._instance._client.timeout_seconds)
+
+        if operation_pb.done:
+            self._complete = True
+            return True
+        else:
+            return False
+
+
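+# Illustrative sketch (not part of the original module): poll a
+# long-running Operation until the server reports completion; the
+# two-second sleep is an arbitrary choice.
+def _example_wait_for_operation(operation):
+    import time
+    # finished() raises ValueError once the operation has completed,
+    # so stop polling as soon as it returns True.
+    while not operation.finished():
+        time.sleep(2)
+
+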
+
+class Instance(object):
+ """Representation of a Google Cloud Bigtable Instance.
+
+ We can use an :class:`Instance` to:
+
+ * :meth:`reload` itself
+ * :meth:`create` itself
+ * :meth:`update` itself
+ * :meth:`delete` itself
+ * :meth:`undelete` itself
+
+ .. note::
+
+ For now, we leave out the ``default_storage_type`` (an enum)
+ which if not sent will end up as :data:`.data_v2_pb2.STORAGE_SSD`.
+
+ :type instance_id: str
+ :param instance_id: The ID of the instance.
+
+ :type client: :class:`Client <gcloud.bigtable.client.Client>`
+ :param client: The client that owns the instance. Provides
+ authorization and a project ID.
+
+ :type location_id: str
+ :param location_id: ID of the location in which the instance will be
+ created. Required for instances which do not yet
+ exist.
+
+ :type display_name: str
+ :param display_name: (Optional) The display name for the instance in the
+ Cloud Console UI. (Must be between 4 and 30
+ characters.) If this value is not set in the
+ constructor, will fall back to the instance ID.
+
+ :type serve_nodes: int
+ :param serve_nodes: (Optional) The number of nodes in the instance's
+ cluster; used to set up the instance's cluster.
+ """
+
+    def __init__(self, instance_id, client,
+                 location_id=_EXISTING_INSTANCE_LOCATION_ID,
+                 display_name=None,
+                 serve_nodes=DEFAULT_SERVE_NODES):
+        self.instance_id = instance_id
+        self.display_name = display_name or instance_id
+        self._cluster_location_id = location_id
+        self._cluster_serve_nodes = serve_nodes
+        self._client = client
+
+    def _update_from_pb(self, instance_pb):
+ """Refresh self from the server-provided protobuf.
+
+ Helper for :meth:`from_pb` and :meth:`reload`.
+ """
+        if not instance_pb.display_name:  # Simple field (string)
+            raise ValueError('Instance protobuf does not contain display_name')
+        self.display_name = instance_pb.display_name
+
+    @classmethod
+    def from_pb(cls, instance_pb, client):
+ """Creates a instance instance from a protobuf.
+
+ :type instance_pb: :class:`instance_pb2.Instance`
+ :param instance_pb: An instance protobuf object.
+
+ :type client: :class:`Client <gcloud.bigtable.client.Client>`
+ :param client: The client that owns the instance.
+
+ :rtype: :class:`Instance`
+ :returns: The instance parsed from the protobuf response.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the instance
+ name does not match
+ ``projects/{project}/instances/{instance_id}``
+ or if the parsed project ID does not match the project ID
+ on the client.
+ """
+        match = _INSTANCE_NAME_RE.match(instance_pb.name)
+        if match is None:
+            raise ValueError('Instance protobuf name was not in the '
+                             'expected format.', instance_pb.name)
+        if match.group('project') != client.project:
+            raise ValueError('Project ID on instance does not match the '
+                             'project ID on the client')
+        instance_id = match.group('instance_id')
+
+        result = cls(instance_id, client, _EXISTING_INSTANCE_LOCATION_ID)
+        result._update_from_pb(instance_pb)
+        return result
+
+
+    def copy(self):
+ """Make a copy of this instance.
+
+ Copies the local data stored as simple types and copies the client
+ attached to this instance.
+
+ :rtype: :class:`.Instance`
+ :returns: A copy of the current instance.
+ """
+        new_client = self._client.copy()
+        return self.__class__(self.instance_id, new_client,
+                              self._cluster_location_id,
+                              display_name=self.display_name)
+
+    @property
+    def name(self):
+ """Instance name used in requests.
+
+ .. note::
+ This property will not change if ``instance_id`` does not,
+ but the return value is not cached.
+
+ The instance name is of the form
+
+ ``"projects/{project}/instances/{instance_id}"``
+
+ :rtype: str
+ :returns: The instance name.
+ """
+        return self._client.project_name + '/instances/' + self.instance_id
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        # NOTE: This does not compare the configuration values, such as
+        #       the display_name. Instead, it only compares the identifying
+        #       values: instance ID and client. This is intentional, since
+        #       the same instance can be in different states if not
+        #       synchronized. Instances with similar instance settings but
+        #       different clients can't be used in the same way.
+        return (other.instance_id == self.instance_id and
+                other._client == self._client)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+    def reload(self):
+ """Reload the metadata for this instance."""
+        request_pb = messages_v2_pb2.GetInstanceRequest(name=self.name)
+        # We expect `data_v2_pb2.Instance`.
+        instance_pb = self._client._instance_stub.GetInstance(
+            request_pb, self._client.timeout_seconds)
+
+        # NOTE: _update_from_pb does not check that the project and
+        #       instance ID on the response match the request.
+        self._update_from_pb(instance_pb)
+
+
+    def create(self):
+ """Create this instance.
+
+ .. note::
+
+ Uses the ``project`` and ``instance_id`` on the current
+ :class:`Instance` in addition to the ``display_name``.
+ To change them before creating, reset the values via
+
+ .. code:: python
+
+ instance.display_name = 'New display name'
+ instance.instance_id = 'i-changed-my-mind'
+
+ before calling :meth:`create`.
+
+ :rtype: :class:`Operation`
+ :returns: The long-running operation corresponding to the
+ create operation.
+ """
+        request_pb = _prepare_create_request(self)
+        # We expect a `google.longrunning.operations_pb2.Operation`.
+        operation_pb = self._client._instance_stub.CreateInstance(
+            request_pb, self._client.timeout_seconds)
+
+        op_id, loc_id, op_begin = _process_operation(operation_pb)
+        return Operation('create', op_id, op_begin, loc_id, instance=self)
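+
+    # Illustrative sketch (not from the original source): create an
+    # instance and wait on the returned Operation; the location ID
+    # 'us-central1-c' is only an example value::
+    #
+    #     instance = Instance('my-instance-id', client,
+    #                         location_id='us-central1-c')
+    #     operation = instance.create()
+    #     while not operation.finished():
+    #         time.sleep(2)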
+
+
+    def update(self):
+ """Update this instance.
+
+ .. note::
+
+ Updates the ``display_name``. To change that value before
+ updating, reset it via
+
+ .. code:: python
+
+ instance.display_name = 'New display name'
+
+ before calling :meth:`update`.
+ """
+        request_pb = data_v2_pb2.Instance(
+            name=self.name,
+            display_name=self.display_name,
+        )
+        # Ignore the expected `data_v2_pb2.Instance`.
+        self._client._instance_stub.UpdateInstance(
+            request_pb, self._client.timeout_seconds)
+
+
+    def delete(self):
+ """Delete this instance.
+
+ Marks an instance and all of its tables for permanent deletion
+ in 7 days.
+
+ Immediately upon completion of the request:
+
+ * Billing will cease for all of the instance's reserved resources.
+ * The instance's ``delete_time`` field will be set 7 days in
+ the future.
+
+ Soon afterward:
+
+ * All tables within the instance will become unavailable.
+
+ Prior to the instance's ``delete_time``:
+
+ * The instance can be recovered with a call to ``UndeleteInstance``.
+ * All other attempts to modify or delete the instance will be rejected.
+
+ At the instance's ``delete_time``:
+
+ * The instance and **all of its tables** will immediately and
+ irrevocably disappear from the API, and their data will be
+ permanently deleted.
+ """
+        request_pb = messages_v2_pb2.DeleteInstanceRequest(name=self.name)
+        # We expect a `google.protobuf.empty_pb2.Empty`
+        self._client._instance_stub.DeleteInstance(
+            request_pb, self._client.timeout_seconds)
+
+
+    def cluster(self, cluster_id, serve_nodes=3):
+ """Factory to create a cluster associated with this client.
+
+ :type cluster_id: str
+ :param cluster_id: The ID of the cluster.
+
+ :type serve_nodes: int
+ :param serve_nodes: (Optional) The number of nodes in the cluster.
+ Defaults to 3.
+
+ :rtype: :class:`.Cluster`
+ :returns: The cluster owned by this instance.
+ """
+        return Cluster(cluster_id, self, serve_nodes=serve_nodes)
+
+
+    def list_clusters(self):
+ """Lists clusters in this instance.
+
+ :rtype: tuple
+ :returns: A pair of results, the first is a list of :class:`.Cluster` s
+ returned and the second is a list of strings (the failed
+ locations in the request).
+ """
+        request_pb = messages_v2_pb2.ListClustersRequest(parent=self.name)
+        # We expect a `.cluster_messages_v1_pb2.ListClustersResponse`
+        list_clusters_response = self._client._instance_stub.ListClusters(
+            request_pb, self._client.timeout_seconds)
+
+        failed_locations = [
+            location for location in list_clusters_response.failed_locations]
+        clusters = [Cluster.from_pb(cluster_pb, self)
+                    for cluster_pb in list_clusters_response.clusters]
+        return clusters, failed_locations
+
+
+    def table(self, table_id):
+ """Factory to create a table associated with this instance.
+
+ :type table_id: str
+ :param table_id: The ID of the table.
+
+ :rtype: :class:`Table <gcloud.bigtable.table.Table>`
+ :returns: The table owned by this instance.
+ """
+        return Table(table_id, self)
+
+
+    def list_tables(self):
+ """List the tables in this instance.
+
+ :rtype: list of :class:`Table <gcloud.bigtable.table.Table>`
+ :returns: The list of tables owned by the instance.
+ :raises: :class:`ValueError <exceptions.ValueError>` if one of the
+ returned tables has a name that is not of the expected format.
+ """
+        request_pb = table_messages_v2_pb2.ListTablesRequest(parent=self.name)
+        # We expect a `table_messages_v2_pb2.ListTablesResponse`
+        table_list_pb = self._client._table_stub.ListTables(
+            request_pb, self._client.timeout_seconds)
+
+        result = []
+        for table_pb in table_list_pb.tables:
+            table_prefix = self.name + '/tables/'
+            if not table_pb.name.startswith(table_prefix):
+                raise ValueError('Table name %s not of expected format' % (
+                    table_pb.name,))
+            table_id = table_pb.name[len(table_prefix):]
+            result.append(self.table(table_id))
+
+        return result
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""User friendly container for Google Cloud Bigtable Row."""
+
+
+import struct
+
+import six
+
+from gcloud._helpers import _datetime_from_microseconds
+from gcloud._helpers import _microseconds_from_datetime
+from gcloud._helpers import _to_bytes
+from gcloud.bigtable._generated import (
+    data_pb2 as data_v2_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_pb2 as messages_v2_pb2)
+
+
+_PACK_I64 = struct.Struct('>q').pack
+
+MAX_MUTATIONS = 100000
+"""The maximum number of mutations that a row can accumulate."""
+
+
+
+class Row(object):
+ """Base representation of a Google Cloud Bigtable Row.
+
+ This class has three subclasses corresponding to the three
+ RPC methods for sending row mutations:
+
+ * :class:`DirectRow` for ``MutateRow``
+ * :class:`ConditionalRow` for ``CheckAndMutateRow``
+ * :class:`AppendRow` for ``ReadModifyWriteRow``
+
+ :type row_key: bytes
+ :param row_key: The key for the current row.
+
+ :type table: :class:`Table <gcloud.bigtable.table.Table>`
+ :param table: The table that owns the row.
+ """
+
+    def __init__(self, row_key, table):
+        self._row_key = _to_bytes(row_key)
+        self._table = table
+
+
+class _SetDeleteRow(Row):
+ """Row helper for setting or deleting cell values.
+
+ Implements helper methods to add mutations to set or delete cell contents:
+
+ * :meth:`set_cell`
+ * :meth:`delete`
+ * :meth:`delete_cell`
+ * :meth:`delete_cells`
+
+ :type row_key: bytes
+ :param row_key: The key for the current row.
+
+ :type table: :class:`Table <gcloud.bigtable.table.Table>`
+ :param table: The table that owns the row.
+ """
+
+    ALL_COLUMNS = object()
+ """Sentinel value used to indicate all columns in a column family."""
+
+    def _get_mutations(self, state):
+ """Gets the list of mutations for a given state.
+
+ This method is intended to be implemented by subclasses.
+
+ ``state`` may not need to be used by all subclasses.
+
+ :type state: bool
+ :param state: The state that the mutation should be
+ applied in.
+
+ :raises: :class:`NotImplementedError <exceptions.NotImplementedError>`
+ always.
+ """
+        raise NotImplementedError
+
+    def _set_cell(self, column_family_id, column, value, timestamp=None,
+                  state=None):
+ """Helper for :meth:`set_cell`
+
+ Adds a mutation to set the value in a specific cell.
+
+ ``state`` is unused by :class:`DirectRow` but is used by
+ subclasses.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column.
+ Must be of the form
+ ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type column: bytes
+ :param column: The column within the column family where the cell
+ is located.
+
+ :type value: bytes or :class:`int`
+ :param value: The value to set in the cell. If an integer is used,
+ will be interpreted as a 64-bit big-endian signed
+ integer (8 bytes).
+
+ :type timestamp: :class:`datetime.datetime`
+ :param timestamp: (Optional) The timestamp of the operation.
+
+ :type state: bool
+ :param state: (Optional) The state that is passed along to
+ :meth:`_get_mutations`.
+ """
+        column = _to_bytes(column)
+        if isinstance(value, six.integer_types):
+            value = _PACK_I64(value)
+        value = _to_bytes(value)
+        if timestamp is None:
+            # Use -1 for current Bigtable server time.
+            timestamp_micros = -1
+        else:
+            timestamp_micros = _microseconds_from_datetime(timestamp)
+            # Truncate to millisecond granularity.
+            timestamp_micros -= (timestamp_micros % 1000)
+
+        mutation_val = data_v2_pb2.Mutation.SetCell(
+            family_name=column_family_id,
+            column_qualifier=column,
+            timestamp_micros=timestamp_micros,
+            value=value,
+        )
+        mutation_pb = data_v2_pb2.Mutation(set_cell=mutation_val)
+        self._get_mutations(state).append(mutation_pb)
+
+    def _delete(self, state=None):
+ """Helper for :meth:`delete`
+
+ Adds a delete mutation (for the entire row) to the accumulated
+ mutations.
+
+ ``state`` is unused by :class:`DirectRow` but is used by
+ subclasses.
+
+ :type state: bool
+ :param state: (Optional) The state that is passed along to
+ :meth:`_get_mutations`.
+ """
+        mutation_val = data_v2_pb2.Mutation.DeleteFromRow()
+        mutation_pb = data_v2_pb2.Mutation(delete_from_row=mutation_val)
+        self._get_mutations(state).append(mutation_pb)
+
+    def _delete_cells(self, column_family_id, columns, time_range=None,
+                      state=None):
+ """Helper for :meth:`delete_cell` and :meth:`delete_cells`.
+
+ ``state`` is unused by :class:`DirectRow` but is used by
+ subclasses.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column
+ or columns with cells being deleted. Must be
+ of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type columns: :class:`list` of :class:`str` /
+ :func:`unicode <unicode>`, or :class:`object`
+ :param columns: The columns within the column family that will have
+ cells deleted. If :attr:`ALL_COLUMNS` is used then
+ the entire column family will be deleted from the row.
+
+ :type time_range: :class:`TimestampRange`
+ :param time_range: (Optional) The range of time within which cells
+ should be deleted.
+
+ :type state: bool
+ :param state: (Optional) The state that is passed along to
+ :meth:`_get_mutations`.
+ """
+        mutations_list = self._get_mutations(state)
+        if columns is self.ALL_COLUMNS:
+            mutation_val = data_v2_pb2.Mutation.DeleteFromFamily(
+                family_name=column_family_id,
+            )
+            mutation_pb = data_v2_pb2.Mutation(delete_from_family=mutation_val)
+            mutations_list.append(mutation_pb)
+        else:
+            delete_kwargs = {}
+            if time_range is not None:
+                delete_kwargs['time_range'] = time_range.to_pb()
+
+            to_append = []
+            for column in columns:
+                column = _to_bytes(column)
+                # time_range will never change if present, but the rest of
+                # delete_kwargs will
+                delete_kwargs.update(
+                    family_name=column_family_id,
+                    column_qualifier=column,
+                )
+                mutation_val = data_v2_pb2.Mutation.DeleteFromColumn(
+                    **delete_kwargs)
+                mutation_pb = data_v2_pb2.Mutation(
+                    delete_from_column=mutation_val)
+                to_append.append(mutation_pb)
+
+            # We don't add the mutations until all columns have been
+            # processed without error.
+            mutations_list.extend(to_append)
+
+
+
+class DirectRow(_SetDeleteRow):
+ """Google Cloud Bigtable Row for sending "direct" mutations.
+
+ These mutations directly set or delete cell contents:
+
+ * :meth:`set_cell`
+ * :meth:`delete`
+ * :meth:`delete_cell`
+ * :meth:`delete_cells`
+
+ These methods can be used directly::
+
+ >>> row = table.row(b'row-key1')
+ >>> row.set_cell(u'fam', b'col1', b'cell-val')
+ >>> row.delete_cell(u'fam', b'col2')
+
+ .. note::
+
+ A :class:`DirectRow` accumulates mutations locally via the
+ :meth:`set_cell`, :meth:`delete`, :meth:`delete_cell` and
+ :meth:`delete_cells` methods. To actually send these mutations to the
+ Google Cloud Bigtable API, you must call :meth:`commit`.
+
+ :type row_key: bytes
+ :param row_key: The key for the current row.
+
+ :type table: :class:`Table <gcloud.bigtable.table.Table>`
+ :param table: The table that owns the row.
+ """
+
+    def __init__(self, row_key, table):
+        super(DirectRow, self).__init__(row_key, table)
+        self._pb_mutations = []
+
+    def _get_mutations(self, state):  # pylint: disable=unused-argument
+ """Gets the list of mutations for a given state.
+
+ ``state`` is unused by :class:`DirectRow` but is used by
+ subclasses.
+
+ :type state: bool
+ :param state: The state that the mutation should be
+ applied in.
+
+ :rtype: list
+ :returns: The list to add new mutations to (for the current state).
+ """
+        return self._pb_mutations
+
+
+    def set_cell(self, column_family_id, column, value, timestamp=None):
+ """Sets a value in this row.
+
+ The cell is determined by the ``row_key`` of this :class:`DirectRow`
+ and the ``column``. The ``column`` must be in an existing
+ :class:`.ColumnFamily` (as determined by ``column_family_id``).
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column.
+ Must be of the form
+ ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type column: bytes
+ :param column: The column within the column family where the cell
+ is located.
+
+ :type value: bytes or :class:`int`
+ :param value: The value to set in the cell. If an integer is used,
+ will be interpreted as a 64-bit big-endian signed
+ integer (8 bytes).
+
+ :type timestamp: :class:`datetime.datetime`
+ :param timestamp: (Optional) The timestamp of the operation.
+ """
+        self._set_cell(column_family_id, column, value, timestamp=timestamp,
+                       state=None)
+
+
+    def delete(self):
+ """Deletes this row from the table.
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+ """
+        self._delete(state=None)
+
+
+    def delete_cell(self, column_family_id, column, time_range=None):
+ """Deletes cell in this row.
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column
+ or columns with cells being deleted. Must be
+ of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type column: bytes
+ :param column: The column within the column family that will have a
+ cell deleted.
+
+ :type time_range: :class:`TimestampRange`
+ :param time_range: (Optional) The range of time within which cells
+ should be deleted.
+ """
+        self._delete_cells(column_family_id, [column], time_range=time_range,
+                           state=None)
+
+
+    def delete_cells(self, column_family_id, columns, time_range=None):
+ """Deletes cells in this row.
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column
+ or columns with cells being deleted. Must be
+ of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type columns: :class:`list` of :class:`str` /
+ :func:`unicode <unicode>`, or :class:`object`
+ :param columns: The columns within the column family that will have
+ cells deleted. If :attr:`ALL_COLUMNS` is used then
+ the entire column family will be deleted from the row.
+
+ :type time_range: :class:`TimestampRange`
+ :param time_range: (Optional) The range of time within which cells
+ should be deleted.
+ """
+        self._delete_cells(column_family_id, columns, time_range=time_range,
+                           state=None)
+
+
+    def commit(self):
+ """Makes a ``MutateRow`` API request.
+
+ If no mutations have been created in the row, no request is made.
+
+ Mutations are applied atomically and in order, meaning that earlier
+ mutations can be masked / negated by later ones. Cells already present
+ in the row are left unchanged unless explicitly changed by a mutation.
+
+ After committing the accumulated mutations, resets the local
+ mutations to an empty list.
+
+ :raises: :class:`ValueError <exceptions.ValueError>` if the number of
+ mutations exceeds the :data:`MAX_MUTATIONS`.
+ """
+        mutations_list = self._get_mutations(None)
+        num_mutations = len(mutations_list)
+        if num_mutations == 0:
+            return
+        if num_mutations > MAX_MUTATIONS:
+            raise ValueError('%d total mutations exceed the maximum allowable '
+                             '%d.' % (num_mutations, MAX_MUTATIONS))
+        request_pb = messages_v2_pb2.MutateRowRequest(
+            table_name=self._table.name,
+            row_key=self._row_key,
+            mutations=mutations_list,
+        )
+        # We expect a `google.protobuf.empty_pb2.Empty`
+        client = self._table._instance._client
+        client._data_stub.MutateRow(request_pb, client.timeout_seconds)
+        self.clear()
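+
+    # Illustrative usage sketch (not from the original source): accumulate
+    # direct mutations locally, then send them in one MutateRow request;
+    # ``table`` is assumed to be a :class:`.Table`::
+    #
+    #     row = table.row(b'row-key1')
+    #     row.set_cell(u'fam', b'col1', b'cell-val')
+    #     row.delete_cell(u'fam', b'col2')
+    #     row.commit()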
+
+
+    def clear(self):
+ """Removes all currently accumulated mutations on the current row."""
+        del self._pb_mutations[:]
+
+
+
+class ConditionalRow(_SetDeleteRow):
+ """Google Cloud Bigtable Row for sending mutations conditionally.
+
+ Each mutation has an associated state: :data:`True` or :data:`False`.
+ When :meth:`commit`-ed, the mutations for the :data:`True`
+ state will be applied if the filter matches any cells in
+ the row, otherwise the :data:`False` state will be applied.
+
+ A :class:`ConditionalRow` accumulates mutations in the same way a
+ :class:`DirectRow` does:
+
+ * :meth:`set_cell`
+ * :meth:`delete`
+ * :meth:`delete_cell`
+ * :meth:`delete_cells`
+
+ the only difference being the extra ``state`` parameter::
+
+ >>> row_cond = table.row(b'row-key2', filter_=row_filter)
+ >>> row_cond.set_cell(u'fam', b'col', b'cell-val', state=True)
+ >>> row_cond.delete_cell(u'fam', b'col', state=False)
+
+ .. note::
+
+ As with :class:`DirectRow`, to actually send these mutations to the
+ Google Cloud Bigtable API, you must call :meth:`commit`.
+
+ :type row_key: bytes
+ :param row_key: The key for the current row.
+
+ :type table: :class:`Table <gcloud.bigtable.table.Table>`
+ :param table: The table that owns the row.
+
+ :type filter_: :class:`.RowFilter`
+ :param filter_: Filter to be used for conditional mutations.
+ """
+    def __init__(self, row_key, table, filter_):
+        super(ConditionalRow, self).__init__(row_key, table)
+        self._filter = filter_
+        self._true_pb_mutations = []
+        self._false_pb_mutations = []
+
+    def _get_mutations(self, state):
+ """Gets the list of mutations for a given state.
+
+ Overridden so that the state can be used in:
+
+ * :meth:`set_cell`
+ * :meth:`delete`
+ * :meth:`delete_cell`
+ * :meth:`delete_cells`
+
+ :type state: bool
+ :param state: The state that the mutation should be
+ applied in.
+
+ :rtype: list
+ :returns: The list to add new mutations to (for the current state).
+ """
+        if state:
+            return self._true_pb_mutations
+        else:
+            return self._false_pb_mutations
+
+
+    def commit(self):
+ """Makes a ``CheckAndMutateRow`` API request.
+
+ If no mutations have been created in the row, no request is made.
+
+ The mutations will be applied conditionally, based on whether the
+ filter matches any cells in the :class:`ConditionalRow` or not. (Each
+ method which adds a mutation has a ``state`` parameter for this
+ purpose.)
+
+ Mutations are applied atomically and in order, meaning that earlier
+ mutations can be masked / negated by later ones. Cells already present
+ in the row are left unchanged unless explicitly changed by a mutation.
+
+ After committing the accumulated mutations, resets the local
+ mutations.
+
+ :rtype: bool
+ :returns: Flag indicating if the filter was matched (which also
+ indicates which set of mutations were applied by the server).
+ :raises: :class:`ValueError <exceptions.ValueError>` if the number of
+ mutations exceeds the :data:`MAX_MUTATIONS`.
+ """
+        true_mutations = self._get_mutations(state=True)
+        false_mutations = self._get_mutations(state=False)
+        num_true_mutations = len(true_mutations)
+        num_false_mutations = len(false_mutations)
+        if num_true_mutations == 0 and num_false_mutations == 0:
+            return
+        if (num_true_mutations > MAX_MUTATIONS or
+                num_false_mutations > MAX_MUTATIONS):
+            raise ValueError(
+                'Exceed the maximum allowable mutations (%d). Had %d true '
+                'mutations and %d false mutations.' % (
+                    MAX_MUTATIONS, num_true_mutations, num_false_mutations))
+
+        request_pb = messages_v2_pb2.CheckAndMutateRowRequest(
+            table_name=self._table.name,
+            row_key=self._row_key,
+            predicate_filter=self._filter.to_pb(),
+            true_mutations=true_mutations,
+            false_mutations=false_mutations,
+        )
+        # We expect a `.messages_v2_pb2.CheckAndMutateRowResponse`
+        client = self._table._instance._client
+        resp = client._data_stub.CheckAndMutateRow(
+            request_pb, client.timeout_seconds)
+        self.clear()
+        return resp.predicate_matched
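+
+    # Illustrative usage sketch (not from the original source): mutations
+    # tagged state=True apply when ``row_filter`` matches a cell in the
+    # row, state=False mutations apply otherwise::
+    #
+    #     row = table.row(b'row-key2', filter_=row_filter)
+    #     row.set_cell(u'fam', b'col', b'cell-val', state=True)
+    #     row.delete_cell(u'fam', b'col', state=False)
+    #     matched = row.commit()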
+
+    # pylint: disable=arguments-differ
+    def set_cell(self, column_family_id, column, value, timestamp=None,
+                 state=True):
+ """Sets a value in this row.
+
+ The cell is determined by the ``row_key`` of this
+ :class:`ConditionalRow` and the ``column``. The ``column`` must be in
+ an existing :class:`.ColumnFamily` (as determined by
+ ``column_family_id``).
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column.
+ Must be of the form
+ ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type column: bytes
+ :param column: The column within the column family where the cell
+ is located.
+
+ :type value: bytes or :class:`int`
+ :param value: The value to set in the cell. If an integer is used,
+ will be interpreted as a 64-bit big-endian signed
+ integer (8 bytes).
+
+ :type timestamp: :class:`datetime.datetime`
+ :param timestamp: (Optional) The timestamp of the operation.
+
+ :type state: bool
+ :param state: (Optional) The state that the mutation should be
+ applied in. Defaults to :data:`True`.
+ """
+        self._set_cell(column_family_id, column, value, timestamp=timestamp,
+                       state=state)
+
+
+    def delete(self, state=True):
+ """Deletes this row from the table.
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+
+ :type state: bool
+ :param state: (Optional) The state that the mutation should be
+ applied in. Defaults to :data:`True`.
+ """
+        self._delete(state=state)
+
+
+    def delete_cell(self, column_family_id, column, time_range=None,
+                    state=True):
+ """Deletes cell in this row.
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column
+ or columns with cells being deleted. Must be
+ of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type column: bytes
+ :param column: The column within the column family that will have a
+ cell deleted.
+
+ :type time_range: :class:`TimestampRange`
+ :param time_range: (Optional) The range of time within which cells
+ should be deleted.
+
+ :type state: bool
+ :param state: (Optional) The state that the mutation should be
+ applied in. Defaults to :data:`True`.
+ """
+        self._delete_cells(column_family_id, [column], time_range=time_range,
+                           state=state)
+
+
+    def delete_cells(self, column_family_id, columns, time_range=None,
+                     state=True):
+ """Deletes cells in this row.
+
+ .. note::
+
+ This method adds a mutation to the accumulated mutations on this
+ row, but does not make an API request. To actually
+ send an API request (with the mutations) to the Google Cloud
+ Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column
+ or columns with cells being deleted. Must be
+ of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type columns: :class:`list` of :class:`str` /
+ :func:`unicode <unicode>`, or :class:`object`
+ :param columns: The columns within the column family that will have
+ cells deleted. If :attr:`ALL_COLUMNS` is used then the
+ entire column family will be deleted from the row.
+
+ :type time_range: :class:`TimestampRange`
+ :param time_range: (Optional) The range of time within which cells
+ should be deleted.
+
+ :type state: bool
+ :param state: (Optional) The state that the mutation should be
+ applied in. Defaults to :data:`True`.
+ """
+        self._delete_cells(column_family_id, columns, time_range=time_range,
+                           state=state)
+    # pylint: enable=arguments-differ
+
+    def clear(self):
+        """Removes all currently accumulated mutations on the current row."""
+        del self._true_pb_mutations[:]
+        del self._false_pb_mutations[:]
+
+
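+# Illustrative sketch, not part of the original module: a conditional write
+# against a hypothetical ``table`` object. ``commit`` sends a single
+# CheckAndMutateRow request and returns whether the predicate filter matched.
+def _example_conditional_write(table, row_filter):
+    cond_row = table.row(b'row-key', filter_=row_filter)
+    # Applied only if ``row_filter`` matches at least one cell in the row.
+    cond_row.set_cell(u'fam1', b'qual', b'value-if-matched', state=True)
+    # Applied only if the filter matches nothing in the row.
+    cond_row.set_cell(u'fam1', b'qual', b'value-if-missed', state=False)
+    return cond_row.commit()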
+
+class AppendRow(Row):
+ """Google Cloud Bigtable Row for sending append mutations.
+
+ These mutations are intended to augment the value of an existing cell
+ and use the methods:
+
+ * :meth:`append_cell_value`
+ * :meth:`increment_cell_value`
+
+ The first works by appending bytes and the second by incrementing an
+ integer (stored in the cell as 8 bytes). In either case, if the
+ cell is empty, it is treated as containing the default empty value (the
+ empty string for bytes or 0 for an integer).
+
+ :type row_key: bytes
+ :param row_key: The key for the current row.
+
+ :type table: :class:`Table <gcloud.bigtable.table.Table>`
+ :param table: The table that owns the row.
+ """
+
+    def __init__(self, row_key, table):
+        super(AppendRow, self).__init__(row_key, table)
+        self._rule_pb_list = []
+
+    def clear(self):
+        """Removes all currently accumulated modifications on current row."""
+        del self._rule_pb_list[:]
+
+    def append_cell_value(self, column_family_id, column, value):
+ """Appends a value to an existing cell.
+
+ .. note::
+
+ This method adds a read-modify rule protobuf to the accumulated
+ read-modify rules on this row, but does not make an API
+ request. To actually send an API request (with the rules) to the
+ Google Cloud Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column.
+ Must be of the form
+ ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type column: bytes
+ :param column: The column within the column family where the cell
+ is located.
+
+ :type value: bytes
+ :param value: The value to append to the existing value in the cell. If
+ the targeted cell is unset, it will be treated as
+ containing the empty string.
+ """
+        column = _to_bytes(column)
+        value = _to_bytes(value)
+        rule_pb = data_v2_pb2.ReadModifyWriteRule(
+            family_name=column_family_id,
+            column_qualifier=column,
+            append_value=value)
+        self._rule_pb_list.append(rule_pb)
+
+    def increment_cell_value(self, column_family_id, column, int_value):
+ """Increments a value in an existing cell.
+
+ Assumes the value in the cell is stored as a 64 bit integer
+ serialized to bytes.
+
+ .. note::
+
+ This method adds a read-modify rule protobuf to the accumulated
+ read-modify rules on this row, but does not make an API
+ request. To actually send an API request (with the rules) to the
+ Google Cloud Bigtable API, call :meth:`commit`.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the column.
+ Must be of the form
+ ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type column: bytes
+ :param column: The column within the column family where the cell
+ is located.
+
+ :type int_value: int
+ :param int_value: The value to increment the existing value in the cell
+ by. If the targeted cell is unset, it will be treated
+ as containing a zero. Otherwise, the targeted cell
+ must contain an 8-byte value (interpreted as a 64-bit
+ big-endian signed integer), or the entire request
+ will fail.
+ """
+        column = _to_bytes(column)
+        rule_pb = data_v2_pb2.ReadModifyWriteRule(
+            family_name=column_family_id,
+            column_qualifier=column,
+            increment_amount=int_value)
+        self._rule_pb_list.append(rule_pb)
+
+    def commit(self):
+ """Makes a ``ReadModifyWriteRow`` API request.
+
+ This commits modifications made by :meth:`append_cell_value` and
+ :meth:`increment_cell_value`. If no modifications were made, makes
+ no API request and just returns ``{}``.
+
+ Modifies a row atomically, reading the latest existing
+ timestamp / value from the specified columns and writing a new value by
+ appending / incrementing. The new cell created uses either the current
+ server time or the highest timestamp of a cell in that column (if it
+ exceeds the server time).
+
+ After committing the accumulated mutations, resets the local mutations.
+
+ .. code:: python
+
+ >>> append_row.commit()
+ {
+ u'col-fam-id': {
+ b'col-name1': [
+ (b'cell-val', datetime.datetime(...)),
+ (b'cell-val-newer', datetime.datetime(...)),
+ ],
+ b'col-name2': [
+ (b'altcol-cell-val', datetime.datetime(...)),
+ ],
+ },
+ u'col-fam-id2': {
+ b'col-name3-but-other-fam': [
+ (b'foo', datetime.datetime(...)),
+ ],
+ },
+ }
+
+ :rtype: dict
+ :returns: The new contents of all modified cells. Returned as a
+ dictionary of column families, each of which holds a
+ dictionary of columns. Each column contains a list of cells
+ modified. Each cell is represented with a two-tuple with the
+ value (in bytes) and the timestamp for the cell.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the number of
+ mutations exceeds the :data:`MAX_MUTATIONS`.
+ """
+        num_mutations = len(self._rule_pb_list)
+        if num_mutations == 0:
+            return {}
+        if num_mutations > MAX_MUTATIONS:
+            raise ValueError('%d total append mutations exceed the maximum '
+                             'allowable %d.' % (num_mutations, MAX_MUTATIONS))
+        request_pb = messages_v2_pb2.ReadModifyWriteRowRequest(
+            table_name=self._table.name,
+            row_key=self._row_key,
+            rules=self._rule_pb_list,
+        )
+        # We expect a `.data_v2_pb2.Row`
+        client = self._table._instance._client
+        row_response = client._data_stub.ReadModifyWriteRow(
+            request_pb, client.timeout_seconds)
+
+        # Reset modifications after committing the request.
+        self.clear()
+
+        # NOTE: We expect row_response.key == self._row_key but don't check.
+        return _parse_rmw_row_response(row_response)
+
+
+def _parse_rmw_row_response(row_response):
+ """Parses the response to a ``ReadModifyWriteRow`` request.
+
+ :type row_response: :class:`.data_v2_pb2.Row`
+ :param row_response: The response row (with only modified cells) from a
+ ``ReadModifyWriteRow`` request.
+
+ :rtype: dict
+ :returns: The new contents of all modified cells. Returned as a
+ dictionary of column families, each of which holds a
+ dictionary of columns. Each column contains a list of cells
+ modified. Each cell is represented with a two-tuple with the
+ value (in bytes) and the timestamp for the cell. For example:
+
+ .. code:: python
+
+ {
+ u'col-fam-id': {
+ b'col-name1': [
+ (b'cell-val', datetime.datetime(...)),
+ (b'cell-val-newer', datetime.datetime(...)),
+ ],
+ b'col-name2': [
+ (b'altcol-cell-val', datetime.datetime(...)),
+ ],
+ },
+ u'col-fam-id2': {
+ b'col-name3-but-other-fam': [
+ (b'foo', datetime.datetime(...)),
+ ],
+ },
+ }
+ """
+    result = {}
+    for column_family in row_response.row.families:
+        column_family_id, curr_family = _parse_family_pb(column_family)
+        result[column_family_id] = curr_family
+    return result
+
+
+def _parse_family_pb(family_pb):
+ """Parses a Family protobuf into a dictionary.
+
+ :type family_pb: :class:`._generated.data_pb2.Family`
+ :param family_pb: The ``Family`` protobuf to be parsed.
+
+ :rtype: tuple
+ :returns: A string and dictionary. The string is the name of the
+ column family and the dictionary has column names (within the
+ family) as keys and cell lists as values. Each cell is
+ represented with a two-tuple with the value (in bytes) and the
+ timestamp for the cell. For example:
+
+ .. code:: python
+
+ {
+ b'col-name1': [
+ (b'cell-val', datetime.datetime(...)),
+ (b'cell-val-newer', datetime.datetime(...)),
+ ],
+ b'col-name2': [
+ (b'altcol-cell-val', datetime.datetime(...)),
+ ],
+ }
+ """
+    result = {}
+    for column in family_pb.columns:
+        result[column.qualifier] = cells = []
+        for cell in column.cells:
+            val_pair = (
+                cell.value,
+                _datetime_from_microseconds(cell.timestamp_micros),
+            )
+            cells.append(val_pair)
+
+    return family_pb.name, result
+
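+# Illustrative sketch, not part of the original module: accumulate append
+# mutations on a hypothetical ``table`` and commit them in a single
+# ReadModifyWriteRow request.
+def _example_append_row(table):
+    append_row = table.row(b'row-key', append=True)
+    # Concatenate bytes onto whatever the cell currently holds.
+    append_row.append_cell_value(u'fam1', b'greeting', b', world')
+    # Treat the cell as a 64-bit big-endian signed integer and add one.
+    append_row.increment_cell_value(u'fam1', b'counter', 1)
+    # Returns the new contents of the modified cells (see ``commit`` above).
+    return append_row.commit()
+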
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Container for Google Cloud Bigtable Cells and Streaming Row Contents."""
+
+
+import copy
+
+import six
+
+from gcloud._helpers import _datetime_from_microseconds
+from gcloud._helpers import _to_bytes
+
+
+
+class Cell(object):
+ """Representation of a Google Cloud Bigtable Cell.
+
+ :type value: bytes
+ :param value: The value stored in the cell.
+
+ :type timestamp: :class:`datetime.datetime`
+ :param timestamp: The timestamp when the cell was stored.
+
+ :type labels: list
+ :param labels: (Optional) List of strings. Labels applied to the cell.
+ """
+
+    def __init__(self, value, timestamp, labels=()):
+        self.value = value
+        self.timestamp = timestamp
+        self.labels = list(labels)
+
+    @classmethod
+    def from_pb(cls, cell_pb):
+ """Create a new cell from a Cell protobuf.
+
+ :type cell_pb: :class:`._generated.data_pb2.Cell`
+ :param cell_pb: The protobuf to convert.
+
+ :rtype: :class:`Cell`
+ :returns: The cell corresponding to the protobuf.
+ """
+        timestamp = _datetime_from_microseconds(cell_pb.timestamp_micros)
+        if cell_pb.labels:
+            return cls(cell_pb.value, timestamp, labels=cell_pb.labels)
+        else:
+            return cls(cell_pb.value, timestamp)
+
+
+class PartialCellData(object):
+ """Representation of partial cell in a Google Cloud Bigtable Table.
+
+ These are expected to be updated directly from a
+ :class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse`
+
+ :type row_key: bytes
+ :param row_key: The key for the row holding the (partial) cell.
+
+ :type family_name: str
+ :param family_name: The family name of the (partial) cell.
+
+ :type qualifier: bytes
+ :param qualifier: The column qualifier of the (partial) cell.
+
+ :type timestamp_micros: int
+ :param timestamp_micros: The timestamp (in microseconds) of the
+ (partial) cell.
+
+ :type labels: list of str
+ :param labels: Labels assigned to the (partial) cell.
+
+ :type value: bytes
+ :param value: The (accumulated) value of the (partial) cell.
+ """
+    def __init__(self, row_key, family_name, qualifier, timestamp_micros,
+                 labels=(), value=b''):
+        self.row_key = row_key
+        self.family_name = family_name
+        self.qualifier = qualifier
+        self.timestamp_micros = timestamp_micros
+        self.labels = labels
+        self.value = value
+
+    def append_value(self, value):
+        """Append bytes from a new chunk to the accumulated value.
+
+        :type value: bytes
+        :param value: The bytes to append.
+        """
+        self.value += value
+
+
+
+class PartialRowData(object):
+ """Representation of partial row in a Google Cloud Bigtable Table.
+
+ These are expected to be updated directly from a
+ :class:`._generated.bigtable_service_messages_pb2.ReadRowsResponse`
+
+ :type row_key: bytes
+ :param row_key: The key for the row holding the (partial) data.
+ """
+
+    def __init__(self, row_key):
+        self._row_key = row_key
+        self._cells = {}
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (other._row_key == self._row_key and
+                other._cells == self._cells)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def to_dict(self):
+ """Convert the cells to a dictionary.
+
+ This is intended to be used with HappyBase, so the column family and
+ column qualifiers are combined (with ``:``).
+
+ :rtype: dict
+ :returns: Dictionary containing all the data in the cells of this row.
+ """
+        result = {}
+        for column_family_id, columns in six.iteritems(self._cells):
+            for column_qual, cells in six.iteritems(columns):
+                key = (_to_bytes(column_family_id) + b':' +
+                       _to_bytes(column_qual))
+                result[key] = cells
+        return result
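+
+    # Illustrative note, not in the original source: for cells stored as
+    # {u'fam': {b'qual': [cell]}}, ``to_dict`` returns {b'fam:qual': [cell]},
+    # matching HappyBase's ``family:qualifier`` column naming.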
+
+    @property
+    def cells(self):
+ """Property returning all the cells accumulated on this partial row.
+
+ :rtype: dict
+ :returns: Dictionary of the :class:`Cell` objects accumulated. This
+ dictionary has two-levels of keys (first for column families
+ and second for column names/qualifiers within a family). For
+ a given column, a list of :class:`Cell` objects is stored.
+ """
+        return copy.deepcopy(self._cells)
+
+    @property
+    def row_key(self):
+ """Getter for the current (partial) row's key.
+
+ :rtype: bytes
+ :returns: The current (partial) row's key.
+ """
+        return self._row_key
+
+
+class InvalidReadRowsResponse(RuntimeError):
+    """Exception raised for invalid response data from the back-end."""
+
+
+class InvalidChunk(RuntimeError):
+    """Exception raised for invalid chunk data from the back-end."""
+
+
+class PartialRowsData(object):
+ """Convenience wrapper for consuming a ``ReadRows`` streaming response.
+
+ :type response_iterator:
+ :class:`grpc.framework.alpha._reexport._CancellableIterator`
+ :param response_iterator: A streaming iterator returned from a
+ ``ReadRows`` request.
+ """
+    START = "Start"  # No responses yet processed.
+    NEW_ROW = "New row"  # No cells yet complete for row.
+    ROW_IN_PROGRESS = "Row in progress"  # Some cells complete for row.
+    CELL_IN_PROGRESS = "Cell in progress"  # Incomplete cell for row.
+
+    def __init__(self, response_iterator):
+        self._response_iterator = response_iterator
+        # Fully-processed rows, keyed by `row_key`.
+        self._rows = {}
+        # Counter for responses pulled from iterator.
+        self._counter = 0
+        # Maybe cached from previous response.
+        self._last_scanned_row_key = None
+        # In-progress row, unset until first response, after commit/reset.
+        self._row = None
+        # Last complete row, unset until first commit.
+        self._previous_row = None
+        # In-progress cell, unset until first response, after completion.
+        self._cell = None
+        # Last complete cell, unset until first completion, after new row.
+        self._previous_cell = None
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other._response_iterator == self._response_iterator
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    @property
+    def state(self):
+ """State machine state.
+
+ :rtype: str
+ :returns: Name of the state corresponding to the current row / chunk
+ processing.
+ """
+        if self._last_scanned_row_key is None:
+            return self.START
+        if self._row is None:
+            assert self._cell is None
+            assert self._previous_cell is None
+            return self.NEW_ROW
+        if self._cell is not None:
+            return self.CELL_IN_PROGRESS
+        if self._previous_cell is not None:
+            return self.ROW_IN_PROGRESS
+        return self.NEW_ROW  # row added, no chunk yet processed
+
+    @property
+    def rows(self):
+ """Property returning all rows accumulated from the stream.
+
+ :rtype: dict
+ :returns: row_key -> :class:`PartialRowData`.
+ """
+        # NOTE: To avoid duplicating large objects, this is just the
+        #       mutable private data.
+        return self._rows
+
+    def cancel(self):
+        """Cancels the iterator, closing the stream."""
+        self._response_iterator.cancel()
+
+    def consume_next(self):
+ """Consume the next ``ReadRowsResponse`` from the stream.
+
+ Parse the response and its chunks into a new/existing row in
+ :attr:`_rows`.
+ """
+        response = six.next(self._response_iterator)
+        self._counter += 1
+
+        if self._last_scanned_row_key is None:  # first response
+            if response.last_scanned_row_key:
+                raise InvalidReadRowsResponse()
+
+        self._last_scanned_row_key = response.last_scanned_row_key
+
+        row = self._row
+        cell = self._cell
+
+        for chunk in response.chunks:
+
+            self._validate_chunk(chunk)
+
+            if chunk.reset_row:
+                row = self._row = None
+                cell = self._cell = self._previous_cell = None
+                continue
+
+            if row is None:
+                row = self._row = PartialRowData(chunk.row_key)
+
+            if cell is None:
+                cell = self._cell = PartialCellData(
+                    chunk.row_key,
+                    chunk.family_name.value,
+                    chunk.qualifier.value,
+                    chunk.timestamp_micros,
+                    chunk.labels,
+                    chunk.value)
+                self._copy_from_previous(cell)
+            else:
+                cell.append_value(chunk.value)
+
+            if chunk.commit_row:
+                self._save_current_row()
+                row = cell = None
+                continue
+
+            if chunk.value_size == 0:
+                self._save_current_cell()
+                cell = None
+
+
+    def consume_all(self, max_loops=None):
+ """Consume the streamed responses until there are no more.
+
+ This simply calls :meth:`consume_next` until there are no
+ more to consume.
+
+ :type max_loops: int
+ :param max_loops: (Optional) Maximum number of times to try to consume
+ an additional ``ReadRowsResponse``. You can use this
+ to avoid long wait times.
+ """
+        curr_loop = 0
+        if max_loops is None:
+            max_loops = float('inf')
+        while curr_loop < max_loops:
+            curr_loop += 1
+            try:
+                self.consume_next()
+            except StopIteration:
+                break
+
+    @staticmethod
+    def _validate_chunk_status(chunk):
+        """Helper for :meth:`_validate_chunk_row_in_progress`, etc."""
+        # No reset with other keys.
+        if chunk.reset_row:
+            _raise_if(chunk.row_key)
+            _raise_if(chunk.HasField('family_name'))
+            _raise_if(chunk.HasField('qualifier'))
+            _raise_if(chunk.timestamp_micros)
+            _raise_if(chunk.labels)
+            _raise_if(chunk.value_size)
+            _raise_if(chunk.value)
+        # No commit with value size.
+        _raise_if(chunk.commit_row and chunk.value_size > 0)
+        # No negative value_size (inferred as a general constraint).
+        _raise_if(chunk.value_size < 0)
+
+    def _validate_chunk_new_row(self, chunk):
+        """Helper for :meth:`_validate_chunk`."""
+        assert self.state == self.NEW_ROW
+        _raise_if(chunk.reset_row)
+        _raise_if(not chunk.row_key)
+        _raise_if(not chunk.family_name)
+        _raise_if(not chunk.qualifier)
+        # This constraint is not enforced in the Go example.
+        _raise_if(chunk.value_size > 0 and chunk.commit_row is not False)
+        # This constraint is from the Go example, not the spec.
+        _raise_if(self._previous_row is not None and
+                  chunk.row_key <= self._previous_row.row_key)
+
+    def _same_as_previous(self, chunk):
+        """Helper for :meth:`_validate_chunk_row_in_progress`."""
+        previous = self._previous_cell
+        return (chunk.row_key == previous.row_key and
+                chunk.family_name == previous.family_name and
+                chunk.qualifier == previous.qualifier and
+                chunk.labels == previous.labels)
+
+    def _validate_chunk_row_in_progress(self, chunk):
+        """Helper for :meth:`_validate_chunk`."""
+        assert self.state == self.ROW_IN_PROGRESS
+        self._validate_chunk_status(chunk)
+        if not chunk.HasField('commit_row') and not chunk.reset_row:
+            _raise_if(not chunk.timestamp_micros or not chunk.value)
+        _raise_if(chunk.row_key and
+                  chunk.row_key != self._row.row_key)
+        _raise_if(chunk.HasField('family_name') and
+                  not chunk.HasField('qualifier'))
+        previous = self._previous_cell
+        _raise_if(self._same_as_previous(chunk) and
+                  chunk.timestamp_micros <= previous.timestamp_micros)
+
+    def _validate_chunk_cell_in_progress(self, chunk):
+        """Helper for :meth:`_validate_chunk`."""
+        assert self.state == self.CELL_IN_PROGRESS
+        self._validate_chunk_status(chunk)
+        self._copy_from_current(chunk)
+
+    def _validate_chunk(self, chunk):
+        """Helper for :meth:`consume_next`."""
+        if self.state == self.NEW_ROW:
+            self._validate_chunk_new_row(chunk)
+        if self.state == self.ROW_IN_PROGRESS:
+            self._validate_chunk_row_in_progress(chunk)
+        if self.state == self.CELL_IN_PROGRESS:
+            self._validate_chunk_cell_in_progress(chunk)
+
+    def _save_current_cell(self):
+        """Helper for :meth:`consume_next`."""
+        row, cell = self._row, self._cell
+        family = row._cells.setdefault(cell.family_name, {})
+        qualified = family.setdefault(cell.qualifier, [])
+        complete = Cell.from_pb(self._cell)
+        qualified.append(complete)
+        self._cell, self._previous_cell = None, cell
+
+    def _copy_from_current(self, chunk):
+        """Helper for :meth:`consume_next`."""
+        current = self._cell
+        if current is not None:
+            if not chunk.row_key:
+                chunk.row_key = current.row_key
+            if not chunk.HasField('family_name'):
+                chunk.family_name.value = current.family_name
+            if not chunk.HasField('qualifier'):
+                chunk.qualifier.value = current.qualifier
+            if not chunk.timestamp_micros:
+                chunk.timestamp_micros = current.timestamp_micros
+            if not chunk.labels:
+                chunk.labels.extend(current.labels)
+
+    def _copy_from_previous(self, cell):
+        """Helper for :meth:`consume_next`."""
+        previous = self._previous_cell
+        if previous is not None:
+            if not cell.row_key:
+                cell.row_key = previous.row_key
+            if not cell.family_name:
+                cell.family_name = previous.family_name
+            if not cell.qualifier:
+                cell.qualifier = previous.qualifier
+
+    def _save_current_row(self):
+        """Helper for :meth:`consume_next`."""
+        if self._cell:
+            self._save_current_cell()
+        self._rows[self._row.row_key] = self._row
+        self._row, self._previous_row = None, self._row
+        self._previous_cell = None
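+
+
+# Illustrative sketch, not part of the original module: drain a ``ReadRows``
+# stream with ``PartialRowsData``. The ``response_iterator`` is assumed to
+# come from a ``ReadRows`` gRPC call.
+def _example_consume_stream(response_iterator):
+    rows_data = PartialRowsData(response_iterator)
+    # Calls ``consume_next`` repeatedly until the stream is exhausted.
+    rows_data.consume_all()
+    # ``rows`` maps row_key -> PartialRowData for each committed row.
+    return rows_data.rows
+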
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Filters for Google Cloud Bigtable Row classes."""
+
+
+from gcloud._helpers import _microseconds_from_datetime
+from gcloud._helpers import _to_bytes
+from gcloud.bigtable._generated import (
+    data_pb2 as data_v2_pb2)
+
+
+
+class RowFilter(object):
+ """Basic filter to apply to cells in a row.
+
+ These values can be combined via :class:`RowFilterChain`,
+ :class:`RowFilterUnion` and :class:`ConditionalRowFilter`.
+
+ .. note::
+
+ This class is a do-nothing base class for all row filters.
+ """
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+class _BoolFilter(RowFilter):
+ """Row filter that uses a boolean flag.
+
+ :type flag: bool
+ :param flag: An indicator if a setting is turned on or off.
+ """
+
+    def __init__(self, flag):
+        self.flag = flag
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.flag == self.flag
+
+
+
+class SinkFilter(_BoolFilter):
+ """Advanced row filter to skip parent filters.
+
+ :type flag: bool
+ :param flag: ADVANCED USE ONLY. Hook for introspection into the row filter.
+ Outputs all cells directly to the output of the read rather
+ than to any parent filter. Cannot be used within the
+ ``predicate_filter``, ``true_filter``, or ``false_filter``
+ of a :class:`ConditionalRowFilter`.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(sink=self.flag)
+
+
+
+class PassAllFilter(_BoolFilter):
+ """Row filter equivalent to not filtering at all.
+
+ :type flag: bool
+ :param flag: Matches all cells, regardless of input. Functionally
+ equivalent to leaving ``filter`` unset, but included for
+ completeness.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(pass_all_filter=self.flag)
+
+
+
+class BlockAllFilter(_BoolFilter):
+ """Row filter that doesn't match any cells.
+
+ :type flag: bool
+ :param flag: Does not match any cells, regardless of input. Useful for
+ temporarily disabling just part of a filter.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(block_all_filter=self.flag)
+
+
+class _RegexFilter(RowFilter):
+ """Row filter that uses a regular expression.
+
+ The ``regex`` must be a valid RE2 pattern. See Google's
+ `RE2 reference`_ for the accepted syntax.
+
+ .. _RE2 reference: https://github.com/google/re2/wiki/Syntax
+
+ :type regex: bytes or str
+ :param regex: A regular expression (RE2) for some row filter.
+ """
+
+    def __init__(self, regex):
+        self.regex = _to_bytes(regex)
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.regex == self.regex
+
+
+
+class RowKeyRegexFilter(_RegexFilter):
+ """Row filter for a row key regular expression.
+
+ The ``regex`` must be a valid RE2 pattern. See Google's
+ `RE2 reference`_ for the accepted syntax.
+
+ .. _RE2 reference: https://github.com/google/re2/wiki/Syntax
+
+ .. note::
+
+ Special care must be taken when choosing the expression. Since
+ each of these properties can contain arbitrary bytes, the ``\\C``
+ escape sequence must be used if a true wildcard is desired. The ``.``
+ character will not match the new line character ``\\n``, which may be
+ present in a binary value.
+
+ :type regex: bytes
+ :param regex: A regular expression (RE2) to match cells from rows with row
+ keys that satisfy this regex. For a
+ ``CheckAndMutateRowRequest``, this filter is unnecessary
+ since the row key is already specified.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(row_key_regex_filter=self.regex)
+
+
+
+class RowSampleFilter(RowFilter):
+ """Matches all cells from a row with probability p.
+
+ :type sample: float
+ :param sample: The probability of matching a cell (must be in the
+ interval ``[0, 1]``).
+ """
+
+    def __init__(self, sample):
+        self.sample = sample
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.sample == self.sample
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(row_sample_filter=self.sample)
+
+
+
+class FamilyNameRegexFilter(_RegexFilter):
+ """Row filter for a family name regular expression.
+
+ The ``regex`` must be a valid RE2 pattern. See Google's
+ `RE2 reference`_ for the accepted syntax.
+
+ .. _RE2 reference: https://github.com/google/re2/wiki/Syntax
+
+ :type regex: str
+ :param regex: A regular expression (RE2) to match cells from columns in a
+ given column family. For technical reasons, the regex must
+ not contain the ``':'`` character, even if it is not being
+ used as a literal.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(family_name_regex_filter=self.regex)
+
+
+
+class ColumnQualifierRegexFilter(_RegexFilter):
+ """Row filter for a column qualifier regular expression.
+
+ The ``regex`` must be a valid RE2 pattern. See Google's
+ `RE2 reference`_ for the accepted syntax.
+
+ .. _RE2 reference: https://github.com/google/re2/wiki/Syntax
+
+ .. note::
+
+ Special care must be taken when choosing the expression. Since
+ each of these properties can contain arbitrary bytes, the ``\\C``
+ escape sequence must be used if a true wildcard is desired. The ``.``
+ character will not match the new line character ``\\n``, which may be
+ present in a binary value.
+
+ :type regex: bytes
+ :param regex: A regular expression (RE2) to match cells from columns that
+ match this regex (irrespective of column family).
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(column_qualifier_regex_filter=self.regex)
+
+
+
+class TimestampRange(object):
+ """Range of time with inclusive lower and exclusive upper bounds.
+
+ :type start: :class:`datetime.datetime`
+ :param start: (Optional) The (inclusive) lower bound of the timestamp
+ range. If omitted, defaults to Unix epoch.
+
+ :type end: :class:`datetime.datetime`
+ :param end: (Optional) The (exclusive) upper bound of the timestamp
+ range. If omitted, no upper bound is used.
+ """
+
+    def __init__(self, start=None, end=None):
+        self.start = start
+        self.end = end
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (other.start == self.start and
+                other.end == self.end)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+    def to_pb(self):
+ """Converts the :class:`TimestampRange` to a protobuf.
+
+ :rtype: :class:`.data_v2_pb2.TimestampRange`
+ :returns: The converted current object.
+ """
+        timestamp_range_kwargs = {}
+        if self.start is not None:
+            timestamp_range_kwargs['start_timestamp_micros'] = (
+                _microseconds_from_datetime(self.start))
+        if self.end is not None:
+            timestamp_range_kwargs['end_timestamp_micros'] = (
+                _microseconds_from_datetime(self.end))
+        return data_v2_pb2.TimestampRange(**timestamp_range_kwargs)
+
+
+
+class TimestampRangeFilter(RowFilter):
+ """Row filter that limits cells to a range of time.
+
+ :type range_: :class:`TimestampRange`
+ :param range_: Range of time that cells should match against.
+ """
+
+    def __init__(self, range_):
+        self.range_ = range_
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.range_ == self.range_
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        First converts the ``range_`` on the current object to a protobuf and
+        then uses it in the ``timestamp_range_filter`` field.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(
+            timestamp_range_filter=self.range_.to_pb())
+
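+# Illustrative sketch, not part of the original module: keep only cells whose
+# timestamps fall in ``[start, end)``, given two UTC ``datetime`` objects.
+def _example_time_window_filter(start, end):
+    return TimestampRangeFilter(TimestampRange(start=start, end=end)).to_pb()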
+
+
+class ColumnRangeFilter(RowFilter):
+ """A row filter to restrict to a range of columns.
+
+ Both the start and end column can be included or excluded in the range.
+ By default, we include them both, but this can be changed with optional
+ flags.
+
+ :type column_family_id: str
+ :param column_family_id: The column family that contains the columns. Must
+ be of the form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type start_column: bytes
+ :param start_column: The start of the range of columns. If no value is
+ used, the backend applies no lower bound to the
+ values.
+
+ :type end_column: bytes
+ :param end_column: The end of the range of columns. If no value is used,
+ the backend applies no upper bound to the values.
+
+ :type inclusive_start: bool
+ :param inclusive_start: Boolean indicating if the start column should be
+ included in the range (or excluded). Defaults
+ to :data:`True` if ``start_column`` is passed and
+ no ``inclusive_start`` was given.
+
+ :type inclusive_end: bool
+ :param inclusive_end: Boolean indicating if the end column should be
+ included in the range (or excluded). Defaults
+ to :data:`True` if ``end_column`` is passed and
+ no ``inclusive_end`` was given.
+
+ :raises: :class:`ValueError <exceptions.ValueError>` if ``inclusive_start``
+ is set but no ``start_column`` is given or if ``inclusive_end``
+ is set but no ``end_column`` is given
+ """
+
+    def __init__(self, column_family_id, start_column=None, end_column=None,
+                 inclusive_start=None, inclusive_end=None):
+        self.column_family_id = column_family_id
+
+        if inclusive_start is None:
+            inclusive_start = True
+        elif start_column is None:
+            raise ValueError('Inclusive start was specified but no '
+                             'start column was given.')
+        self.start_column = start_column
+        self.inclusive_start = inclusive_start
+
+        if inclusive_end is None:
+            inclusive_end = True
+        elif end_column is None:
+            raise ValueError('Inclusive end was specified but no '
+                             'end column was given.')
+        self.end_column = end_column
+        self.inclusive_end = inclusive_end
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (other.column_family_id == self.column_family_id and
+                other.start_column == self.start_column and
+                other.end_column == self.end_column and
+                other.inclusive_start == self.inclusive_start and
+                other.inclusive_end == self.inclusive_end)
+
+
+    def to_pb(self):
+ """Converts the row filter to a protobuf.
+
+ First converts to a :class:`.data_v2_pb2.ColumnRange` and then uses it
+ in the ``column_range_filter`` field.
+
+ :rtype: :class:`.data_v2_pb2.RowFilter`
+ :returns: The converted current object.
+ """
+        column_range_kwargs = {'family_name': self.column_family_id}
+        if self.start_column is not None:
+            if self.inclusive_start:
+                key = 'start_qualifier_closed'
+            else:
+                key = 'start_qualifier_open'
+            column_range_kwargs[key] = _to_bytes(self.start_column)
+        if self.end_column is not None:
+            if self.inclusive_end:
+                key = 'end_qualifier_closed'
+            else:
+                key = 'end_qualifier_open'
+            column_range_kwargs[key] = _to_bytes(self.end_column)
+
+        column_range = data_v2_pb2.ColumnRange(**column_range_kwargs)
+        return data_v2_pb2.RowFilter(column_range_filter=column_range)
+
+
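+# Illustrative sketch, not part of the original module: restrict reads to
+# columns ``qual-a`` (inclusive) through ``qual-z`` (exclusive) in ``fam1``.
+def _example_column_window():
+    return ColumnRangeFilter(u'fam1', start_column=b'qual-a',
+                             end_column=b'qual-z',
+                             inclusive_end=False).to_pb()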
+
+class ValueRegexFilter(_RegexFilter):
+ """Row filter for a value regular expression.
+
+ The ``regex`` must be a valid RE2 pattern. See Google's
+ `RE2 reference`_ for the accepted syntax.
+
+ .. _RE2 reference: https://github.com/google/re2/wiki/Syntax
+
+ .. note::
+
+ Special care must be taken when choosing the expression. Since
+ each of these properties can contain arbitrary bytes, the ``\\C``
+ escape sequence must be used if a true wildcard is desired. The ``.``
+ character will not match the new line character ``\\n``, which may be
+ present in a binary value.
+
+ :type regex: bytes
+ :param regex: A regular expression (RE2) to match cells with values that
+ match this regex.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(value_regex_filter=self.regex)
+
+
+
+class ValueRangeFilter(RowFilter):
+ """A range of values to restrict to in a row filter.
+
+ Will only match cells that have values in this range.
+
+ Both the start and end value can be included or excluded in the range.
+ By default, we include them both, but this can be changed with optional
+ flags.
+
+ :type start_value: bytes
+ :param start_value: The start of the range of values. If no value is used,
+ the backend applies no lower bound to the values.
+
+ :type end_value: bytes
+ :param end_value: The end of the range of values. If no value is used,
+ the backend applies no upper bound to the values.
+
+ :type inclusive_start: bool
+ :param inclusive_start: Boolean indicating if the start value should be
+ included in the range (or excluded). Defaults
+ to :data:`True` if ``start_value`` is passed and
+ no ``inclusive_start`` was given.
+
+ :type inclusive_end: bool
+ :param inclusive_end: Boolean indicating if the end value should be
+ included in the range (or excluded). Defaults
+ to :data:`True` if ``end_value`` is passed and
+ no ``inclusive_end`` was given.
+
+ :raises: :class:`ValueError <exceptions.ValueError>` if ``inclusive_start``
+ is set but no ``start_value`` is given or if ``inclusive_end``
+ is set but no ``end_value`` is given
+ """
+
+    def __init__(self, start_value=None, end_value=None,
+                 inclusive_start=None, inclusive_end=None):
+        if inclusive_start is None:
+            inclusive_start = True
+        elif start_value is None:
+            raise ValueError('Inclusive start was specified but no '
+                             'start value was given.')
+        self.start_value = start_value
+        self.inclusive_start = inclusive_start
+
+        if inclusive_end is None:
+            inclusive_end = True
+        elif end_value is None:
+            raise ValueError('Inclusive end was specified but no '
+                             'end value was given.')
+        self.end_value = end_value
+        self.inclusive_end = inclusive_end
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (other.start_value == self.start_value and
+                other.end_value == self.end_value and
+                other.inclusive_start == self.inclusive_start and
+                other.inclusive_end == self.inclusive_end)
+
+
+    def to_pb(self):
+ """Converts the row filter to a protobuf.
+
+ First converts to a :class:`.data_v2_pb2.ValueRange` and then uses
+ it to create a row filter protobuf.
+
+ :rtype: :class:`.data_v2_pb2.RowFilter`
+ :returns: The converted current object.
+ """
+        value_range_kwargs = {}
+        if self.start_value is not None:
+            if self.inclusive_start:
+                key = 'start_value_closed'
+            else:
+                key = 'start_value_open'
+            value_range_kwargs[key] = _to_bytes(self.start_value)
+        if self.end_value is not None:
+            if self.inclusive_end:
+                key = 'end_value_closed'
+            else:
+                key = 'end_value_open'
+            value_range_kwargs[key] = _to_bytes(self.end_value)
+
+        value_range = data_v2_pb2.ValueRange(**value_range_kwargs)
+        return data_v2_pb2.RowFilter(value_range_filter=value_range)
+
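+# Illustrative sketch, not part of the original module: match only cells whose
+# values sort within ``[b'apple', b'orange')``.
+def _example_value_window():
+    return ValueRangeFilter(start_value=b'apple', end_value=b'orange',
+                            inclusive_end=False).to_pb()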
+
+class _CellCountFilter(RowFilter):
+ """Row filter that uses an integer count of cells.
+
+ The cell count is used as an offset or a limit for the number
+ of results returned.
+
+ :type num_cells: int
+ :param num_cells: An integer count / offset / limit.
+ """
+
+    def __init__(self, num_cells):
+        self.num_cells = num_cells
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.num_cells == self.num_cells
+
+
+
+class CellsRowOffsetFilter(_CellCountFilter):
+ """Row filter to skip cells in a row.
+
+ :type num_cells: int
+ :param num_cells: Skips the first N cells of the row.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(
+            cells_per_row_offset_filter=self.num_cells)
+
+
+
+class CellsRowLimitFilter(_CellCountFilter):
+ """Row filter to limit cells in a row.
+
+ :type num_cells: int
+ :param num_cells: Matches only the first N cells of the row.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(
+            cells_per_row_limit_filter=self.num_cells)
+
+
+
+class CellsColumnLimitFilter(_CellCountFilter):
+ """Row filter to limit cells in a column.
+
+ :type num_cells: int
+ :param num_cells: Matches only the most recent N cells within each column.
+ This filters a (family name, column) pair, based on
+ timestamps of each cell.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(
+            cells_per_column_limit_filter=self.num_cells)
+
+
+
+class StripValueTransformerFilter(_BoolFilter):
+ """Row filter that transforms cells into empty string (0 bytes).
+
+ :type flag: bool
+ :param flag: If :data:`True`, replaces each cell's value with the empty
+ string. As the name indicates, this is more useful as a
+ transformer than a generic query / filter.
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(strip_value_transformer=self.flag)
+
+
+
+class ApplyLabelFilter(RowFilter):
+ """Filter to apply labels to cells.
+
+ Intended to be used as an intermediate filter on a pre-existing filtered
+ result set. This way if two sets are combined, the label can tell where
+ the cell(s) originated. This allows the client to determine which results
+ were produced from which part of the filter.
+
+ .. note::
+
+ Due to a technical limitation of the backend, it is not currently
+ possible to apply multiple labels to a cell.
+
+ :type label: str
+ :param label: Label to apply to cells in the output row. Values must be
+ at most 15 characters long, and match the pattern
+ ``[a-z0-9\\-]+``.
+ """
+
+    def __init__(self, label):
+        self.label = label
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.label == self.label
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        return data_v2_pb2.RowFilter(apply_label_transformer=self.label)
+
+
+class _FilterCombination(RowFilter):
+ """Chain of row filters.
+
+ Sends rows through several filters in sequence. The filters are "chained"
+ together to process a row. After the first filter is applied, the second
+ is applied to the filtered output and so on for subsequent filters.
+
+ :type filters: list
+ :param filters: List of :class:`RowFilter`
+ """
+
+    def __init__(self, filters=None):
+        if filters is None:
+            filters = []
+        self.filters = filters
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return other.filters == self.filters
+
+
+
+class RowFilterChain(_FilterCombination):
+ """Chain of row filters.
+
+ Sends rows through several filters in sequence. The filters are "chained"
+ together to process a row. After the first filter is applied, the second
+ is applied to the filtered output and so on for subsequent filters.
+
+ :type filters: list
+ :param filters: List of :class:`RowFilter`
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        chain = data_v2_pb2.RowFilter.Chain(
+            filters=[row_filter.to_pb() for row_filter in self.filters])
+        return data_v2_pb2.RowFilter(chain=chain)
+
+
+
+class RowFilterUnion(_FilterCombination):
+ """Union of row filters.
+
+ Sends rows through several filters simultaneously, then
+ merges / interleaves all the filtered results together.
+
+ If multiple cells are produced with the same column and timestamp,
+ they will all appear in the output row in an unspecified mutual order.
+
+ :type filters: list
+ :param filters: List of :class:`RowFilter`
+ """
+
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        interleave = data_v2_pb2.RowFilter.Interleave(
+            filters=[row_filter.to_pb() for row_filter in self.filters])
+        return data_v2_pb2.RowFilter(interleave=interleave)
+
+
+
+class ConditionalRowFilter(RowFilter):
+ """Conditional row filter which exhibits ternary behavior.
+
+ Executes one of two filters based on another filter. If the ``base_filter``
+ returns any cells in the row, then ``true_filter`` is executed. If not,
+ then ``false_filter`` is executed.
+
+ .. note::
+
+ The ``base_filter`` does not execute atomically with the true and false
+ filters, which may lead to inconsistent or unexpected results.
+
+ Additionally, executing a :class:`ConditionalRowFilter` has poor
+ performance on the server, especially when ``false_filter`` is set.
+
+ :type base_filter: :class:`RowFilter`
+ :param base_filter: The filter to condition on before executing the
+ true/false filters.
+
+ :type true_filter: :class:`RowFilter`
+ :param true_filter: (Optional) The filter to execute if there are any cells
+ matching ``base_filter``. If not provided, no results
+ will be returned in the true case.
+
+ :type false_filter: :class:`RowFilter`
+ :param false_filter: (Optional) The filter to execute if there are no cells
+ matching ``base_filter``. If not provided, no results
+ will be returned in the false case.
+ """
+
+    def __init__(self, base_filter, true_filter=None, false_filter=None):
+        self.base_filter = base_filter
+        self.true_filter = true_filter
+        self.false_filter = false_filter
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            return False
+        return (other.base_filter == self.base_filter and
+                other.true_filter == self.true_filter and
+                other.false_filter == self.false_filter)
+
+    def to_pb(self):
+        """Converts the row filter to a protobuf.
+
+        :rtype: :class:`.data_v2_pb2.RowFilter`
+        :returns: The converted current object.
+        """
+        condition_kwargs = {'predicate_filter': self.base_filter.to_pb()}
+        if self.true_filter is not None:
+            condition_kwargs['true_filter'] = self.true_filter.to_pb()
+        if self.false_filter is not None:
+            condition_kwargs['false_filter'] = self.false_filter.to_pb()
+        condition = data_v2_pb2.RowFilter.Condition(**condition_kwargs)
+        return data_v2_pb2.RowFilter(condition=condition)
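+
+
+# Illustrative sketch, not part of the original module: compose filters. The
+# chain first restricts cells to column family ``fam1`` and then keeps only
+# the newest cell per column; ``RowFilterUnion`` would instead interleave the
+# outputs of its sub-filters.
+def _example_composed_filter():
+    chain = RowFilterChain(filters=[
+        FamilyNameRegexFilter('fam1'),
+        CellsColumnLimitFilter(1),
+    ])
+    return chain.to_pb()
+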
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""User friendly container for Google Cloud Bigtable Table."""
+
+from gcloud._helpers import _to_bytes
+from gcloud.bigtable._generated import (
+    bigtable_pb2 as data_messages_v2_pb2)
+from gcloud.bigtable._generated import (
+    bigtable_table_admin_pb2 as table_admin_messages_v2_pb2)
+from gcloud.bigtable._generated import (
+    table_pb2 as table_v2_pb2)
+from gcloud.bigtable.column_family import _gc_rule_from_pb
+from gcloud.bigtable.column_family import ColumnFamily
+from gcloud.bigtable.row import AppendRow
+from gcloud.bigtable.row import ConditionalRow
+from gcloud.bigtable.row import DirectRow
+from gcloud.bigtable.row_data import PartialRowsData
+
+
+
+class Table(object):
+ """Representation of a Google Cloud Bigtable Table.
+
+ .. note::
+
+ We don't define any properties on a table other than the name.
+ The only other fields are ``column_families`` and ``granularity``;
+ the ``column_families`` are not stored locally and
+ ``granularity`` is an enum with only one value.
+
+ We can use a :class:`Table` to:
+
+ * :meth:`create` the table
+ * :meth:`delete` the table
+ * :meth:`list_column_families` in the table
+
+ :type table_id: str
+ :param table_id: The ID of the table.
+
+ :type instance: :class:`Instance <.instance.Instance>`
+ :param instance: The instance that owns the table.
+ """
+
+    def __init__(self, table_id, instance):
+        self.table_id = table_id
+        self._instance = instance
+
+    @property
+    def name(self):
+ """Table name used in requests.
+
+ .. note::
+
+ This property will not change if ``table_id`` does not, but the
+ return value is not cached.
+
+ The table name is of the form
+
+ ``"projects/../instances/../tables/{table_id}"``
+
+ :rtype: str
+ :returns: The table name.
+ """
+        return self._instance.name + '/tables/' + self.table_id
+
+    def column_family(self, column_family_id, gc_rule=None):
+ """Factory to create a column family associated with this table.
+
+ :type column_family_id: str
+ :param column_family_id: The ID of the column family. Must be of the
+ form ``[_a-zA-Z0-9][-_.a-zA-Z0-9]*``.
+
+ :type gc_rule: :class:`.GarbageCollectionRule`
+ :param gc_rule: (Optional) The garbage collection settings for this
+ column family.
+
+ :rtype: :class:`.ColumnFamily`
+ :returns: A column family owned by this table.
+ """
+        return ColumnFamily(column_family_id, self, gc_rule=gc_rule)
+
+    def row(self, row_key, filter_=None, append=False):
+ """Factory to create a row associated with this table.
+
+ .. warning::
+
+ At most one of ``filter_`` and ``append`` can be used in a
+ :class:`Row`.
+
+ :type row_key: bytes
+ :param row_key: The key for the row being created.
+
+ :type filter_: :class:`.RowFilter`
+ :param filter_: (Optional) Filter to be used for conditional mutations.
+ See :class:`.DirectRow` for more details.
+
+ :type append: bool
+ :param append: (Optional) Flag to determine if the row should be used
+ for append mutations.
+
+ :rtype: :class:`.DirectRow`
+ :returns: A row owned by this table.
+ :raises: :class:`ValueError <exceptions.ValueError>` if both
+ ``filter_`` and ``append`` are used.
+ """
+        if append and filter_ is not None:
+            raise ValueError('At most one of filter_ and append can be set')
+        if append:
+            return AppendRow(row_key, self)
+        elif filter_ is not None:
+            return ConditionalRow(row_key, self, filter_=filter_)
+        else:
+            return DirectRow(row_key, self)
+
+    def create(self, initial_split_keys=None, column_families=()):
+ """Creates this table.
+
+ .. note::
+
+ A create request returns a
+ :class:`._generated.table_pb2.Table` but we don't use
+ this response.
+
+ :type initial_split_keys: list
+ :param initial_split_keys: (Optional) List of row keys that will be
+ used to initially split the table into
+ several tablets (Tablets are similar to
+ HBase regions). Given two split keys,
+ ``"s1"`` and ``"s2"``, three tablets will be
+ created, spanning the key ranges:
+ ``[, s1)``, ``[s1, s2)``, ``[s2, )``.
+
+ :type column_families: list
+ :param column_families: (Optional) List or other iterable of
+ :class:`.ColumnFamily` instances.
+ """
+        if initial_split_keys is not None:
+            split_pb = table_admin_messages_v2_pb2.CreateTableRequest.Split
+            initial_split_keys = [
+                split_pb(key=key) for key in initial_split_keys]
+
+        table_pb = None
+        if column_families:
+            table_pb = table_v2_pb2.Table()
+            for col_fam in column_families:
+                curr_id = col_fam.column_family_id
+                table_pb.column_families[curr_id].MergeFrom(col_fam.to_pb())
+
+        request_pb = table_admin_messages_v2_pb2.CreateTableRequest(
+            initial_splits=initial_split_keys or [],
+            parent=self._instance.name,
+            table_id=self.table_id,
+            table=table_pb,
+        )
+        client = self._instance._client
+        # We expect a `._generated.table_pb2.Table`
+        client._table_stub.CreateTable(request_pb, client.timeout_seconds)
+
+    def delete(self):
+ """Delete this table."""
+        request_pb = table_admin_messages_v2_pb2.DeleteTableRequest(
+            name=self.name)
+        client = self._instance._client
+        # We expect a `google.protobuf.empty_pb2.Empty`
+        client._table_stub.DeleteTable(request_pb, client.timeout_seconds)
+
+    def list_column_families(self):
+ """List the column families owned by this table.
+
+ :rtype: dict
+ :returns: Dictionary of column families attached to this table. Keys
+ are strings (column family names) and values are
+ :class:`.ColumnFamily` instances.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the column
+ family name from the response does not agree with the computed
+ name from the column family ID.
+ """
+        request_pb = table_admin_messages_v2_pb2.GetTableRequest(
+            name=self.name)
+        client = self._instance._client
+        # We expect a `._generated.table_pb2.Table`
+        table_pb = client._table_stub.GetTable(request_pb,
+                                               client.timeout_seconds)
+
+        result = {}
+        for column_family_id, value_pb in table_pb.column_families.items():
+            gc_rule = _gc_rule_from_pb(value_pb.gc_rule)
+            column_family = self.column_family(column_family_id,
+                                               gc_rule=gc_rule)
+            result[column_family_id] = column_family
+        return result
+
+    def read_row(self, row_key, filter_=None):
+ """Read a single row from this table.
+
+ :type row_key: bytes
+ :param row_key: The key of the row to read from.
+
+ :type filter_: :class:`.RowFilter`
+ :param filter_: (Optional) The filter to apply to the contents of the
+ row. If unset, returns the entire row.
+
+ :rtype: :class:`.PartialRowData`, :data:`NoneType <types.NoneType>`
+ :returns: The contents of the row if any chunks were returned in
+ the response, otherwise :data:`None`.
+ :raises: :class:`ValueError <exceptions.ValueError>` if a commit row
+ chunk is never encountered.
+ """
+        request_pb = _create_row_request(self.name, row_key=row_key,
+                                         filter_=filter_)
+        client = self._instance._client
+        response_iterator = client._data_stub.ReadRows(request_pb,
+                                                       client.timeout_seconds)
+        rows_data = PartialRowsData(response_iterator)
+        rows_data.consume_all()
+        if rows_data.state not in (rows_data.NEW_ROW, rows_data.START):
+            raise ValueError('The row remains partial / is not committed.')
+
+        if len(rows_data.rows) == 0:
+            return None
+
+        return rows_data.rows[row_key]
+
+    def read_rows(self, start_key=None, end_key=None, limit=None,
+                  filter_=None):
+ """Read rows from this table.
+
+ :type start_key: bytes
+ :param start_key: (Optional) The beginning of a range of row keys to
+ read from. The range will include ``start_key``. If
+ left empty, will be interpreted as the empty string.
+
+ :type end_key: bytes
+ :param end_key: (Optional) The end of a range of row keys to read from.
+ The range will not include ``end_key``. If left empty,
+ will be interpreted as an infinite string.
+
+ :type limit: int
+ :param limit: (Optional) The read will terminate after committing to N
+ rows' worth of results. The default (zero) is to return
+ all results.
+
+ :type filter_: :class:`.RowFilter`
+ :param filter_: (Optional) The filter to apply to the contents of the
+ specified row(s). If unset, reads every column in
+ each row.
+
+ :rtype: :class:`.PartialRowsData`
+ :returns: A :class:`.PartialRowsData` convenience wrapper for consuming
+ the streamed results.
+ """
+        request_pb = _create_row_request(
+            self.name, start_key=start_key, end_key=end_key, filter_=filter_,
+            limit=limit)
+        client = self._instance._client
+        response_iterator = client._data_stub.ReadRows(request_pb,
+                                                       client.timeout_seconds)
+        # We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse`
+        return PartialRowsData(response_iterator)
+
+    def sample_row_keys(self):
+ """Read a sample of row keys in the table.
+
+ The returned row keys will delimit contiguous sections of the table of
+ approximately equal size, which can be used to break up the data for
+ distributed tasks like mapreduces.
+
+ The elements in the iterator are SampleRowKeys responses and they have
+ the properties ``offset_bytes`` and ``row_key``. They occur in sorted
+ order. The table might have contents before the first row key in the
+ list and after the last one, but a key containing the empty string
+ indicates "end of table" and will be the last response given, if
+ present.
+
+ .. note::
+
+ Row keys in this list may not have ever been written to or read
+ from, and users should therefore not make any assumptions about the
+ row key structure that are specific to their use case.
+
+ The ``offset_bytes`` field on a response indicates the approximate
+ total storage space used by all rows in the table which precede
+ ``row_key``. Buffering the contents of all rows between two subsequent
+ samples would require space roughly equal to the difference in their
+ ``offset_bytes`` fields.
+
+ :rtype: :class:`grpc.framework.alpha._reexport._CancellableIterator`
+ :returns: A cancel-able iterator. Can be consumed by calling ``next()``
+ or by casting to a :class:`list` and can be cancelled by
+ calling ``cancel()``.
+ """
+        request_pb = data_messages_v2_pb2.SampleRowKeysRequest(
+            table_name=self.name)
+        client = self._instance._client
+        response_iterator = client._data_stub.SampleRowKeys(
+            request_pb, client.timeout_seconds)
+        return response_iterator
+
+
+def _create_row_request(table_name, row_key=None, start_key=None,
+                        end_key=None, filter_=None, limit=None):
+ """Creates a request to read rows in a table.
+
+ :type table_name: str
+ :param table_name: The name of the table to read from.
+
+ :type row_key: bytes
+ :param row_key: (Optional) The key of a specific row to read from.
+
+ :type start_key: bytes
+ :param start_key: (Optional) The beginning of a range of row keys to
+ read from. The range will include ``start_key``. If
+ left empty, will be interpreted as the empty string.
+
+ :type end_key: bytes
+ :param end_key: (Optional) The end of a range of row keys to read from.
+ The range will not include ``end_key``. If left empty,
+ will be interpreted as an infinite string.
+
+ :type filter_: :class:`.RowFilter`
+ :param filter_: (Optional) The filter to apply to the contents of the
+ specified row(s). If unset, reads the entire table.
+
+ :type limit: int
+ :param limit: (Optional) The read will terminate after committing to N
+ rows' worth of results. The default (zero) is to return
+ all results.
+
+ :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest`
+ :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs.
+ :raises: :class:`ValueError <exceptions.ValueError>` if both
+ ``row_key`` and one of ``start_key`` and ``end_key`` are set
+ """
+    request_kwargs = {'table_name': table_name}
+    if (row_key is not None and
+            (start_key is not None or end_key is not None)):
+        raise ValueError('Row key and row range cannot be '
+                         'set simultaneously')
+    range_kwargs = {}
+    if start_key is not None or end_key is not None:
+        if start_key is not None:
+            range_kwargs['start_key_closed'] = _to_bytes(start_key)
+        if end_key is not None:
+            range_kwargs['end_key_open'] = _to_bytes(end_key)
+    if filter_ is not None:
+        request_kwargs['filter'] = filter_.to_pb()
+    if limit is not None:
+        request_kwargs['rows_limit'] = limit
+
+    message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs)
+
+    if row_key is not None:
+        message.rows.row_keys.append(_to_bytes(row_key))
+
+    if range_kwargs:
+        message.rows.row_ranges.add(**range_kwargs)
+
+    return message
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Base classes for client used to interact with Google Cloud APIs."""
+
+from oauth2client.service_account import ServiceAccountCredentials
+import six
+
+from gcloud._helpers import _determine_default_project
+from gcloud.connection import Connection
+from gcloud.credentials import get_credentials
+
+
+class _ClientFactoryMixin(object):
+ """Mixin to allow factories that create credentials.
+
+ .. note::
+
+ This class is virtual.
+ """
+
+ @classmethod
+    def from_service_account_json(cls, json_credentials_path,
+                                  *args, **kwargs):
+ """Factory to retrieve JSON credentials while creating client.
+
+ :type json_credentials_path: string
+ :param json_credentials_path: The path to a private key file (this file
+ was given to you when you created the
+ service account). This file must contain
+ a JSON object with a private key and
+ other credentials information (downloaded
+ from the Google APIs console).
+
+ :type args: tuple
+ :param args: Remaining positional arguments to pass to constructor.
+
+ :type kwargs: dict
+ :param kwargs: Remaining keyword arguments to pass to constructor.
+
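+        For example (a sketch; the key-file path is illustrative)::
+
+            >>> client = Client.from_service_account_json(
+            ...     '/path/to/keyfile.json')
+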
+        :rtype: :class:`gcloud.client.Client`
+ :returns: The client created with the retrieved JSON credentials.
+ :raises: :class:`TypeError` if there is a conflict with the kwargs
+ and the credentials created by the factory.
+ """
+        if 'credentials' in kwargs:
+            raise TypeError('credentials must not be in keyword arguments')
+        credentials = ServiceAccountCredentials.from_json_keyfile_name(
+            json_credentials_path)
+        kwargs['credentials'] = credentials
+        return cls(*args, **kwargs)
+
+ @classmethod
+    def from_service_account_p12(cls, client_email, private_key_path,
+                                 *args, **kwargs):
+ """Factory to retrieve P12 credentials while creating client.
+
+ .. note::
+ Unless you have an explicit reason to use a PKCS12 key for your
+ service account, we recommend using a JSON key.
+
+ :type client_email: string
+ :param client_email: The e-mail attached to the service account.
+
+ :type private_key_path: string
+ :param private_key_path: The path to a private key file (this file was
+ given to you when you created the service
+ account). This file must be in P12 format.
+
+ :type args: tuple
+ :param args: Remaining positional arguments to pass to constructor.
+
+ :type kwargs: dict
+ :param kwargs: Remaining keyword arguments to pass to constructor.
+
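+        For example (a sketch; the e-mail and key path are illustrative)::
+
+            >>> client = Client.from_service_account_p12(
+            ...     'svc-account@example.iam.gserviceaccount.com',
+            ...     '/path/to/key.p12')
+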
+ :rtype: :class:`gcloud.client.Client`
+ :returns: The client created with the retrieved P12 credentials.
+ :raises: :class:`TypeError` if there is a conflict with the kwargs
+ and the credentials created by the factory.
+ """
+        if 'credentials' in kwargs:
+            raise TypeError('credentials must not be in keyword arguments')
+        credentials = ServiceAccountCredentials.from_p12_keyfile(
+            client_email, private_key_path)
+        kwargs['credentials'] = credentials
+        return cls(*args, **kwargs)
+
+
+
+class Client(_ClientFactoryMixin):
+ """Client to bundle configuration needed for API requests.
+
+ Assumes that the associated ``_connection_class`` only accepts
+ ``http`` and ``credentials`` in its constructor.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
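+
+    For example, a minimal sketch (typically you would instantiate a
+    service-specific subclass; credentials are inferred from the
+    environment)::
+
+        >>> client = Client()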
+ """
+
+    _connection_class = Connection
+
+    def __init__(self, credentials=None, http=None):
+        if credentials is None and http is None:
+            credentials = get_credentials()
+        self.connection = self._connection_class(
+            credentials=credentials, http=http)
+
+
+class _ClientProjectMixin(object):
+ """Mixin to allow setting the project on the client.
+
+ :type project: string
+ :param project: the project which the client acts on behalf of. If not
+ passed falls back to the default inferred from the
+ environment.
+
+ :raises: :class:`EnvironmentError` if the project is neither passed in nor
+ set in the environment. :class:`ValueError` if the project value
+ is invalid.
+ """
+
+    def __init__(self, project=None):
+        project = self._determine_default(project)
+        if project is None:
+            raise EnvironmentError('Project was not passed and could not be '
+                                   'determined from the environment.')
+        if isinstance(project, six.binary_type):
+            project = project.decode('utf-8')
+        if not isinstance(project, six.string_types):
+            raise ValueError('Project must be a string.')
+        self.project = project
+
+ @staticmethod
+    def _determine_default(project):
+        """Helper: use default project detection."""
+        return _determine_default_project(project)
+
+
+
+class JSONClient(Client, _ClientProjectMixin):
+    """Client for a Google JSON-based API.
+
+ Assumes such APIs use the ``project`` and the client needs to store this
+ value.
+
+ :type project: string
+ :param project: the project which the client acts on behalf of. If not
+ passed falls back to the default inferred from the
+ environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+
+ :raises: :class:`ValueError` if the project is neither passed in nor
+ set in the environment.
+ """
+
+    def __init__(self, project=None, credentials=None, http=None):
+        _ClientProjectMixin.__init__(self, project=project)
+        Client.__init__(self, credentials=credentials, http=http)
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared implementation of connections to API servers."""
+
+import json
+from pkg_resources import get_distribution
+import six
+from six.moves.urllib.parse import urlencode
+
+import httplib2
+
+from gcloud.exceptions import make_exception
+
+
+API_BASE_URL = 'https://www.googleapis.com'
+"""The base of the API call URL."""
+
+
+
+class Connection(object):
+ """A generic connection to Google Cloud Platform.
+
+ Subclasses should understand only the basic types in method arguments,
+ however they should be capable of returning advanced types.
+
+ If no value is passed in for ``http``, a :class:`httplib2.Http` object
+ will be created and authorized with the ``credentials``. If not, the
+ ``credentials`` and ``http`` need not be related.
+
+ Subclasses may seek to use the private key from ``credentials`` to sign
+ data.
+
+ A custom (non-``httplib2``) HTTP object must have a ``request`` method
+ which accepts the following arguments:
+
+ * ``uri``
+ * ``method``
+ * ``body``
+ * ``headers``
+
+ In addition, ``redirections`` and ``connection_type`` may be used.
+
+ Without the use of ``credentials.authorize(http)``, a custom ``http``
+ object will also need to be able to add a bearer token to API
+ requests and handle token refresh on 401 errors.
+
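+    For illustration, a minimal custom transport wrapping another ``http``
+    object might look like the following sketch (``LoggingHttp`` is not
+    part of the library)::
+
+        >>> class LoggingHttp(object):
+        ...     def __init__(self, wrapped):
+        ...         self._wrapped = wrapped
+        ...     def request(self, uri, method, body=None, headers=None):
+        ...         print('%s %s' % (method, uri))  # log each call
+        ...         return self._wrapped.request(
+        ...             uri=uri, method=method, body=body, headers=headers)
+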
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for this connection.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests.
+ """
+
+ USER_AGENT="gcloud-python/{0}".format(get_distribution('gcloud').version)
+ """The user agent for gcloud-python requests."""
+
+ SCOPE=None
+ """The scopes required for authenticating with a service.
+
+ Needs to be set by subclasses.
+ """
+
+ def__init__(self,credentials=None,http=None):
+ self._http=http
+ self._credentials=self._create_scoped_credentials(
+ credentials,self.SCOPE)
+
+ @property
+    def credentials(self):
+        """Getter for current credentials.
+
+        :rtype: :class:`oauth2client.client.OAuth2Credentials` or
+                :class:`NoneType`
+        :returns: The credentials object associated with this connection.
+        """
+        return self._credentials
+
+ @property
+    def http(self):
+        """A getter for the HTTP transport used in talking to the API.
+
+        :rtype: :class:`httplib2.Http`
+        :returns: An ``Http`` object used to transport data.
+        """
+        if self._http is None:
+            self._http = httplib2.Http()
+            if self._credentials:
+                self._http = self._credentials.authorize(self._http)
+        return self._http
+
+ @staticmethod
+    def _create_scoped_credentials(credentials, scope):
+ """Create a scoped set of credentials if it is required.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to add a scope to.
+
+ :type scope: list of URLs
+ :param scope: the effective service auth scopes for the connection.
+
+ :rtype: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :returns: A new credentials object that has a scope added (if needed).
+ """
+        if credentials:
+            try:
+                if credentials.create_scoped_required():
+                    credentials = credentials.create_scoped(scope)
+            except AttributeError:
+                pass
+        return credentials
+
+
+
+class JSONConnection(Connection):
+ """A connection to a Google JSON-based API.
+
+ These APIs are discovery based. For reference:
+
+ https://developers.google.com/discovery/
+
+ This defines :meth:`api_request` for making a generic JSON
+ API request and API requests are created elsewhere.
+
+ The class constants
+
+ * :attr:`API_BASE_URL`
+ * :attr:`API_VERSION`
+ * :attr:`API_URL_TEMPLATE`
+
+ must be updated by subclasses.
+ """
+
+    API_BASE_URL = None
+    """The base of the API call URL."""
+
+    API_VERSION = None
+    """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = None
+    """A template for the URL of a particular API call."""
+
+    @classmethod
+    def build_api_url(cls, path, query_params=None,
+                      api_base_url=None, api_version=None):
+ """Construct an API url given a few components, some optional.
+
+ Typically, you shouldn't need to use this method.
+
+ :type path: string
+        :param path: The path to the resource (e.g., ``'/b/bucket-name'``).
+
+ :type query_params: dict or list
+ :param query_params: A dictionary of keys and values (or list of
+ key-value pairs) to insert into the query
+ string of the URL.
+
+ :type api_base_url: string
+ :param api_base_url: The base URL for the API endpoint.
+ Typically you won't have to provide this.
+
+ :type api_version: string
+ :param api_version: The version of the API to call.
+ Typically you shouldn't provide this and instead
+ use the default for the library.
+
+ :rtype: string
+ :returns: The URL assembled from the pieces provided.
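+
+        For example, with a subclass whose ``API_URL_TEMPLATE`` is
+        ``'{api_base_url}/storage/{api_version}{path}'`` (an illustrative
+        value), a sketch of typical use::
+
+            >>> url = conn.build_api_url('/b/bucket-name',
+            ...                          query_params={'fields': 'name'})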
+ """
+        url = cls.API_URL_TEMPLATE.format(
+            api_base_url=(api_base_url or cls.API_BASE_URL),
+            api_version=(api_version or cls.API_VERSION),
+            path=path)
+
+        query_params = query_params or {}
+        if query_params:
+            url += '?' + urlencode(query_params)
+
+        return url
+
+    def _make_request(self, method, url, data=None, content_type=None,
+                      headers=None, target_object=None):
+ """A low level method to send a request to the API.
+
+ Typically, you shouldn't need to use this method.
+
+ :type method: string
+ :param method: The HTTP method to use in the request.
+
+ :type url: string
+ :param url: The URL to send the request to.
+
+ :type data: string
+ :param data: The data to send as the body of the request.
+
+ :type content_type: string
+ :param content_type: The proper MIME type of the data provided.
+
+ :type headers: dict
+ :param headers: A dictionary of HTTP headers to send with the request.
+
+ :type target_object: object or :class:`NoneType`
+ :param target_object: Argument to be used by library callers.
+ This can allow custom behavior, for example, to
+ defer an HTTP request and complete initialization
+ of the object at a later time.
+
+ :rtype: tuple of ``response`` (a dictionary of sorts)
+ and ``content`` (a string).
+ :returns: The HTTP response object and the content of the response,
+ returned by :meth:`_do_request`.
+ """
+        headers = headers or {}
+        headers['Accept-Encoding'] = 'gzip'
+
+        if data:
+            content_length = len(str(data))
+        else:
+            content_length = 0
+
+        # NOTE: str is intended, bytes are sufficient for headers.
+        headers['Content-Length'] = str(content_length)
+
+        if content_type:
+            headers['Content-Type'] = content_type
+
+        headers['User-Agent'] = self.USER_AGENT
+
+        return self._do_request(method, url, headers, data, target_object)
+
+    def _do_request(self, method, url, headers, data,
+                    target_object):  # pylint: disable=unused-argument
+ """Low-level helper: perform the actual API request over HTTP.
+
+ Allows batch context managers to override and defer a request.
+
+ :type method: string
+ :param method: The HTTP method to use in the request.
+
+ :type url: string
+ :param url: The URL to send the request to.
+
+ :type headers: dict
+ :param headers: A dictionary of HTTP headers to send with the request.
+
+ :type data: string
+ :param data: The data to send as the body of the request.
+
+ :type target_object: object or :class:`NoneType`
+ :param target_object: Unused ``target_object`` here but may be used
+ by a superclass.
+
+ :rtype: tuple of ``response`` (a dictionary of sorts)
+ and ``content`` (a string).
+ :returns: The HTTP response object and the content of the response.
+ """
+        return self.http.request(uri=url, method=method, headers=headers,
+                                 body=data)
+
+
+    def api_request(self, method, path, query_params=None,
+                    data=None, content_type=None,
+                    api_base_url=None, api_version=None,
+                    expect_json=True, _target_object=None):
+ """Make a request over the HTTP transport to the API.
+
+ You shouldn't need to use this method, but if you plan to
+ interact with the API using these primitives, this is the
+ correct one to use.
+
+ :type method: string
+        :param method: The HTTP method name (e.g., ``GET``, ``POST``).
+ Required.
+
+ :type path: string
+        :param path: The path to the resource (e.g., ``'/b/bucket-name'``).
+ Required.
+
+ :type query_params: dict or list
+ :param query_params: A dictionary of keys and values (or list of
+ key-value pairs) to insert into the query
+ string of the URL.
+
+ :type data: string
+ :param data: The data to send as the body of the request. Default is
+ the empty string.
+
+ :type content_type: string
+ :param content_type: The proper MIME type of the data provided. Default
+ is None.
+
+ :type api_base_url: string
+ :param api_base_url: The base URL for the API endpoint.
+ Typically you won't have to provide this.
+ Default is the standard API base URL.
+
+ :type api_version: string
+ :param api_version: The version of the API to call. Typically
+ you shouldn't provide this and instead use
+ the default for the library. Default is the
+ latest API version supported by
+ gcloud-python.
+
+ :type expect_json: bool
+ :param expect_json: If True, this method will try to parse the
+ response as JSON and raise an exception if
+ that cannot be done. Default is True.
+
+ :type _target_object: :class:`object` or :class:`NoneType`
+ :param _target_object: Protected argument to be used by library
+ callers. This can allow custom behavior, for
+ example, to defer an HTTP request and complete
+ initialization of the object at a later time.
+
+        :raises: Exception if the response code is not in the 2xx range.
+ :rtype: dict or str
+ :returns: The API response payload, either as a raw string or
+ a dictionary if the response is valid JSON.
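+
+        For example (a sketch; the path and query parameters are
+        illustrative)::
+
+            >>> payload = connection.api_request(
+            ...     'GET', '/b/bucket-name', query_params={'fields': 'name'})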
+ """
+        url = self.build_api_url(path=path, query_params=query_params,
+                                 api_base_url=api_base_url,
+                                 api_version=api_version)
+
+        # Making the executive decision that any dictionary
+        # data will be sent properly as JSON.
+        if data and isinstance(data, dict):
+            data = json.dumps(data)
+            content_type = 'application/json'
+
+        response, content = self._make_request(
+            method=method, url=url, data=data, content_type=content_type,
+            target_object=_target_object)
+
+        if not 200 <= response.status < 300:
+            raise make_exception(response, content,
+                                 error_info=method + ' ' + url)
+
+        string_or_bytes = (six.binary_type, six.text_type)
+        if content and expect_json and isinstance(content, string_or_bytes):
+            content_type = response.get('content-type', '')
+            if not content_type.startswith('application/json'):
+                raise TypeError('Expected JSON, got %s' % content_type)
+            if isinstance(content, six.binary_type):
+                content = content.decode('utf-8')
+            return json.loads(content)
+
+        return content
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A simple wrapper around the OAuth2 credentials library."""
+
+import base64
+import datetime
+import six
+from six.moves.urllib.parse import urlencode
+
+from oauth2client import client
+
+from gcloud._helpers import UTC
+from gcloud._helpers import _NOW
+from gcloud._helpers import _microseconds_from_datetime
+
+
+
+def get_credentials():
+ """Gets credentials implicitly from the current environment.
+
+ .. note::
+
+ You should not need to use this function directly. Instead, use a
+ helper method which uses this method under the hood.
+
+ Checks environment in order of precedence:
+
+ * Google App Engine (production and testing)
+ * Environment variable :envvar:`GOOGLE_APPLICATION_CREDENTIALS` pointing to
+ a file with stored credentials information.
+ * Stored "well known" file associated with ``gcloud`` command line tool.
+ * Google Compute Engine production environment.
+
+ The file referred to in :envvar:`GOOGLE_APPLICATION_CREDENTIALS` is
+ expected to contain information about credentials that are ready to use.
+ This means either service account information or user account information
+ with a ready-to-use refresh token:
+
+ .. code:: json
+
+        {
+          "type": "authorized_user",
+          "client_id": "...",
+          "client_secret": "...",
+          "refresh_token": "..."
+        }
+
+ or
+
+ .. code:: json
+
+        {
+          "type": "service_account",
+          "client_id": "...",
+          "client_email": "...",
+          "private_key_id": "...",
+          "private_key": "..."
+        }
+
+ The second of these is simply a JSON key downloaded from the Google APIs
+ console. The first is a close cousin of the "client secrets" JSON file
+ used by :mod:`oauth2client.clientsecrets` but differs in formatting.
+
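+    Typical use, as a sketch::
+
+        >>> from gcloud.credentials import get_credentials
+        >>> credentials = get_credentials()
+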
+ :rtype: :class:`oauth2client.client.GoogleCredentials`,
+ :class:`oauth2client.contrib.appengine.AppAssertionCredentials`,
+ :class:`oauth2client.contrib.gce.AppAssertionCredentials`,
+ :class:`oauth2client.service_account.ServiceAccountCredentials`
+ :returns: A new credentials instance corresponding to the implicit
+ environment.
+ """
+    return client.GoogleCredentials.get_application_default()
+
+
+def _get_signed_query_params(credentials, expiration, string_to_sign):
+ """Gets query parameters for creating a signed URL.
+
+ :type credentials: :class:`oauth2client.client.AssertionCredentials`
+ :param credentials: The credentials used to create a private key
+ for signing text.
+
+ :type expiration: int or long
+ :param expiration: When the signed URL should expire.
+
+ :type string_to_sign: string
+ :param string_to_sign: The string to be signed by the credentials.
+
+    :raises AttributeError: If :meth:`sign_blob` is unavailable.
+
+ :rtype: dict
+ :returns: Query parameters matching the signing credentials with a
+ signed payload.
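+
+    For example (a sketch; assumes ``credentials`` supports ``sign_blob``
+    and that ``string_to_sign`` and the timestamp are illustrative)::
+
+        >>> params = _get_signed_query_params(
+        ...     credentials, 1472342400, string_to_sign)
+        >>> sorted(params)
+        ['Expires', 'GoogleAccessId', 'Signature']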
+ """
+    if not hasattr(credentials, 'sign_blob'):
+        raise AttributeError('you need a private key to sign credentials. '
+                             'the credentials you are currently using %s '
+                             'just contains a token. see https://googlecloud'
+                             'platform.github.io/gcloud-python/stable/gcloud-'
+                             'auth.html#setting-up-a-service-account for more '
+                             'details.' % type(credentials))
+
+    _, signature_bytes = credentials.sign_blob(string_to_sign)
+    signature = base64.b64encode(signature_bytes)
+    service_account_name = credentials.service_account_email
+    return {
+        'GoogleAccessId': service_account_name,
+        'Expires': str(expiration),
+        'Signature': signature,
+    }
+
+
+def _get_expiration_seconds(expiration):
+ """Convert 'expiration' to a number of seconds in the future.
+
+ :type expiration: int, long, datetime.datetime, datetime.timedelta
+ :param expiration: When the signed URL should expire.
+
+    :raises TypeError: When ``expiration`` is not an integer, datetime,
+                       or timedelta.
+
+ :rtype: int
+ :returns: a timestamp as an absolute number of seconds.
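+
+    For example, one hour from now (a sketch; the result depends on the
+    current time)::
+
+        >>> _get_expiration_seconds(datetime.timedelta(hours=1))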
+ """
+    # If it's a timedelta, add it to `now` in UTC.
+    if isinstance(expiration, datetime.timedelta):
+        now = _NOW().replace(tzinfo=UTC)
+        expiration = now + expiration
+
+    # If it's a datetime, convert to a timestamp.
+    if isinstance(expiration, datetime.datetime):
+        micros = _microseconds_from_datetime(expiration)
+        expiration = micros // 10 ** 6
+
+    if not isinstance(expiration, six.integer_types):
+        raise TypeError('Expected an integer timestamp, datetime, or '
+                        'timedelta. Got %s' % type(expiration))
+    return expiration
+
+
+
+def generate_signed_url(credentials, resource, expiration,
+                        api_access_endpoint='',
+                        method='GET', content_md5=None,
+                        content_type=None, response_type=None,
+                        response_disposition=None, generation=None):
+ """Generate signed URL to provide query-string auth'n to a resource.
+
+ .. note::
+
+ Assumes ``credentials`` implements a ``sign_blob()`` method that takes
+ bytes to sign and returns a pair of the key ID (unused here) and the
+ signed bytes (this is abstract in the base class
+ :class:`oauth2client.client.AssertionCredentials`). Also assumes
+ ``credentials`` has a ``service_account_email`` property which
+ identifies the credentials.
+
+ .. note::
+
+ If you are on Google Compute Engine, you can't generate a signed URL.
+ Follow `Issue 922`_ for updates on this. If you'd like to be able to
+ generate a signed URL from GCE, you can use a standard service account
+ from a JSON file rather than a GCE service account.
+
+ See headers `reference`_ for more details on optional arguments.
+
+ .. _Issue 922: https://github.com/GoogleCloudPlatform/\
+ gcloud-python/issues/922
+ .. _reference: https://cloud.google.com/storage/docs/reference-headers
+
+ :type credentials: :class:`oauth2client.appengine.AppAssertionCredentials`
+ :param credentials: Credentials object with an associated private key to
+ sign text.
+
+ :type resource: string
+ :param resource: A pointer to a specific resource
+ (typically, ``/bucket-name/path/to/blob.txt``).
+
+ :type expiration: :class:`int`, :class:`long`, :class:`datetime.datetime`,
+ :class:`datetime.timedelta`
+ :param expiration: When the signed URL should expire.
+
+ :type api_access_endpoint: str
+ :param api_access_endpoint: Optional URI base. Defaults to empty string.
+
+ :type method: str
+ :param method: The HTTP verb that will be used when requesting the URL.
+ Defaults to ``'GET'``.
+
+ :type content_md5: str
+ :param content_md5: (Optional) The MD5 hash of the object referenced by
+ ``resource``.
+
+ :type content_type: str
+ :param content_type: (Optional) The content type of the object referenced
+ by ``resource``.
+
+ :type response_type: str
+ :param response_type: (Optional) Content type of responses to requests for
+ the signed URL. Used to over-ride the content type of
+ the underlying resource.
+
+ :type response_disposition: str
+ :param response_disposition: (Optional) Content disposition of responses to
+ requests for the signed URL.
+
+ :type generation: str
+ :param generation: (Optional) A value that indicates which generation of
+ the resource to fetch.
+
+ :rtype: string
+ :returns: A signed URL you can use to access the resource
+ until expiration.
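+
+    For example (a sketch; the bucket, object, and endpoint are
+    illustrative)::
+
+        >>> url = generate_signed_url(
+        ...     credentials, '/bucket-name/object.txt',
+        ...     datetime.timedelta(hours=1),
+        ...     api_access_endpoint='https://storage.googleapis.com')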
+ """
+    expiration = _get_expiration_seconds(expiration)
+
+    # Generate the string to sign.
+    string_to_sign = '\n'.join([
+        method,
+        content_md5 or '',
+        content_type or '',
+        str(expiration),
+        resource])
+
+    # Set the right query parameters.
+    query_params = _get_signed_query_params(credentials,
+                                            expiration,
+                                            string_to_sign)
+    if response_type is not None:
+        query_params['response-content-type'] = response_type
+    if response_disposition is not None:
+        query_params['response-content-disposition'] = response_disposition
+    if generation is not None:
+        query_params['generation'] = generation
+
+    # Return the built URL.
+    return '{endpoint}{resource}?{querystring}'.format(
+        endpoint=api_access_endpoint, resource=resource,
+        querystring=urlencode(query_params))
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with a batch of updates / deletes.
+
+Batches provide the ability to execute multiple operations
+in a single request to the Cloud Datastore API.
+
+See
+https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Batch_operations
+"""
+
+from gcloud.datastore import helpers
+from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
+
+
+
+class Batch(object):
+ """An abstraction representing a collected group of updates / deletes.
+
+    Used to build up a bulk mutation.
+
+ For example, the following snippet of code will put the two ``save``
+ operations and the ``delete`` operation into the same mutation, and send
+ them to the server in a single API request::
+
+ >>> from gcloud import datastore
+ >>> client = datastore.Client()
+ >>> batch = client.batch()
+ >>> batch.put(entity1)
+ >>> batch.put(entity2)
+ >>> batch.delete(key3)
+ >>> batch.commit()
+
+ You can also use a batch as a context manager, in which case
+ :meth:`commit` will be called automatically if its block exits without
+ raising an exception::
+
+ >>> with batch:
+ ... batch.put(entity1)
+ ... batch.put(entity2)
+ ... batch.delete(key3)
+
+ By default, no updates will be sent if the block exits with an error::
+
+ >>> with batch:
+ ... do_some_work(batch)
+ ... raise Exception() # rolls back
+
+ :type client: :class:`gcloud.datastore.client.Client`
+ :param client: The client used to connect to datastore.
+ """
+
+    _id = None  # "protected" attribute, always None for non-transactions
+
+    _INITIAL = 0
+    """Enum value for _INITIAL status of batch/transaction."""
+
+    _IN_PROGRESS = 1
+    """Enum value for _IN_PROGRESS status of batch/transaction."""
+
+    _ABORTED = 2
+    """Enum value for _ABORTED status of batch/transaction."""
+
+    _FINISHED = 3
+    """Enum value for _FINISHED status of batch/transaction."""
+
+    def __init__(self, client):
+        self._client = client
+        self._commit_request = _datastore_pb2.CommitRequest()
+        self._partial_key_entities = []
+        self._status = self._INITIAL
+
+
+    def current(self):
+        """Return the topmost batch / transaction, or None."""
+        return self._client.current_batch
+
+ @property
+    def project(self):
+        """Getter for project in which the batch will run.
+
+        :rtype: :class:`str`
+        :returns: The project in which the batch will run.
+        """
+        return self._client.project
+
+ @property
+    def namespace(self):
+        """Getter for namespace in which the batch will run.
+
+        :rtype: :class:`str`
+        :returns: The namespace in which the batch will run.
+        """
+        return self._client.namespace
+
+ @property
+    def connection(self):
+        """Getter for connection over which the batch will run.
+
+        :rtype: :class:`gcloud.datastore.connection.Connection`
+        :returns: The connection over which the batch will run.
+        """
+        return self._client.connection
+
+    def _add_partial_key_entity_pb(self):
+ """Adds a new mutation for an entity with a partial key.
+
+ :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity`
+ :returns: The newly created entity protobuf that will be
+ updated and sent with a commit.
+ """
+        new_mutation = self.mutations.add()
+        return new_mutation.insert
+
+    def _add_complete_key_entity_pb(self):
+ """Adds a new mutation for an entity with a completed key.
+
+ :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity`
+ :returns: The newly created entity protobuf that will be
+ updated and sent with a commit.
+ """
+ # We use ``upsert`` for entities with completed keys, rather than
+ # ``insert`` or ``update``, in order not to create race conditions
+ # based on prior existence / removal of the entity.
+        new_mutation = self.mutations.add()
+        return new_mutation.upsert
+
+    def _add_delete_key_pb(self):
+ """Adds a new mutation for a key to be deleted.
+
+ :rtype: :class:`gcloud.datastore._generated.entity_pb2.Key`
+ :returns: The newly created key protobuf that will be
+ deleted when sent with a commit.
+ """
+        new_mutation = self.mutations.add()
+        return new_mutation.delete
+
+ @property
+    def mutations(self):
+ """Getter for the changes accumulated by this batch.
+
+ Every batch is committed with a single commit request containing all
+ the work to be done as mutations. Inside a batch, calling :meth:`put`
+ with an entity, or :meth:`delete` with a key, builds up the request by
+ adding a new mutation. This getter returns the protobuf that has been
+        built up so far.
+
+ :rtype: iterable
+ :returns: The list of :class:`._generated.datastore_pb2.Mutation`
+ protobufs to be sent in the commit request.
+ """
+        return self._commit_request.mutations
+
+
+    def put(self, entity):
+ """Remember an entity's state to be saved during :meth:`commit`.
+
+ .. note::
+ Any existing properties for the entity will be replaced by those
+ currently set on this instance. Already-stored properties which do
+ not correspond to keys set on this instance will be removed from
+ the datastore.
+
+ .. note::
+ Property values which are "text" ('unicode' in Python2, 'str' in
+ Python3) map to 'string_value' in the datastore; values which are
+ "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
+
+ When an entity has a partial key, calling :meth:`commit` sends it as
+ an ``insert`` mutation and the key is completed. On return,
+ the key for the ``entity`` passed in is updated to match the key ID
+ assigned by the server.
+
+ :type entity: :class:`gcloud.datastore.entity.Entity`
+ :param entity: the entity to be saved.
+
+ :raises: ValueError if entity has no key assigned, or if the key's
+ ``project`` does not match ours.
+ """
+        if entity.key is None:
+            raise ValueError("Entity must have a key")
+
+        if self.project != entity.key.project:
+            raise ValueError("Key must be from same project as batch")
+
+        if entity.key.is_partial:
+            entity_pb = self._add_partial_key_entity_pb()
+            self._partial_key_entities.append(entity)
+        else:
+            entity_pb = self._add_complete_key_entity_pb()
+
+        _assign_entity_to_pb(entity_pb, entity)
+
+
+    def delete(self, key):
+ """Remember a key to be deleted during :meth:`commit`.
+
+ :type key: :class:`gcloud.datastore.key.Key`
+ :param key: the key to be deleted.
+
+ :raises: ValueError if key is not complete, or if the key's
+ ``project`` does not match ours.
+ """
+        if key.is_partial:
+            raise ValueError("Key must be complete")
+
+        if self.project != key.project:
+            raise ValueError("Key must be from same project as batch")
+
+        key_pb = key.to_protobuf()
+        self._add_delete_key_pb().CopyFrom(key_pb)
+
+
+    def begin(self):
+ """Begins a batch.
+
+        This method is called automatically when entering a ``with``
+        statement; it can also be called explicitly if you don't want
+        to use a context manager.
+
+ Overridden by :class:`gcloud.datastore.transaction.Transaction`.
+
+ :raises: :class:`ValueError` if the batch has already begun.
+ """
+        if self._status != self._INITIAL:
+            raise ValueError('Batch already started previously.')
+        self._status = self._IN_PROGRESS
+
+    def _commit(self):
+        """Commits the batch.
+
+        This is called by :meth:`commit`.
+        """
+        # NOTE: ``self._commit_request`` will be modified.
+        _, updated_keys = self.connection.commit(
+            self.project, self._commit_request, self._id)
+        # If the back-end returns without error, we are guaranteed that
+        # :meth:`Connection.commit` will return keys that match (length and
+        # order) directly ``_partial_key_entities``.
+        for new_key_pb, entity in zip(updated_keys,
+                                      self._partial_key_entities):
+            new_id = new_key_pb.path[-1].id
+            entity.key = entity.key.completed_key(new_id)
+
+
+    def commit(self):
+ """Commits the batch.
+
+        This is called automatically upon exiting a ``with`` statement;
+        it can also be called explicitly if you don't want to use a
+        context manager.
+ """
+        try:
+            self._commit()
+        finally:
+            self._status = self._FINISHED
+
+
+    def rollback(self):
+ """Rolls back the current batch.
+
+ Marks the batch as aborted (can't be used again).
+
+ Overridden by :class:`gcloud.datastore.transaction.Transaction`.
+ """
+        self._status = self._ABORTED
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Convenience wrapper for invoking APIs/factories w/ a project."""
+
+import os
+
+from gcloud._helpers import _LocalStack
+from gcloud._helpers import (
+    _determine_default_project as _base_default_project)
+from gcloud.client import _ClientProjectMixin
+from gcloud.client import Client as _BaseClient
+from gcloud.datastore import helpers
+from gcloud.datastore.connection import Connection
+from gcloud.datastore.batch import Batch
+from gcloud.datastore.entity import Entity
+from gcloud.datastore.key import Key
+from gcloud.datastore.query import Query
+from gcloud.datastore.transaction import Transaction
+from gcloud.environment_vars import GCD_DATASET
+
+
+_MAX_LOOPS = 128
+"""Maximum number of iterations to wait for deferred keys."""
+
+
+def _get_gcd_project():
+    """Gets the GCD application ID if it can be inferred."""
+    return os.getenv(GCD_DATASET)
+
+
+def _determine_default_project(project=None):
+ """Determine default project explicitly or implicitly as fall-back.
+
+ In implicit case, supports four environments. In order of precedence, the
+ implicit environments are:
+
+ * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing)
+ * GCLOUD_PROJECT environment variable
+ * Google App Engine application ID
+ * Google Compute Engine project ID (from metadata server)
+
+ :type project: string
+ :param project: Optional. The project to use as default.
+
+ :rtype: string or ``NoneType``
+ :returns: Default project if it can be determined.
+ """
+    if project is None:
+        project = _get_gcd_project()
+
+    if project is None:
+        project = _base_default_project(project=project)
+
+    return project
+
+
+def _extended_lookup(connection, project, key_pbs,
+                     missing=None, deferred=None,
+                     eventual=False, transaction_id=None):
+ """Repeat lookup until all keys found (unless stop requested).
+
+ Helper function for :meth:`Client.get_multi`.
+
+ :type connection: :class:`gcloud.datastore.connection.Connection`
+ :param connection: The connection used to connect to datastore.
+
+ :type project: string
+ :param project: The project to make the request for.
+
+ :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
+ :param key_pbs: The keys to retrieve from the datastore.
+
+ :type missing: list
+ :param missing: (Optional) If a list is passed, the key-only entity
+ protobufs returned by the backend as "missing" will be
+ copied into it.
+
+ :type deferred: list
+ :param deferred: (Optional) If a list is passed, the key protobufs returned
+ by the backend as "deferred" will be copied into it.
+
+ :type eventual: bool
+ :param eventual: If False (the default), request ``STRONG`` read
+ consistency. If True, request ``EVENTUAL`` read
+ consistency.
+
+ :type transaction_id: string
+ :param transaction_id: If passed, make the request in the scope of
+ the given transaction. Incompatible with
+ ``eventual==True``.
+
+ :rtype: list of :class:`gcloud.datastore._generated.entity_pb2.Entity`
+ :returns: The requested entities.
+ :raises: :class:`ValueError` if missing / deferred are not null or
+ empty list.
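+
+    For example (a sketch; ``connection`` and ``key`` are assumed to
+    exist, and the project name is illustrative)::
+
+        >>> missing = []
+        >>> entity_pbs = _extended_lookup(
+        ...     connection, 'my-project', [key.to_protobuf()],
+        ...     missing=missing)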
+ """
+    if missing is not None and missing != []:
+        raise ValueError('missing must be None or an empty list')
+
+    if deferred is not None and deferred != []:
+        raise ValueError('deferred must be None or an empty list')
+
+    results = []
+
+    loop_num = 0
+    while loop_num < _MAX_LOOPS:  # loop against possible deferred.
+        loop_num += 1
+
+        results_found, missing_found, deferred_found = connection.lookup(
+            project=project,
+            key_pbs=key_pbs,
+            eventual=eventual,
+            transaction_id=transaction_id,
+        )
+
+        results.extend(results_found)
+
+        if missing is not None:
+            missing.extend(missing_found)
+
+        if deferred is not None:
+            deferred.extend(deferred_found)
+            break
+
+        if len(deferred_found) == 0:
+            break
+
+        # We have deferred keys, and the user didn't ask to know about
+        # them, so retry (but only with the deferred ones).
+        key_pbs = deferred_found
+
+    return results
+
+
+
+class Client(_BaseClient, _ClientProjectMixin):
+ """Convenience wrapper for invoking APIs/factories w/ a project.
+
+ :type project: string
+ :param project: (optional) The project to pass to proxied API methods.
+
+ :type namespace: string
+ :param namespace: (optional) namespace to pass to proxied API methods.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
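+
+    For example, a minimal sketch (the project name is illustrative)::
+
+        >>> from gcloud import datastore
+        >>> client = datastore.Client(project='my-project')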
+ """
+    _connection_class = Connection
+
+    def __init__(self, project=None, namespace=None,
+                 credentials=None, http=None):
+        _ClientProjectMixin.__init__(self, project=project)
+        self.namespace = namespace
+        self._batch_stack = _LocalStack()
+        super(Client, self).__init__(credentials, http)
+
+ @staticmethod
+    def _determine_default(project):
+        """Helper: override default project detection."""
+        return _determine_default_project(project)
+
+    def _push_batch(self, batch):
+ """Push a batch/transaction onto our stack.
+
+ "Protected", intended for use by batch / transaction context mgrs.
+
+ :type batch: :class:`gcloud.datastore.batch.Batch`, or an object
+ implementing its API.
+ :param batch: newly-active batch/transaction.
+ """
+        self._batch_stack.push(batch)
+
+    def _pop_batch(self):
+ """Pop a batch/transaction from our stack.
+
+ "Protected", intended for use by batch / transaction context mgrs.
+
+ :raises: IndexError if the stack is empty.
+ :rtype: :class:`gcloud.datastore.batch.Batch`, or an object
+ implementing its API.
+ :returns: the top-most batch/transaction, after removing it.
+ """
+        return self._batch_stack.pop()
+
+ @property
+    def current_batch(self):
+ """Currently-active batch.
+
+ :rtype: :class:`gcloud.datastore.batch.Batch`, or an object
+ implementing its API, or ``NoneType`` (if no batch is active).
+ :returns: The batch/transaction at the top of the batch stack.
+ """
+        return self._batch_stack.top
+
+ @property
+    def current_transaction(self):
+ """Currently-active transaction.
+
+ :rtype: :class:`gcloud.datastore.transaction.Transaction`, or an object
+ implementing its API, or ``NoneType`` (if no transaction is
+ active).
+ :returns: The transaction at the top of the batch stack.
+ """
+        transaction = self.current_batch
+        if isinstance(transaction, Transaction):
+            return transaction
+
+
+    def get(self, key, missing=None, deferred=None, transaction=None):
+ """Retrieve an entity from a single key (if it exists).
+
+ .. note::
+
+ This is just a thin wrapper over :meth:`get_multi`.
+ The backend API does not make a distinction between a single key or
+ multiple keys in a lookup request.
+
+ :type key: :class:`gcloud.datastore.key.Key`
+ :param key: The key to be retrieved from the datastore.
+
+ :type missing: list
+ :param missing: (Optional) If a list is passed, the key-only entities
+ returned by the backend as "missing" will be copied
+ into it.
+
+ :type deferred: list
+ :param deferred: (Optional) If a list is passed, the keys returned
+ by the backend as "deferred" will be copied into it.
+
+ :type transaction: :class:`gcloud.datastore.transaction.Transaction`
+ :param transaction: (Optional) Transaction to use for read consistency.
+ If not passed, uses current transaction, if set.
+
+ :rtype: :class:`gcloud.datastore.entity.Entity` or ``NoneType``
+ :returns: The requested entity if it exists.
+ """
+        entities = self.get_multi(keys=[key], missing=missing,
+                                  deferred=deferred, transaction=transaction)
+        if entities:
+            return entities[0]
+
+
+    def get_multi(self, keys, missing=None, deferred=None, transaction=None):
+ """Retrieve entities, along with their attributes.
+
+ :type keys: list of :class:`gcloud.datastore.key.Key`
+ :param keys: The keys to be retrieved from the datastore.
+
+ :type missing: list
+ :param missing: (Optional) If a list is passed, the key-only entities
+ returned by the backend as "missing" will be copied
+ into it. If the list is not empty, an error will occur.
+
+ :type deferred: list
+ :param deferred: (Optional) If a list is passed, the keys returned
+ by the backend as "deferred" will be copied into it.
+ If the list is not empty, an error will occur.
+
+ :type transaction: :class:`gcloud.datastore.transaction.Transaction`
+ :param transaction: (Optional) Transaction to use for read consistency.
+ If not passed, uses current transaction, if set.
+
+ :rtype: list of :class:`gcloud.datastore.entity.Entity`
+ :returns: The requested entities.
+ :raises: :class:`ValueError` if one or more of ``keys`` has a project
+ which does not match our project.
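+
+        For example (a sketch; the kind and IDs are illustrative)::
+
+            >>> keys = [client.key('Task', 1), client.key('Task', 2)]
+            >>> entities = client.get_multi(keys)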
+ """
+        if not keys:
+            return []
+
+        ids = set(key.project for key in keys)
+        for current_id in ids:
+            if current_id != self.project:
+                raise ValueError('Keys do not match project')
+
+        if transaction is None:
+            transaction = self.current_transaction
+
+        entity_pbs = _extended_lookup(
+            connection=self.connection,
+            project=self.project,
+            key_pbs=[k.to_protobuf() for k in keys],
+            missing=missing,
+            deferred=deferred,
+            transaction_id=transaction and transaction.id,
+        )
+
+        if missing is not None:
+            missing[:] = [
+                helpers.entity_from_protobuf(missed_pb)
+                for missed_pb in missing]
+
+        if deferred is not None:
+            deferred[:] = [
+                helpers.key_from_protobuf(deferred_pb)
+                for deferred_pb in deferred]
+
+        return [helpers.entity_from_protobuf(entity_pb)
+                for entity_pb in entity_pbs]
+
+
+    def put(self, entity):
+ """Save an entity in the Cloud Datastore.
+
+ .. note::
+
+ This is just a thin wrapper over :meth:`put_multi`.
+ The backend API does not make a distinction between a single
+ entity or multiple entities in a commit request.
+
+ :type entity: :class:`gcloud.datastore.entity.Entity`
+ :param entity: The entity to be saved to the datastore.
+ """
+        self.put_multi(entities=[entity])
+
+
+    def put_multi(self, entities):
+ """Save entities in the Cloud Datastore.
+
+ :type entities: list of :class:`gcloud.datastore.entity.Entity`
+ :param entities: The entities to be saved to the datastore.
+
+ :raises: :class:`ValueError` if ``entities`` is a single entity.
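+
+        For example (a sketch; the kind is illustrative)::
+
+            >>> entity = Entity(key=client.key('Task'))
+            >>> client.put_multi([entity])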
+ """
+        if isinstance(entities, Entity):
+            raise ValueError("Pass a sequence of entities")
+
+        if not entities:
+            return
+
+        current = self.current_batch
+        in_batch = current is not None
+
+        if not in_batch:
+            current = self.batch()
+
+        for entity in entities:
+            current.put(entity)
+
+        if not in_batch:
+            current.commit()
+
+
+    def delete(self, key):
+ """Delete the key in the Cloud Datastore.
+
+ .. note::
+
+ This is just a thin wrapper over :meth:`delete_multi`.
+ The backend API does not make a distinction between a single key or
+ multiple keys in a commit request.
+
+ :type key: :class:`gcloud.datastore.key.Key`
+ :param key: The key to be deleted from the datastore.
+ """
+        self.delete_multi(keys=[key])
+
+
+    def delete_multi(self, keys):
+ """Delete keys from the Cloud Datastore.
+
+ :type keys: list of :class:`gcloud.datastore.key.Key`
+ :param keys: The keys to be deleted from the datastore.
+ """
+        if not keys:
+            return
+
+        # We allow partial keys to attempt a delete, the backend will fail.
+        current = self.current_batch
+        in_batch = current is not None
+
+        if not in_batch:
+            current = self.batch()
+
+        for key in keys:
+            current.delete(key)
+
+        if not in_batch:
+            current.commit()
+
+
+    def allocate_ids(self, incomplete_key, num_ids):
+ """Allocate a list of IDs from a partial key.
+
+ :type incomplete_key: :class:`gcloud.datastore.key.Key`
+ :param incomplete_key: Partial key to use as base for allocated IDs.
+
+ :type num_ids: int
+ :param num_ids: The number of IDs to allocate.
+
+ :rtype: list of :class:`gcloud.datastore.key.Key`
+ :returns: The (complete) keys allocated with ``incomplete_key`` as
+ root.
+ :raises: :class:`ValueError` if ``incomplete_key`` is not a
+ partial key.
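+
+        For example (a sketch; the kind is illustrative)::
+
+            >>> incomplete_key = client.key('Task')
+            >>> keys = client.allocate_ids(incomplete_key, 3)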
+ """
+        if not incomplete_key.is_partial:
+            raise ValueError(('Key is not partial.', incomplete_key))
+
+        incomplete_key_pb = incomplete_key.to_protobuf()
+        incomplete_key_pbs = [incomplete_key_pb] * num_ids
+
+        conn = self.connection
+        allocated_key_pbs = conn.allocate_ids(incomplete_key.project,
+                                              incomplete_key_pbs)
+        allocated_ids = [allocated_key_pb.path[-1].id
+                         for allocated_key_pb in allocated_key_pbs]
+        return [incomplete_key.completed_key(allocated_id)
+                for allocated_id in allocated_ids]
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Connections to gcloud datastore API servers."""
+
+import os
+
+from gcloud import connection
+from gcloud.environment_vars import GCD_HOST
+from gcloud.exceptions import make_exception
+from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
+from google.rpc import status_pb2
+
+
+
+class Connection(connection.Connection):
+ """A connection to the Google Cloud Datastore via the Protobuf API.
+
+ This class should understand only the basic types (and protobufs)
+ in method arguments, however should be capable of returning advanced types.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials`
+ :param credentials: The OAuth2 Credentials to use for this connection.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests.
+
+ :type api_base_url: string
+ :param api_base_url: The base of the API call URL. Defaults to
+ :attr:`API_BASE_URL`.
+ """
+
+    API_BASE_URL = 'https://datastore.googleapis.com'
+    """The base of the API call URL."""
+
+    API_VERSION = 'v1beta3'
+    """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = ('{api_base}/{api_version}/projects'
+                        '/{project}:{method}')
+    """A template for the URL of a particular API call."""
+
+    SCOPE = ('https://www.googleapis.com/auth/datastore',)
+    """The scopes required for authenticating as a Cloud Datastore consumer."""
+
+    def __init__(self, credentials=None, http=None, api_base_url=None):
+        super(Connection, self).__init__(credentials=credentials, http=http)
+        if api_base_url is None:
+            try:
+                # gcd.sh has /datastore/ in the path still since it supports
+                # v1beta2 and v1beta3 simultaneously.
+                api_base_url = '%s/datastore' % (os.environ[GCD_HOST],)
+            except KeyError:
+                api_base_url = self.__class__.API_BASE_URL
+        self.api_base_url = api_base_url
+
+    def _request(self, project, method, data):
+ """Make a request over the Http transport to the Cloud Datastore API.
+
+ :type project: string
+ :param project: The project to make the request for.
+
+ :type method: string
+        :param method: The API call method name (e.g., ``runQuery``,
+                       ``lookup``).
+
+ :type data: string
+ :param data: The data to send with the API call.
+ Typically this is a serialized Protobuf string.
+
+ :rtype: string
+ :returns: The string response content from the API call.
+ :raises: :class:`gcloud.exceptions.GCloudError` if the response
+ code is not 200 OK.
+ """
+        headers = {
+            'Content-Type': 'application/x-protobuf',
+            'Content-Length': str(len(data)),
+            'User-Agent': self.USER_AGENT,
+        }
+        headers, content = self.http.request(
+            uri=self.build_api_url(project=project, method=method),
+            method='POST', headers=headers, body=data)
+
+        status = headers['status']
+        if status != '200':
+            error_status = status_pb2.Status.FromString(content)
+            raise make_exception(headers, error_status.message,
+                                 use_json=False)
+
+        return content
+
+    def _rpc(self, project, method, request_pb, response_pb_cls):
+ """Make a protobuf RPC request.
+
+ :type project: string
+ :param project: The project to connect to. This is
+ usually your project name in the cloud console.
+
+ :type method: string
+ :param method: The name of the method to invoke.
+
+ :type request_pb: :class:`google.protobuf.message.Message` instance
+ :param request_pb: the protobuf instance representing the request.
+
+ :type response_pb_cls: A :class:`google.protobuf.message.Message`
+ subclass.
+ :param response_pb_cls: The class used to unmarshall the response
+ protobuf.
+
+ :rtype: :class:`google.protobuf.message.Message`
+ :returns: The RPC message parsed from the response.
+ """
+        response = self._request(project=project, method=method,
+                                 data=request_pb.SerializeToString())
+        return response_pb_cls.FromString(response)
+
+
+    def build_api_url(self, project, method, base_url=None,
+                      api_version=None):
+ """Construct the URL for a particular API call.
+
+ This method is used internally to come up with the URL to use when
+ making RPCs to the Cloud Datastore API.
+
+ :type project: string
+ :param project: The project to connect to. This is
+ usually your project name in the cloud console.
+
+ :type method: string
+ :param method: The API method to call (e.g. 'runQuery', 'lookup').
+
+ :type base_url: string
+ :param base_url: The base URL where the API lives.
+ You shouldn't have to provide this.
+
+ :type api_version: string
+ :param api_version: The version of the API to connect to.
+ You shouldn't have to provide this.
+
+ :rtype: str
+ :returns: The API URL created.
+ """
+        return self.API_URL_TEMPLATE.format(
+            api_base=(base_url or self.api_base_url),
+            api_version=(api_version or self.API_VERSION),
+            project=project, method=method)
+
+
+    def lookup(self, project, key_pbs,
+               eventual=False, transaction_id=None):
+ """Lookup keys from a project in the Cloud Datastore.
+
+ Maps the ``DatastoreService.Lookup`` protobuf RPC.
+
+ This uses mostly protobufs
+ (:class:`gcloud.datastore._generated.entity_pb2.Key` as input and
+ :class:`gcloud.datastore._generated.entity_pb2.Entity` as output). It
+ is used under the hood in
+ :meth:`Client.get() <.datastore.client.Client.get>`:
+
+ >>> from gcloud import datastore
+ >>> client = datastore.Client(project='project')
+ >>> key = client.key('MyKind', 1234)
+ >>> client.get(key)
+ [<Entity object>]
+
+ Using a :class:`Connection` directly:
+
+ >>> connection.lookup('project', [key.to_protobuf()])
+ [<Entity protobuf>]
+
+ :type project: string
+ :param project: The project to look up the keys in.
+
+ :type key_pbs: list of
+ :class:`gcloud.datastore._generated.entity_pb2.Key`
+ :param key_pbs: The keys to retrieve from the datastore.
+
+ :type eventual: bool
+ :param eventual: If False (the default), request ``STRONG`` read
+ consistency. If True, request ``EVENTUAL`` read
+ consistency.
+
+ :type transaction_id: string
+ :param transaction_id: If passed, make the request in the scope of
+ the given transaction. Incompatible with
+ ``eventual==True``.
+
+ :rtype: tuple
+ :returns: A triple of (``results``, ``missing``, ``deferred``) where
+ both ``results`` and ``missing`` are lists of
+ :class:`gcloud.datastore._generated.entity_pb2.Entity` and
+ ``deferred`` is a list of
+ :class:`gcloud.datastore._generated.entity_pb2.Key`.
+ """
+        lookup_request = _datastore_pb2.LookupRequest()
+        _set_read_options(lookup_request, eventual, transaction_id)
+        _add_keys_to_request(lookup_request.keys, key_pbs)
+
+        lookup_response = self._rpc(project, 'lookup', lookup_request,
+                                    _datastore_pb2.LookupResponse)
+
+        results = [result.entity for result in lookup_response.found]
+        missing = [result.entity for result in lookup_response.missing]
+
+        return results, missing, list(lookup_response.deferred)
+
+
+    def run_query(self, project, query_pb, namespace=None,
+                  eventual=False, transaction_id=None):
+ """Run a query on the Cloud Datastore.
+
+ Maps the ``DatastoreService.RunQuery`` protobuf RPC.
+
+ Given a Query protobuf, sends a ``runQuery`` request to the
+ Cloud Datastore API and returns a list of entity protobufs
+ matching the query.
+
+ You typically wouldn't use this method directly, in favor of the
+ :meth:`gcloud.datastore.query.Query.fetch` method.
+
+ Under the hood, the :class:`gcloud.datastore.query.Query` class
+ uses this method to fetch data:
+
+ >>> from gcloud import datastore
+ >>> client = datastore.Client()
+ >>> query = client.query(kind='MyKind')
+ >>> query.add_filter('property', '=', 'val')
+
+ Using the query iterator's
+ :meth:`next_page() <.datastore.query.Iterator.next_page>` method:
+
+ >>> query_iter = query.fetch()
+ >>> entities, more_results, cursor = query_iter.next_page()
+ >>> entities
+ [<list of Entity unmarshalled from protobuf>]
+ >>> more_results
+ <boolean of more results>
+ >>> cursor
+ <string containing cursor where fetch stopped>
+
+ Under the hood this is doing:
+
+ >>> connection.run_query('project', query.to_protobuf())
+ [<list of Entity Protobufs>], cursor, more_results, skipped_results
+
+ :type project: string
+ :param project: The project over which to run the query.
+
+ :type query_pb: :class:`gcloud.datastore._generated.query_pb2.Query`
+ :param query_pb: The Protobuf representing the query to run.
+
+ :type namespace: string
+ :param namespace: The namespace over which to run the query.
+
+ :type eventual: bool
+ :param eventual: If False (the default), request ``STRONG`` read
+ consistency. If True, request ``EVENTUAL`` read
+ consistency.
+
+ :type transaction_id: string
+ :param transaction_id: If passed, make the request in the scope of
+ the given transaction. Incompatible with
+ ``eventual==True``.
+
+ :rtype: tuple
+ :returns: Four-tuple containing the entities returned,
+ the end cursor of the query, a ``more_results``
+ enum and a count of the number of skipped results.
+ """
+        request = _datastore_pb2.RunQueryRequest()
+        _set_read_options(request, eventual, transaction_id)
+
+        if namespace:
+            request.partition_id.namespace_id = namespace
+
+        request.query.CopyFrom(query_pb)
+        response = self._rpc(project, 'runQuery', request,
+                             _datastore_pb2.RunQueryResponse)
+        return (
+            [e.entity for e in response.batch.entity_results],
+            response.batch.end_cursor,  # Assume response always has cursor.
+            response.batch.more_results,
+            response.batch.skipped_results,
+        )
+
+
+    def begin_transaction(self, project):
+ """Begin a transaction.
+
+ Maps the ``DatastoreService.BeginTransaction`` protobuf RPC.
+
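+ A minimal sketch of direct use (illustrative only; it assumes an
+ authenticated ``connection`` is already in hand, and normally
+ :class:`gcloud.datastore.transaction.Transaction` calls this for you):
+
+ >>> transaction_id = connection.begin_transaction('project')
+ >>> isinstance(transaction_id, bytes)
+ True
+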
+ :type project: string
+ :param project: The project to which the transaction applies.
+
+ :rtype: bytes
+ :returns: The serialized transaction that was begun.
+ """
+        request = _datastore_pb2.BeginTransactionRequest()
+        response = self._rpc(project, 'beginTransaction', request,
+                             _datastore_pb2.BeginTransactionResponse)
+        return response.transaction
+
+
+    def commit(self, project, request, transaction_id):
+        """Commit mutations in context of current transaction (if any).
+
+ Maps the ``DatastoreService.Commit`` protobuf RPC.
+
+ :type project: string
+ :param project: The project to which the transaction applies.
+
+ :type request: :class:`._generated.datastore_pb2.CommitRequest`
+ :param request: The protobuf with the mutations being committed.
+
+ :type transaction_id: string or None
+ :param transaction_id: The transaction ID returned from
+ :meth:`begin_transaction`. Non-transactional
+ batches must pass ``None``.
+
+ .. note::
+
+ This method will mutate ``request`` before using it.
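+
+ A hedged sketch of a non-transactional commit (``req`` must already
+ carry the mutations to apply; illustrative only):
+
+ >>> req = _datastore_pb2.CommitRequest()
+ >>> index_updates, completed_keys = connection.commit(
+ ...     'project', req, None)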
+
+ :rtype: tuple
+ :returns: The pair of the number of index updates and a list of
+ :class:`._generated.entity_pb2.Key` for each incomplete key
+ that was completed in the commit.
+ """
+        if transaction_id:
+            request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL
+            request.transaction = transaction_id
+        else:
+            request.mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+
+        response = self._rpc(project, 'commit', request,
+                             _datastore_pb2.CommitResponse)
+        return _parse_commit_response(response)
+
+
+    def rollback(self, project, transaction_id):
+        """Roll back the connection's existing transaction.
+
+ Maps the ``DatastoreService.Rollback`` protobuf RPC.
+
+ :type project: string
+ :param project: The project to which the transaction belongs.
+
+ :type transaction_id: string
+ :param transaction_id: The transaction ID returned from
+ :meth:`begin_transaction`.
+ """
+        request = _datastore_pb2.RollbackRequest()
+        request.transaction = transaction_id
+        # Nothing to do with this response, so just execute the method.
+        self._rpc(project, 'rollback', request,
+                  _datastore_pb2.RollbackResponse)
+
+
+    def allocate_ids(self, project, key_pbs):
+ """Obtain backend-generated IDs for a set of keys.
+
+ Maps the ``DatastoreService.AllocateIds`` protobuf RPC.
+
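+ A minimal sketch (illustrative; ``client`` and ``connection`` are
+ assumed, and the allocated ID below is made up):
+
+ >>> incomplete = client.key('Thing').to_protobuf()
+ >>> allocated = connection.allocate_ids('project', [incomplete])
+ >>> allocated[0].path[-1].id
+ 1234
+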
+ :type project: string
+ :param project: The project to which the transaction belongs.
+
+ :type key_pbs: list of
+ :class:`gcloud.datastore._generated.entity_pb2.Key`
+ :param key_pbs: The keys for which the backend should allocate IDs.
+
+ :rtype: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
+ :returns: An equal number of keys, with IDs filled in by the backend.
+ """
+        request = _datastore_pb2.AllocateIdsRequest()
+        _add_keys_to_request(request.keys, key_pbs)
+        # Nothing to do with this response, so just execute the method.
+        response = self._rpc(project, 'allocateIds', request,
+                             _datastore_pb2.AllocateIdsResponse)
+        return list(response.keys)
+
+
+def _set_read_options(request, eventual, transaction_id):
+ """Validate rules for read options, and assign to the request.
+
+ Helper method for ``lookup()`` and ``run_query()``.
+
+ :raises: :class:`ValueError` if ``eventual`` is ``True`` and the
+ ``transaction_id`` is not ``None``.
+ """
+    if eventual and (transaction_id is not None):
+        raise ValueError('eventual must be False when in a transaction')
+
+    opts = request.read_options
+    if eventual:
+        opts.read_consistency = _datastore_pb2.ReadOptions.EVENTUAL
+    elif transaction_id:
+        opts.transaction = transaction_id
+
+
+def _add_keys_to_request(request_field_pb, key_pbs):
+ """Add protobuf keys to a request object.
+
+ :type request_field_pb: `RepeatedCompositeFieldContainer`
+ :param request_field_pb: A repeated proto field that contains keys.
+
+ :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
+ :param key_pbs: The keys to add to a request.
+ """
+    for key_pb in key_pbs:
+        request_field_pb.add().CopyFrom(key_pb)
+
+
+def _parse_commit_response(commit_response_pb):
+ """Extract response data from a commit response.
+
+ :type commit_response_pb: :class:`._generated.datastore_pb2.CommitResponse`
+ :param commit_response_pb: The protobuf response from a commit request.
+
+ :rtype: tuple
+ :returns: The pair of the number of index updates and a list of
+ :class:`._generated.entity_pb2.Key` for each incomplete key
+ that was completed in the commit.
+ """
+    mut_results = commit_response_pb.mutation_results
+    index_updates = commit_response_pb.index_updates
+    completed_keys = [mut_result.key for mut_result in mut_results
+                      if mut_result.HasField('key')]  # Message field (Key)
+    return index_updates, completed_keys
+
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Class for representing a single entity in the Cloud Datastore."""
+
+
+from gcloud._helpers import _ensure_tuple_or_list
+
+
+
+class Entity(dict):
+    """Entities are akin to rows in a relational database.
+
+ An entity stores the actual instance of data.
+
+ Each entity is officially represented with a
+ :class:`gcloud.datastore.key.Key` class; however, it is possible that
+ you might create an Entity with only a partial Key (that is, a Key
+ with a Kind, and possibly a parent, but without an ID). In such a
+ case, the datastore service will automatically assign an ID to the
+ partial key.
+
+ Entities in this API act like dictionaries with extras built in that
+ allow you to delete or persist the data stored on the entity.
+
+ Entities are mutable and act like a subclass of a dictionary.
+ This means you could take an existing entity and change the key
+ to duplicate the object.
+
+ Use :meth:`Client.get() <gcloud.datastore.client.Client.get>` to
+ retrieve an existing entity:
+
+ >>> from gcloud import datastore
+ >>> client = datastore.Client()
+ >>> client.get(key)
+ <Entity[{'kind': 'EntityKind', id: 1234}] {'property': 'value'}>
+
+ You can then set values on the entity just like you would on any
+ other dictionary.
+
+ >>> entity['age'] = 20
+ >>> entity['name'] = 'JJ'
+ >>> entity
+ <Entity[{'kind': 'EntityKind', id: 1234}] {'age': 20, 'name': 'JJ'}>
+
+ And you can convert an entity to a regular Python dictionary with the
+ ``dict`` builtin:
+
+ >>> dict(entity)
+ {'age': 20, 'name': 'JJ'}
+
+ .. note::
+
+ When saving an entity to the backend, values which are "text"
+ (``unicode`` in Python2, ``str`` in Python3) will be saved using
+ the 'string_value' field, after being encoded to UTF-8. When
+ retrieved from the back-end, such values will be decoded to "text"
+ again. Values which are "bytes" (``str`` in Python2, ``bytes`` in
+ Python3), will be saved using the 'blob_value' field, without
+ any decoding / encoding step.
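+
+ For example (illustrative only; no save to the back-end is shown):
+
+ >>> entity['display_name'] = u'Ada'   # text -> 'string_value'
+ >>> entity['avatar'] = b'\x89PNG...'  # bytes -> 'blob_value'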
+
+ :type key: :class:`gcloud.datastore.key.Key`
+ :param key: Optional key to be set on entity.
+
+ :type exclude_from_indexes: tuple of string
+ :param exclude_from_indexes: Names of fields whose values are not to be
+ indexed for this entity.
+ """
+
+    def __init__(self, key=None, exclude_from_indexes=()):
+        super(Entity, self).__init__()
+        self.key = key
+        self._exclude_from_indexes = set(_ensure_tuple_or_list(
+            'exclude_from_indexes', exclude_from_indexes))
+        # NOTE: This will be populated when parsing a protobuf in
+        #       gcloud.datastore.helpers.entity_from_protobuf.
+        self._meanings = {}
+
+    def __eq__(self, other):
+ """Compare two entities for equality.
+
+ Entities compare equal if their keys compare equal, and their
+ properties compare equal.
+
+ :rtype: boolean
+ :returns: True if the entities compare equal, else False.
+ """
+        if not isinstance(other, Entity):
+            return False
+
+        return (self.key == other.key and
+                self._exclude_from_indexes == other._exclude_from_indexes and
+                self._meanings == other._meanings and
+                super(Entity, self).__eq__(other))
+
+    def __ne__(self, other):
+ """Compare two entities for inequality.
+
+ Entities compare equal if their keys compare equal, and their
+ properties compare equal.
+
+ :rtype: boolean
+ :returns: False if the entities compare equal, else True.
+ """
+        return not self.__eq__(other)
+
+    @property
+    def kind(self):
+ """Get the kind of the current entity.
+
+ .. note::
+ This relies entirely on the :class:`gcloud.datastore.key.Key`
+ set on the entity. That means that we're not storing the kind
+ of the entity at all, just the properties and a pointer to a
+ Key which knows its Kind.
+ """
+        if self.key:
+            return self.key.kind
+
+    @property
+    def exclude_from_indexes(self):
+ """Names of fields which are *not* to be indexed for this entity.
+
+ :rtype: sequence of field names
+ :returns: The set of fields excluded from indexes.
+ """
+        return frozenset(self._exclude_from_indexes)
+
+    def __repr__(self):
+        if self.key:
+            return '<Entity%s %s>' % (self.key.path,
+                                      super(Entity, self).__repr__())
+        else:
+            return '<Entity %s>' % (super(Entity, self).__repr__())
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper functions for dealing with Cloud Datastore's Protobuf API.
+
+The non-private functions are part of the API.
+"""
+
+import datetime
+import itertools
+
+from google.protobuf import struct_pb2
+from google.type import latlng_pb2
+import six
+
+from gcloud._helpers import _datetime_to_pb_timestamp
+from gcloud._helpers import _pb_timestamp_to_datetime
+from gcloud.datastore._generated import entity_pb2 as _entity_pb2
+from gcloud.datastore.entity import Entity
+from gcloud.datastore.key import Key
+
+__all__ = ('entity_from_protobuf', 'key_from_protobuf')
+
+
+def _get_meaning(value_pb, is_list=False):
+ """Get the meaning from a protobuf value.
+
+ :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
+ :param value_pb: The protobuf value to be checked for an
+ associated meaning.
+
+ :type is_list: bool
+ :param is_list: Boolean indicating if the ``value_pb`` contains
+ a list value.
+
+ :rtype: int
+ :returns: The meaning for the ``value_pb`` if one is set, else
+ :data:`None`. For a list value, if the meanings disagree,
+ a list of meanings is returned. If all the list meanings
+ agree, they are condensed into a single meaning.
+ """
+    meaning = None
+    if is_list:
+        # An empty list will have no values, hence no shared meaning
+        # set among them.
+        if len(value_pb.array_value.values) == 0:
+            return None
+
+        # We check among all the meanings, some of which may be None,
+        # the rest which may be enum/int values.
+        all_meanings = [_get_meaning(sub_value_pb)
+                        for sub_value_pb in value_pb.array_value.values]
+        unique_meanings = set(all_meanings)
+        if len(unique_meanings) == 1:
+            # If there is a unique meaning, we preserve it.
+            meaning = unique_meanings.pop()
+        else:  # We know len(value_pb.array_value.values) > 0.
+            # If the meaning is not unique, just return all of them.
+            meaning = all_meanings
+    elif value_pb.meaning:  # Simple field (int32)
+        meaning = value_pb.meaning
+
+    return meaning
+
+
+def _new_value_pb(entity_pb, name):
+ """Add (by name) a new ``Value`` protobuf to an entity protobuf.
+
+ :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
+ :param entity_pb: An entity protobuf to add a new property to.
+
+ :type name: string
+ :param name: The name of the new property.
+
+ :rtype: :class:`gcloud.datastore._generated.entity_pb2.Value`
+ :returns: The new ``Value`` protobuf that was added to the entity.
+ """
+    return entity_pb.properties.get_or_create(name)
+
+
+def _property_tuples(entity_pb):
+ """Iterator of name, ``Value`` tuples from entity properties.
+
+ :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
+ :param entity_pb: An entity protobuf whose properties are iterated.
+
+ :rtype: :class:`generator`
+ :returns: An iterator that yields tuples of a name and ``Value``
+ corresponding to properties on the entity.
+ """
+    return six.iteritems(entity_pb.properties)
+
+
+
+def entity_from_protobuf(pb):
+ """Factory method for creating an entity based on a protobuf.
+
+ The protobuf should be one returned from the Cloud Datastore
+ Protobuf API.
+
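+ A minimal sketch with a hand-built protobuf (illustrative only):
+
+ >>> from gcloud.datastore._generated import entity_pb2
+ >>> pb = entity_pb2.Entity()
+ >>> pb.properties.get_or_create('answer').integer_value = 42
+ >>> dict(entity_from_protobuf(pb))
+ {'answer': 42}
+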
+ :type pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
+ :param pb: The Protobuf representing the entity.
+
+ :rtype: :class:`gcloud.datastore.entity.Entity`
+ :returns: The entity derived from the protobuf.
+ """
+    key = None
+    if pb.HasField('key'):  # Message field (Key)
+        key = key_from_protobuf(pb.key)
+
+    entity_props = {}
+    entity_meanings = {}
+    exclude_from_indexes = []
+
+    for prop_name, value_pb in _property_tuples(pb):
+        value = _get_value_from_value_pb(value_pb)
+        entity_props[prop_name] = value
+
+        # Check if the property has an associated meaning.
+        is_list = isinstance(value, list)
+        meaning = _get_meaning(value_pb, is_list=is_list)
+        if meaning is not None:
+            entity_meanings[prop_name] = (meaning, value)
+
+        # Check if ``value_pb`` was excluded from index. Lists need to be
+        # special-cased and we require all ``exclude_from_indexes`` values
+        # in a list agree.
+        if is_list:
+            exclude_values = set(value_pb.exclude_from_indexes
+                                 for value_pb in value_pb.array_value.values)
+            if len(exclude_values) != 1:
+                raise ValueError('For an array_value, subvalues must either '
+                                 'all be indexed or all excluded from '
+                                 'indexes.')
+
+            if exclude_values.pop():
+                exclude_from_indexes.append(prop_name)
+        else:
+            if value_pb.exclude_from_indexes:
+                exclude_from_indexes.append(prop_name)
+
+    entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
+    entity.update(entity_props)
+    entity._meanings.update(entity_meanings)
+    return entity
+
+
+def _set_pb_meaning_from_entity(entity, name, value, value_pb,
+                                is_list=False):
+ """Add meaning information (from an entity) to a protobuf.
+
+ :type entity: :class:`gcloud.datastore.entity.Entity`
+ :param entity: The entity to be turned into a protobuf.
+
+ :type name: string
+ :param name: The name of the property.
+
+ :type value: object
+ :param value: The current value stored as property ``name``.
+
+ :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
+ :param value_pb: The protobuf value to add meaning / meanings to.
+
+ :type is_list: bool
+ :param is_list: (Optional) Boolean indicating if the ``value`` is
+ a list value.
+ """
+    if name not in entity._meanings:
+        return
+
+    meaning, orig_value = entity._meanings[name]
+    # Only add the meaning back to the protobuf if the value is
+    # unchanged from when it was originally read from the API.
+    if orig_value is not value:
+        return
+
+    # For lists, we set meaning on each sub-element.
+    if is_list:
+        if not isinstance(meaning, list):
+            meaning = itertools.repeat(meaning)
+        val_iter = six.moves.zip(value_pb.array_value.values,
+                                 meaning)
+        for sub_value_pb, sub_meaning in val_iter:
+            if sub_meaning is not None:
+                sub_value_pb.meaning = sub_meaning
+    else:
+        value_pb.meaning = meaning
+
+
+def entity_to_protobuf(entity):
+ """Converts an entity into a protobuf.
+
+ :type entity: :class:`gcloud.datastore.entity.Entity`
+ :param entity: The entity to be turned into a protobuf.
+
+ :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity`
+ :returns: The protobuf representing the entity.
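+
+ A hedged round-trip sketch (illustrative only):
+
+ >>> entity = Entity()
+ >>> entity['answer'] = 42
+ >>> entity_from_protobuf(entity_to_protobuf(entity)) == entity
+ True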
+ """
+    entity_pb = _entity_pb2.Entity()
+    if entity.key is not None:
+        key_pb = entity.key.to_protobuf()
+        entity_pb.key.CopyFrom(key_pb)
+
+    for name, value in entity.items():
+        value_is_list = isinstance(value, list)
+        if value_is_list and len(value) == 0:
+            continue
+
+        value_pb = _new_value_pb(entity_pb, name)
+        # Set the appropriate value.
+        _set_protobuf_value(value_pb, value)
+
+        # Add index information to protobuf.
+        if name in entity.exclude_from_indexes:
+            if not value_is_list:
+                value_pb.exclude_from_indexes = True
+
+            for sub_value in value_pb.array_value.values:
+                sub_value.exclude_from_indexes = True
+
+        # Add meaning information to protobuf.
+        _set_pb_meaning_from_entity(entity, name, value, value_pb,
+                                    is_list=value_is_list)
+
+    return entity_pb
+
+
+
+def key_from_protobuf(pb):
+ """Factory method for creating a key based on a protobuf.
+
+ The protobuf should be one returned from the Cloud Datastore
+ Protobuf API.
+
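+ A hedged round-trip sketch ('project' is a placeholder):
+
+ >>> key = Key('Parent', 'foo', 'Child', 1234, project='project')
+ >>> key_from_protobuf(key.to_protobuf()).flat_path
+ ('Parent', 'foo', 'Child', 1234)
+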
+ :type pb: :class:`gcloud.datastore._generated.entity_pb2.Key`
+ :param pb: The Protobuf representing the key.
+
+ :rtype: :class:`gcloud.datastore.key.Key`
+ :returns: a new ``Key`` instance.
+ """
+    path_args = []
+    for element in pb.path:
+        path_args.append(element.kind)
+        if element.id:  # Simple field (int64)
+            path_args.append(element.id)
+        # This is safe: we expect proto objects returned will only have
+        # one of `name` or `id` set.
+        if element.name:  # Simple field (string)
+            path_args.append(element.name)
+
+    project = None
+    if pb.partition_id.project_id:  # Simple field (string)
+        project = pb.partition_id.project_id
+    namespace = None
+    if pb.partition_id.namespace_id:  # Simple field (string)
+        namespace = pb.partition_id.namespace_id
+
+    return Key(*path_args, namespace=namespace, project=project)
+
+
+def _pb_attr_value(val):
+ """Given a value, return the protobuf attribute name and proper value.
+
+ The Protobuf API uses different attribute names based on value types
+ rather than inferring the type. This function simply determines the
+ proper attribute name based on the type of the value provided and
+ returns the attribute name as well as a properly formatted value.
+
+ Certain value types need to be coerced into a different type (such
+ as a `datetime.datetime` into an integer timestamp, or a
+ `gcloud.datastore.key.Key` into a Protobuf representation). This
+ function handles that for you.
+
+ .. note::
+ Values which are "text" ('unicode' in Python2, 'str' in Python3) map
+ to 'string_value' in the datastore; values which are "bytes"
+ ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.
+
+ For example:
+
+ >>> _pb_attr_value(1234)
+ ('integer_value', 1234)
+ >>> _pb_attr_value('my_string')
+ ('string_value', 'my_string')
+
+ :type val: `datetime.datetime`, :class:`gcloud.datastore.key.Key`,
+ bool, float, integer, string
+ :param val: The value to be scrutinized.
+
+ :rtype: tuple
+ :returns: A tuple of the attribute name and proper value type.
+ """
+
+    if isinstance(val, datetime.datetime):
+        name = 'timestamp'
+        value = _datetime_to_pb_timestamp(val)
+    elif isinstance(val, Key):
+        name, value = 'key', val.to_protobuf()
+    elif isinstance(val, bool):
+        name, value = 'boolean', val
+    elif isinstance(val, float):
+        name, value = 'double', val
+    elif isinstance(val, six.integer_types):
+        name, value = 'integer', val
+    elif isinstance(val, six.text_type):
+        name, value = 'string', val
+    elif isinstance(val, (bytes, str)):
+        name, value = 'blob', val
+    elif isinstance(val, Entity):
+        name, value = 'entity', val
+    elif isinstance(val, list):
+        name, value = 'array', val
+    elif isinstance(val, GeoPoint):
+        name, value = 'geo_point', val.to_protobuf()
+    elif val is None:
+        name, value = 'null', struct_pb2.NULL_VALUE
+    else:
+        raise ValueError("Unknown protobuf attr type %s" % type(val))
+
+    return name + '_value', value
+
+
+def _get_value_from_value_pb(value_pb):
+ """Given a protobuf for a Value, get the correct value.
+
+ The Cloud Datastore Protobuf API returns a Property Protobuf which
+ has one value set and the rest blank. This function retrieves the
+ one value provided.
+
+ Some work is done to coerce the return value into a more useful type
+ (particularly in the case of a timestamp value, or a key value).
+
+ :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
+ :param value_pb: The Value Protobuf.
+
+ :rtype: object
+ :returns: The value provided by the Protobuf.
+ :raises: :class:`ValueError <exceptions.ValueError>` if no value type
+ has been set.
+ """
+    value_type = value_pb.WhichOneof('value_type')
+
+    if value_type == 'timestamp_value':
+        result = _pb_timestamp_to_datetime(value_pb.timestamp_value)
+
+    elif value_type == 'key_value':
+        result = key_from_protobuf(value_pb.key_value)
+
+    elif value_type == 'boolean_value':
+        result = value_pb.boolean_value
+
+    elif value_type == 'double_value':
+        result = value_pb.double_value
+
+    elif value_type == 'integer_value':
+        result = value_pb.integer_value
+
+    elif value_type == 'string_value':
+        result = value_pb.string_value
+
+    elif value_type == 'blob_value':
+        result = value_pb.blob_value
+
+    elif value_type == 'entity_value':
+        result = entity_from_protobuf(value_pb.entity_value)
+
+    elif value_type == 'array_value':
+        result = [_get_value_from_value_pb(value)
+                  for value in value_pb.array_value.values]
+
+    elif value_type == 'geo_point_value':
+        result = GeoPoint(value_pb.geo_point_value.latitude,
+                          value_pb.geo_point_value.longitude)
+
+    elif value_type == 'null_value':
+        result = None
+
+    else:
+        raise ValueError('Value protobuf did not have any value set')
+
+    return result
+
+
+def _set_protobuf_value(value_pb, val):
+ """Assign 'val' to the correct subfield of 'value_pb'.
+
+ The Protobuf API uses different attribute names based on value types
+ rather than inferring the type.
+
+ Some value types (entities, keys, lists) cannot be directly
+ assigned; this function handles them correctly.
+
+ :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
+ :param value_pb: The value protobuf to which the value is being assigned.
+
+ :type val: :class:`datetime.datetime`, boolean, float, integer, string,
+ :class:`gcloud.datastore.key.Key`,
+ :class:`gcloud.datastore.entity.Entity`
+ :param val: The value to be assigned.
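+
+ A minimal sketch (illustrative only):
+
+ >>> value_pb = _entity_pb2.Value()
+ >>> _set_protobuf_value(value_pb, 3.5)
+ >>> value_pb.double_value
+ 3.5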
+ """
+    attr, val = _pb_attr_value(val)
+    if attr == 'key_value':
+        value_pb.key_value.CopyFrom(val)
+    elif attr == 'timestamp_value':
+        value_pb.timestamp_value.CopyFrom(val)
+    elif attr == 'entity_value':
+        entity_pb = entity_to_protobuf(val)
+        value_pb.entity_value.CopyFrom(entity_pb)
+    elif attr == 'array_value':
+        l_pb = value_pb.array_value.values
+        for item in val:
+            i_pb = l_pb.add()
+            _set_protobuf_value(i_pb, item)
+    elif attr == 'geo_point_value':
+        value_pb.geo_point_value.CopyFrom(val)
+    else:  # scalar, just assign
+        setattr(value_pb, attr, val)
+
+
+class GeoPoint(object):
+ """Simple container for a geo point value.
+
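+ For example (illustrative only):
+
+ >>> point = GeoPoint(40.7, -74.0)
+ >>> point == GeoPoint(40.7, -74.0)
+ True
+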
+ :type latitude: float
+ :param latitude: Latitude of a point.
+
+ :type longitude: float
+ :param longitude: Longitude of a point.
+ """
+
+    def __init__(self, latitude, longitude):
+        self.latitude = latitude
+        self.longitude = longitude
+
+    def to_protobuf(self):
+ """Convert the current object to protobuf.
+
+ :rtype: :class:`google.type.latlng_pb2.LatLng`.
+ :returns: The current point as a protobuf.
+ """
+        return latlng_pb2.LatLng(latitude=self.latitude,
+                                 longitude=self.longitude)
+
+    def __eq__(self, other):
+ """Compare two geo points for equality.
+
+ :rtype: boolean
+ :returns: True if the points compare equal, else False.
+ """
+        if not isinstance(other, GeoPoint):
+            return False
+
+        return (self.latitude == other.latitude and
+                self.longitude == other.longitude)
+
+    def __ne__(self, other):
+ """Compare two geo points for inequality.
+
+ :rtype: boolean
+ :returns: False if the points compare equal, else True.
+ """
+        return not self.__eq__(other)
+
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud datastore keys."""
+
+import copy
+import six
+
+from gcloud.datastore._generated import entity_pb2 as _entity_pb2
+
+
+
+class Key(object):
+ """An immutable representation of a datastore Key.
+
+ To create a basic key:
+
+ >>> Key('EntityKind', 1234)
+ <Key[{'kind': 'EntityKind', 'id': 1234}]>
+ >>> Key('EntityKind', 'foo')
+ <Key[{'kind': 'EntityKind', 'name': 'foo'}]>
+
+ To create a key with a parent:
+
+ >>> Key('Parent', 'foo', 'Child', 1234)
+ <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child', 'id': 1234}]>
+ >>> Key('Child', 1234, parent=parent_key)
+ <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child', 'id': 1234}]>
+
+ To create a partial key:
+
+ >>> Key('Parent', 'foo', 'Child')
+ <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child'}]>
+
+ :type path_args: tuple of string and integer
+ :param path_args: May represent a partial (odd length) or full (even
+ length) key path.
+
+ :type kwargs: dict
+ :param kwargs: Keyword arguments to be passed in.
+
+ Accepted keyword arguments are
+
+ * namespace (string): A namespace identifier for the key.
+ * project (string): The project associated with the key.
+ * parent (:class:`gcloud.datastore.key.Key`): The parent of the key.
+
+ The project argument is required unless it has been set implicitly.
+ """
+
+    def __init__(self, *path_args, **kwargs):
+        self._flat_path = path_args
+        parent = self._parent = kwargs.get('parent')
+        self._namespace = kwargs.get('namespace')
+        project = kwargs.get('project')
+        self._project = _validate_project(project, parent)
+        # _flat_path, _parent, _namespace and _project must be set before
+        # _combine_args() is called.
+        self._path = self._combine_args()
+
+    def __eq__(self, other):
+ """Compare two keys for equality.
+
+ Incomplete keys never compare equal to any other key.
+
+ Completed keys compare equal if they have the same path, project,
+ and namespace.
+
+ :rtype: bool
+ :returns: True if the keys compare equal, else False.
+ """
+        if not isinstance(other, Key):
+            return False
+
+        if self.is_partial or other.is_partial:
+            return False
+
+        return (self.flat_path == other.flat_path and
+                self.project == other.project and
+                self.namespace == other.namespace)
+
+    def __ne__(self, other):
+ """Compare two keys for inequality.
+
+ Incomplete keys never compare equal to any other key.
+
+ Completed keys compare equal if they have the same path, project,
+ and namespace.
+
+ :rtype: bool
+ :returns: False if the keys compare equal, else True.
+ """
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        """Hash a key for use in a dictionary lookup.
+
+ :rtype: integer
+ :returns: a hash of the key's state.
+ """
+        return (hash(self.flat_path) +
+                hash(self.project) +
+                hash(self.namespace))
+
+    @staticmethod
+    def _parse_path(path_args):
+ """Parses positional arguments into key path with kinds and IDs.
+
+ :type path_args: tuple
+ :param path_args: A tuple from positional arguments. Should be
+ alternating list of kinds (string) and ID/name
+ parts (int or string).
+
+ :rtype: :class:`list` of :class:`dict`
+ :returns: A list of key parts with kind and ID or name set.
+ :raises: :class:`ValueError` if there are no ``path_args``, if one of
+ the kinds is not a string or if one of the IDs/names is not
+ a string or an integer.
+ """
+        if len(path_args) == 0:
+            raise ValueError('Key path must not be empty.')
+
+        kind_list = path_args[::2]
+        id_or_name_list = path_args[1::2]
+        # Dummy sentinel value to pad incomplete key to even length path.
+        partial_ending = object()
+        if len(path_args) % 2 == 1:
+            id_or_name_list += (partial_ending,)
+
+        result = []
+        for kind, id_or_name in zip(kind_list, id_or_name_list):
+            curr_key_part = {}
+            if isinstance(kind, six.string_types):
+                curr_key_part['kind'] = kind
+            else:
+                raise ValueError(kind, 'Kind was not a string.')
+
+            if isinstance(id_or_name, six.string_types):
+                curr_key_part['name'] = id_or_name
+            elif isinstance(id_or_name, six.integer_types):
+                curr_key_part['id'] = id_or_name
+            elif id_or_name is not partial_ending:
+                raise ValueError(id_or_name,
+                                 'ID/name was not a string or integer.')
+
+            result.append(curr_key_part)
+
+        return result
+
+    def _combine_args(self):
+ """Sets protected data by combining raw data set from the constructor.
+
+ If a ``_parent`` is set, updates the ``_flat_path`` and sets the
+ ``_namespace`` and ``_project`` if not already set.
+
+ :rtype: :class:`list` of :class:`dict`
+ :returns: A list of key parts with kind and ID or name set.
+ :raises: :class:`ValueError` if the parent key is not complete.
+ """
+        child_path = self._parse_path(self._flat_path)
+
+        if self._parent is not None:
+            if self._parent.is_partial:
+                raise ValueError('Parent key must be complete.')
+
+            # We know that _parent.path() will return a copy.
+            child_path = self._parent.path + child_path
+            self._flat_path = self._parent.flat_path + self._flat_path
+            if (self._namespace is not None and
+                    self._namespace != self._parent.namespace):
+                raise ValueError('Child namespace must agree with parent\'s.')
+            self._namespace = self._parent.namespace
+            if (self._project is not None and
+                    self._project != self._parent.project):
+                raise ValueError('Child project must agree with parent\'s.')
+            self._project = self._parent.project
+
+        return child_path
+
+    def _clone(self):
+ """Duplicates the Key.
+
+ Most attributes are simple types, so don't require copying. Other
+ attributes like ``parent`` are long-lived and so we re-use them.
+
+ :rtype: :class:`gcloud.datastore.key.Key`
+ :returns: A new ``Key`` instance with the same data as the current one.
+ """
+        cloned_self = self.__class__(*self.flat_path,
+                                     project=self.project,
+                                     namespace=self.namespace)
+        # If the current parent has already been set, we re-use
+        # the same instance
+        cloned_self._parent = self._parent
+        return cloned_self
+
+
+    def completed_key(self, id_or_name):
+ """Creates new key from existing partial key by adding final ID/name.
+
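+ A hedged sketch ('project' is a placeholder):
+
+ >>> partial = Key('Thing', project='project')
+ >>> partial.completed_key(1234).flat_path
+ ('Thing', 1234)
+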
+ :type id_or_name: string or integer
+ :param id_or_name: ID or name to be added to the key.
+
+ :rtype: :class:`gcloud.datastore.key.Key`
+ :returns: A new ``Key`` instance with the same data as the current one
+ and an extra ID or name added.
+ :raises: :class:`ValueError` if the current key is not partial or if
+ ``id_or_name`` is not a string or integer.
+ """
+        if not self.is_partial:
+            raise ValueError('Only a partial key can be completed.')
+
+        id_or_name_key = None
+        if isinstance(id_or_name, six.string_types):
+            id_or_name_key = 'name'
+        elif isinstance(id_or_name, six.integer_types):
+            id_or_name_key = 'id'
+        else:
+            raise ValueError(id_or_name,
+                             'ID/name was not a string or integer.')
+
+        new_key = self._clone()
+        new_key._path[-1][id_or_name_key] = id_or_name
+        new_key._flat_path += (id_or_name,)
+        return new_key
+
+    @property
+    def is_partial(self):
+        """Boolean indicating if the key lacks an ID (or name).
+
+ :rtype: bool
+ :returns: ``True`` if the last element of the key's path does not have
+ an ``id`` or a ``name``.
+ """
+        return self.id_or_name is None
+
+    @property
+    def namespace(self):
+ """Namespace getter.
+
+ :rtype: string
+ :returns: The namespace of the current key.
+ """
+        return self._namespace
+
+    @property
+    def path(self):
+ """Path getter.
+
+ Returns a copy so that the key remains immutable.
+
+ :rtype: :class:`list` of :class:`dict`
+ :returns: The (key) path of the current key.
+ """
+        return copy.deepcopy(self._path)
+
+    @property
+    def flat_path(self):
+ """Getter for the key path as a tuple.
+
+ :rtype: tuple of string and integer
+ :returns: The tuple of elements in the path.
+ """
+        return self._flat_path
+
+    @property
+    def kind(self):
+ """Kind getter. Based on the last element of path.
+
+ :rtype: string
+ :returns: The kind of the current key.
+ """
+        return self.path[-1]['kind']
+
+    @property
+    def id(self):
+ """ID getter. Based on the last element of path.
+
+ :rtype: integer
+ :returns: The (integer) ID of the key.
+ """
+        return self.path[-1].get('id')
+
+    @property
+    def name(self):
+ """Name getter. Based on the last element of path.
+
+ :rtype: string
+ :returns: The (string) name of the key.
+ """
+        return self.path[-1].get('name')
+
+    @property
+    def id_or_name(self):
+ """Getter. Based on the last element of path.
+
+ :rtype: integer (if ``id``) or string (if ``name``)
+ :returns: The last element of the key's path if it is either an ``id``
+ or a ``name``.
+ """
+        return self.id or self.name
+
+    @property
+    def project(self):
+ """Project getter.
+
+ :rtype: string
+ :returns: The key's project.
+ """
+        return self._project
+
+    def _make_parent(self):
+ """Creates a parent key for the current path.
+
+ Extracts all but the last element in the key path and creates a new
+ key, while still matching the namespace and the project.
+
+ :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType`
+ :returns: A new ``Key`` instance, whose path consists of all but the
+ last element of current path. If the current key has only
+ one path element, returns ``None``.
+ """
+        if self.is_partial:
+            parent_args = self.flat_path[:-1]
+        else:
+            parent_args = self.flat_path[:-2]
+        if parent_args:
+            return self.__class__(*parent_args, project=self.project,
+                                  namespace=self.namespace)
+
+    @property
+    def parent(self):
+ """The parent of the current key.
+
+ :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType`
+ :returns: A new ``Key`` instance, whose path consists of all but the
+ last element of current path. If the current key has only
+ one path element, returns ``None``.
+ """
+        if self._parent is None:
+            self._parent = self._make_parent()
+
+        return self._parent
+
+    def __repr__(self):
+        return '<Key%s, project=%s>' % (self.path, self.project)
+
+
+def _validate_project(project, parent):
+ """Ensure the project is set appropriately.
+
+ If ``parent`` is passed, skip the test (it will be checked / fixed up
+ later).
+
+ If ``project`` is unset, attempt to infer the project from the environment.
+
+ :type project: string
+ :param project: A project.
+
+ :type parent: :class:`gcloud.datastore.key.Key` or ``NoneType``
+ :param parent: The parent of the key or ``None``.
+
+ :rtype: string
+ :returns: The ``project`` passed in, or implied from the environment.
+ :raises: :class:`ValueError` if ``project`` is ``None`` and no project
+ can be inferred from the parent.
+ """
+    if parent is None:
+        if project is None:
+            raise ValueError("A Key must have a project set.")
+
+    return project
+
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud datastore queries."""
+
+import base64
+
+from gcloud._helpers import _ensure_tuple_or_list
+from gcloud.datastore._generated import query_pb2 as _query_pb2
+from gcloud.datastore import helpers
+from gcloud.datastore.key import Key
+
+
+
+class Query(object):
+ """A Query against the Cloud Datastore.
+
+ This class serves as an abstraction for creating a query over data
+ stored in the Cloud Datastore.
+
+ :type client: :class:`gcloud.datastore.client.Client`
+ :param client: The client used to connect to datastore.
+
+ :type kind: string
+ :param kind: The kind to query.
+
+ :type project: string
+ :param project: The project associated with the query. If not passed,
+ uses the client's value.
+
+ :type namespace: string or None
+ :param namespace: The namespace to which to restrict results. If not
+ passed, uses the client's value.
+
+ :type ancestor: :class:`gcloud.datastore.key.Key` or None
+ :param ancestor: key of the ancestor to which this query's results are
+ restricted.
+
+ :type filters: sequence of (property_name, operator, value) tuples
+ :param filters: property filters applied by this query.
+
+ :type projection: sequence of string
+ :param projection: fields returned as part of query results.
+
+ :type order: sequence of string
+ :param order: field names used to order query results. Prepend '-'
+ to a field name to sort it in descending order.
+
+ :type distinct_on: sequence of string
+ :param distinct_on: field names used to group query results.
+
+ :raises: ValueError if ``project`` is not passed and no implicit
+ default is set.
+ """
+
+    OPERATORS = {
+        '<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
+        '>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
+        '<': _query_pb2.PropertyFilter.LESS_THAN,
+        '>': _query_pb2.PropertyFilter.GREATER_THAN,
+        '=': _query_pb2.PropertyFilter.EQUAL,
+    }
+ """Mapping of operator strings and their protobuf equivalents."""
+
+    def __init__(self,
+                 client,
+                 kind=None,
+                 project=None,
+                 namespace=None,
+                 ancestor=None,
+                 filters=(),
+                 projection=(),
+                 order=(),
+                 distinct_on=()):
+
+        self._client = client
+        self._kind = kind
+        self._project = project or client.project
+        self._namespace = namespace or client.namespace
+        self._ancestor = ancestor
+        self._filters = []
+        # Verify filters passed in.
+        for property_name, operator, value in filters:
+            self.add_filter(property_name, operator, value)
+        self._projection = _ensure_tuple_or_list('projection', projection)
+        self._order = _ensure_tuple_or_list('order', order)
+        self._distinct_on = _ensure_tuple_or_list('distinct_on', distinct_on)
+
+    @property
+    def project(self):
+ """Get the project for this Query.
+
+ :rtype: str
+ :returns: The project for the query.
+ """
+        return self._project or self._client.project
+
+    @property
+    def namespace(self):
+        """This query's namespace.
+
+ :rtype: string or None
+ :returns: the namespace assigned to this query
+ """
+        return self._namespace or self._client.namespace
+
+    @namespace.setter
+    def namespace(self, value):
+ """Update the query's namespace.
+
+ :type value: string
+ """
+        if not isinstance(value, str):
+            raise ValueError("Namespace must be a string")
+        self._namespace = value
+
+    @property
+    def kind(self):
+ """Get the Kind of the Query.
+
+ :rtype: string
+ :returns: The kind for the query.
+ """
+        return self._kind
+
+    @kind.setter
+    def kind(self, value):
+ """Update the Kind of the Query.
+
+ :type value: string
+ :param value: updated kind for the query.
+
+ .. note::
+
+ The protobuf specification allows for ``kind`` to be repeated,
+ but the current implementation returns an error if more than
+ one value is passed. If the back-end changes in the future to
+ allow multiple values, this method will be updated to allow passing
+ either a string or a sequence of strings.
+ """
+        if not isinstance(value, str):
+            raise TypeError("Kind must be a string")
+        self._kind = value
+
+    @property
+    def ancestor(self):
+ """The ancestor key for the query.
+
+ :rtype: Key or None
+ :returns: The ancestor for the query.
+ """
+        return self._ancestor
+
+    @ancestor.setter
+    def ancestor(self, value):
+        """Set the ancestor for the query.
+
+ :type value: Key
+ :param value: the new ancestor key
+ """
+        if not isinstance(value, Key):
+            raise TypeError("Ancestor must be a Key")
+        self._ancestor = value
+
+    @ancestor.deleter
+    def ancestor(self):
+        """Remove the ancestor for the query."""
+        self._ancestor = None
+
+    @property
+    def filters(self):
+ """Filters set on the query.
+
+ :rtype: sequence of (property_name, operator, value) tuples.
+ :returns: The filters set on the query.
+ """
+        return self._filters[:]
+
+
+    def add_filter(self, property_name, operator, value):
+ """Filter the query based on a property name, operator and a value.
+
+ Expressions take the form of::
+
+ .add_filter('<property>', '<operator>', <value>)
+
+ where property is a property stored on the entity in the datastore
+ and operator is one of ``OPERATORS``
+ (i.e., ``=``, ``<``, ``<=``, ``>``, ``>=``)::
+
+ >>> from gcloud import datastore
+ >>> client = datastore.Client()
+ >>> query = client.query(kind='Person')
+ >>> query.add_filter('name', '=', 'James')
+ >>> query.add_filter('age', '>', 50)
+
+ :type property_name: string
+ :param property_name: A property name.
+
+ :type operator: string
+ :param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
+
+ :type value: :class:`int`, :class:`str`, :class:`bool`,
+ :class:`float`, :class:`NoneType`,
+ :class:`datetime.datetime`,
+ :class:`gcloud.datastore.key.Key`
+ :param value: The value to filter on.
+
+ :raises: :class:`ValueError` if ``operator`` is not one of the
+ specified values, or if a filter names ``'__key__'`` but
+ passes an invalid value (a key is required).
+ """
+        if self.OPERATORS.get(operator) is None:
+            error_message = 'Invalid expression: "%s"' % (operator,)
+            choices_message = 'Please use one of: =, <, <=, >, >=.'
+            raise ValueError(error_message, choices_message)
+
+        if property_name == '__key__' and not isinstance(value, Key):
+            raise ValueError('Invalid key: "%s"' % value)
+
+        self._filters.append((property_name, operator, value))
+
+    @property
+    def projection(self):
+        """Field names returned by the query.
+
+ :rtype: sequence of string
+ :returns: Names of fields in query results.
+ """
+        return self._projection[:]
+
+    @projection.setter
+    def projection(self, projection):
+        """Set the fields returned by the query.
+
+ :type projection: string or sequence of strings
+ :param projection: Each value is a string giving the name of a
+ property to be included in the projection query.
+ """
+        if isinstance(projection, str):
+            projection = [projection]
+        self._projection[:] = projection
+
+
+    def keys_only(self):
+        """Set the projection to include only keys."""
+        self._projection[:] = ['__key__']
+
+
+    def key_filter(self, key, operator='='):
+ """Filter on a key.
+
+ :type key: :class:`gcloud.datastore.key.Key`
+ :param key: The key to filter on.
+
+ :type operator: string
+ :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
+ Defaults to ``=``.
+ """
+        self.add_filter('__key__', operator, key)
+
+    @property
+    def order(self):
+ """Names of fields used to sort query results.
+
+ :rtype: sequence of string
+ :returns: The order(s) set on the query.
+ """
+        return self._order[:]
+
+    @order.setter
+    def order(self, value):
+ """Set the fields used to sort query results.
+
+ Sort fields will be applied in the order specified.
+
+ :type value: string or sequence of strings
+ :param value: Each value is a string giving the name of the
+ property on which to sort, optionally preceded by a
+ hyphen (-) to specify descending order.
+ Omitting the hyphen implies ascending order.
+ """
+        if isinstance(value, str):
+            value = [value]
+        self._order[:] = value
+
+    @property
+    def distinct_on(self):
+ """Names of fields used to group query results.
+
+ :rtype: sequence of string
+ :returns: The "distinct on" fields set on the query.
+ """
+        return self._distinct_on[:]
+
+    @distinct_on.setter
+    def distinct_on(self, value):
+ """Set fields used to group query results.
+
+ :type value: string or sequence of strings
+ :param value: Each value is a string giving the name of a
+ property to use to group results together.
+ """
+        if isinstance(value, str):
+            value = [value]
+        self._distinct_on[:] = value
+
+
+    def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
+              client=None):
+ """Execute the Query; return an iterator for the matching entities.
+
+ For example::
+
+ >>> from gcloud import datastore
+ >>> client = datastore.Client()
+ >>> query = client.query(kind='Person')
+ >>> query.add_filter('name', '=', 'Sally')
+ >>> list(query.fetch())
+ [<Entity object>, <Entity object>, ...]
+ >>> list(query.fetch(1))
+ [<Entity object>]
+
+ :type limit: integer or None
+ :param limit: An optional limit passed through to the iterator.
+
+ :type offset: integer
+ :param offset: An optional offset passed through to the iterator.
+
+ :type start_cursor: bytes
+ :param start_cursor: An optional cursor passed through to the iterator.
+
+ :type end_cursor: bytes
+ :param end_cursor: An optional cursor passed through to the iterator.
+
+ :type client: :class:`gcloud.datastore.client.Client`
+ :param client: client used to connect to datastore.
+ If not supplied, uses the query's value.
+
+ :rtype: :class:`Iterator`
+ :returns: The iterator for the query.
+ :raises: ValueError if ``client`` is not passed and no implicit
+ default has been set.
+ """
+        if client is None:
+            client = self._client
+
+        return Iterator(
+            self, client, limit, offset, start_cursor, end_cursor)
+
+
+
+class Iterator(object):
+ """Represent the state of a given execution of a Query.
+
+ :type query: :class:`gcloud.datastore.query.Query`
+ :param query: Query object holding permanent configuration (i.e.
+ things that don't change with each page in
+ a result set).
+
+ :type client: :class:`gcloud.datastore.client.Client`
+ :param client: The client used to make a request.
+
+ :type limit: integer
+ :param limit: (Optional) Limit the number of results returned.
+
+ :type offset: integer
+ :param offset: (Optional) Offset used to begin a query.
+
+ :type start_cursor: bytes
+ :param start_cursor: (Optional) Cursor to begin paging through
+ query results.
+
+ :type end_cursor: bytes
+ :param end_cursor: (Optional) Cursor to end paging through
+ query results.
+ """
+
+    _NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED
+
+    _FINISHED = (
+        _query_pb2.QueryResultBatch.NO_MORE_RESULTS,
+        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
+        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR,
+    )
+
+    def __init__(self, query, client, limit=None, offset=None,
+                 start_cursor=None, end_cursor=None):
+        self._query = query
+        self._client = client
+        self._limit = limit
+        self._offset = offset
+        self._start_cursor = start_cursor
+        self._end_cursor = end_cursor
+        self._page = self._more_results = None
+        self._skipped_results = None
+
+
+    def next_page(self):
+ """Fetch a single "page" of query results.
+
+ Low-level API for fine control: the more convenient API is
+ to iterate on the current Iterator.
+
+ :rtype: tuple, (entities, more_results, cursor)
+ :returns: The next page of results.
+ """
+        pb = _pb_from_query(self._query)
+
+        start_cursor = self._start_cursor
+        if start_cursor is not None:
+            pb.start_cursor = base64.urlsafe_b64decode(start_cursor)
+
+        end_cursor = self._end_cursor
+        if end_cursor is not None:
+            pb.end_cursor = base64.urlsafe_b64decode(end_cursor)
+
+        if self._limit is not None:
+            pb.limit.value = self._limit
+
+        if self._offset is not None:
+            pb.offset = self._offset
+
+        transaction = self._client.current_transaction
+
+        query_results = self._client.connection.run_query(
+            query_pb=pb,
+            project=self._query.project,
+            namespace=self._query.namespace,
+            transaction_id=transaction and transaction.id,
+        )
+        (entity_pbs, cursor_as_bytes,
+         more_results_enum, self._skipped_results) = query_results
+
+        if cursor_as_bytes == b'':
+            self._start_cursor = None
+        else:
+            self._start_cursor = base64.urlsafe_b64encode(cursor_as_bytes)
+        self._end_cursor = None
+
+        if more_results_enum == self._NOT_FINISHED:
+            self._more_results = True
+        elif more_results_enum in self._FINISHED:
+            self._more_results = False
+        else:
+            raise ValueError('Unexpected value returned for `more_results`.')
+
+        self._page = [
+            helpers.entity_from_protobuf(entity)
+            for entity in entity_pbs]
+        return self._page, self._more_results, self._start_cursor
+
+    def __iter__(self):
+        """Generator yielding all results matching our query.
+
+        :rtype: sequence of :class:`gcloud.datastore.entity.Entity`
+        """
+        while True:
+            self.next_page()
+            for entity in self._page:
+                yield entity
+            if not self._more_results:
+                break
+            num_results = len(self._page)
+            if self._limit is not None:
+                self._limit -= num_results
+            if self._offset is not None and self._skipped_results is not None:
+                # NOTE: The offset goes down relative to the location
+                #       because we are updating the cursor each time.
+                self._offset -= self._skipped_results
+
+
+def _pb_from_query(query):
+ """Convert a Query instance to the corresponding protobuf.
+
+ :type query: :class:`Query`
+ :param query: The source query.
+
+ :rtype: :class:`gcloud.datastore._generated.query_pb2.Query`
+ :returns: A protobuf that can be sent to the protobuf API. N.b. that
+ it does not contain "in-flight" fields for ongoing query
+ executions (cursors, offset, limit).
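+
+ A minimal sketch (assumes a ``client`` is available to build the query):
+
+ >>> query = Query(client, kind='Person')
+ >>> _pb_from_query(query).kind[0].name
+ 'Person'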
+ """
+    pb = _query_pb2.Query()
+
+    for projection_name in query.projection:
+        pb.projection.add().property.name = projection_name
+
+    if query.kind:
+        pb.kind.add().name = query.kind
+
+    composite_filter = pb.filter.composite_filter
+    composite_filter.op = _query_pb2.CompositeFilter.AND
+
+    if query.ancestor:
+        ancestor_pb = query.ancestor.to_protobuf()
+
+        # Filter on __key__ HAS_ANCESTOR == ancestor.
+        ancestor_filter = composite_filter.filters.add().property_filter
+        ancestor_filter.property.name = '__key__'
+        ancestor_filter.op = _query_pb2.PropertyFilter.HAS_ANCESTOR
+        ancestor_filter.value.key_value.CopyFrom(ancestor_pb)
+
+    for property_name, operator, value in query.filters:
+        pb_op_enum = query.OPERATORS.get(operator)
+
+        # Add the specific filter
+        property_filter = composite_filter.filters.add().property_filter
+        property_filter.property.name = property_name
+        property_filter.op = pb_op_enum
+
+        # Set the value to filter on based on the type.
+        if property_name == '__key__':
+            key_pb = value.to_protobuf()
+            property_filter.value.key_value.CopyFrom(key_pb)
+        else:
+            helpers._set_protobuf_value(property_filter.value, value)
+
+    if not composite_filter.filters:
+        pb.ClearField('filter')
+
+    for prop in query.order:
+        property_order = pb.order.add()
+
+        if prop.startswith('-'):
+            property_order.property.name = prop[1:]
+            property_order.direction = property_order.DESCENDING
+        else:
+            property_order.property.name = prop
+            property_order.direction = property_order.ASCENDING
+
+    for distinct_on_name in query.distinct_on:
+        pb.distinct_on.add().name = distinct_on_name
+
+    return pb
+
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud datastore transactions."""
+
+from gcloud.datastore.batch import Batch
+
+
+
+class Transaction(Batch):
+ """An abstraction representing datastore Transactions.
+
+ Transactions can be used to build up a bulk mutation and ensure all
+ or none succeed (transactionally).
+
+ For example, the following snippet of code will put the two ``save``
+ operations (either ``insert`` or ``upsert``) into the same
+ mutation, and execute those within a transaction::
+
+ >>> from gcloud import datastore
+ >>> client = datastore.Client()
+ >>> with client.transaction():
+ ... client.put_multi([entity1, entity2])
+
+ Because it derives from :class:`Batch <.datastore.batch.Batch>`,
+ :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods::
+
+ >>> with client.transaction() as xact:
+ ... xact.put(entity1)
+ ... xact.delete(entity2.key)
+
+ By default, the transaction is rolled back if the transaction block
+ exits with an error::
+
+ >>> with client.transaction():
+ ... do_some_work()
+ ... raise SomeException() # rolls back
+
+ If the transaction block exits without an exception, it will commit
+ by default.
+
+ .. warning:: Inside a transaction, automatically assigned IDs for
+ entities will not be available at save time! That means, if you
+ try::
+
+ >>> with client.transaction():
+ ... entity = datastore.Entity(key=client.key('Thing'))
+ ... client.put(entity)
+
+ ``entity`` won't have a complete key until the transaction is
+ committed.
+
+ Once you exit the transaction (or call :meth:`commit`), the
+ automatically generated ID will be assigned to the entity::
+
+ >>> with client.transaction():
+ ... entity = datastore.Entity(key=client.key('Thing'))
+ ... client.put(entity)
+ ... print(entity.key.is_partial) # There is no ID on this key.
+ ...
+ True
+ >>> print(entity.key.is_partial) # There *is* an ID.
+ False
+
+ If you don't want to use the context manager you can initialize a
+ transaction manually::
+
+ >>> transaction = client.transaction()
+ >>> transaction.begin()
+ >>>
+ >>> entity = datastore.Entity(key=client.key('Thing'))
+ >>> transaction.put(entity)
+ >>>
+ >>> if error:
+ ... transaction.rollback()
+ ... else:
+ ... transaction.commit()
+
+ :type client: :class:`gcloud.datastore.client.Client`
+ :param client: the client used to connect to datastore.
+ """
+
+    def __init__(self, client):
+        super(Transaction, self).__init__(client)
+        self._id = None
+
+    @property
+    def id(self):
+ """Getter for the transaction ID.
+
+ :rtype: string
+ :returns: The ID of the current transaction.
+ """
+        return self._id
+
+
+    def current(self):
+ """Return the topmost transaction.
+
+ .. note::
+
+ If the topmost element on the stack is not a transaction,
+ returns None.
+
+ :rtype: :class:`gcloud.datastore.transaction.Transaction` or None
+ :returns: The current transaction (if any are active).
+ """
+        top = super(Transaction, self).current()
+        if isinstance(top, Transaction):
+            return top
+
+
+    def begin(self):
+ """Begins a transaction.
+
+ This method is called automatically when entering a ``with``
+ statement; however, it can be called explicitly if you don't want
+ to use a context manager.
+
+ :raises: :class:`ValueError` if the transaction has already begun.
+ """
+        super(Transaction, self).begin()
+        self._id = self.connection.begin_transaction(self.project)
+
+
+    def rollback(self):
+ """Rolls back the current transaction.
+
+ This method has necessary side-effects:
+
+ - Sets the current connection's transaction reference to None.
+ - Sets the current transaction's ID to None.
+ """
+        try:
+            self.connection.rollback(self.project, self._id)
+        finally:
+            super(Transaction, self).rollback()
+            # Clear our own ID in case this gets accidentally reused.
+            self._id = None
+
+
+    def commit(self):
+ """Commits the transaction.
+
+ This is called automatically upon exiting a ``with`` statement;
+ however, it can be called explicitly if you don't want to use a
+ context manager.
+
+ This method has necessary side-effects:
+
+ - Sets the current transaction's ID to None.
+ """
+        try:
+            super(Transaction, self).commit()
+        finally:
+            # Clear our own ID in case this gets accidentally reused.
+            self._id = None
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API ResourceRecordSets."""
+
+import six
+
+from gcloud._helpers import _rfc3339_to_datetime
+from gcloud.exceptions import NotFound
+from gcloud.dns.resource_record_set import ResourceRecordSet
+
+
+
+class Changes(object):
+ """Changes are bundled additions / deletions of DNS resource records.
+
+ Changes are owned by a :class:`gcloud.dns.zone.ManagedZone` instance.
+
+ See:
+ https://cloud.google.com/dns/api/v1/changes
+
+ :type zone: :class:`gcloud.dns.zone.ManagedZone`
+ :param zone: A zone which holds one or more record sets.
+ """
+
+    def __init__(self, zone):
+        self.zone = zone
+        self._properties = {}
+        self._additions = self._deletions = ()
+
+    @classmethod
+    def from_api_repr(cls, resource, zone):
+        """Factory: construct a change set given its API representation.
+
+ :type resource: dict
+ :param resource: change set representation returned from the API
+
+ :type zone: :class:`gcloud.dns.zone.ManagedZone`
+ :param zone: A zone which holds zero or more change sets.
+
+ :rtype: :class:`gcloud.dns.changes.Changes`
+        :returns: Change set parsed from ``resource``.
+ """
+        changes = cls(zone=zone)
+        changes._set_properties(resource)
+        return changes
+
+    def _set_properties(self, resource):
+ """Helper method for :meth:`from_api_repr`, :meth:`create`, etc.
+
+ :type resource: dict
+ :param resource: change set representation returned from the API
+ """
+        resource = resource.copy()
+        self._additions = tuple([
+            ResourceRecordSet.from_api_repr(added_res, self.zone)
+            for added_res in resource.pop('additions', ())])
+        self._deletions = tuple([
+            ResourceRecordSet.from_api_repr(deleted_res, self.zone)
+            for deleted_res in resource.pop('deletions', ())])
+        self._properties = resource
+
+ @property
+    def path(self):
+ """URL path for change set APIs.
+
+ :rtype: string
+ :returns: the path based on project, zone, and change set names.
+ """
+        return '/projects/%s/managedZones/%s/changes/%s' % (
+            self.zone.project, self.zone.name, self.name)
+
+ @property
+    def name(self):
+ """Name of the change set.
+
+ :rtype: string or ``NoneType``
+ :returns: Name, as set by the back-end, or None.
+ """
+        return self._properties.get('id')
+
+ @name.setter
+    def name(self, value):
+ """Update name of the change set.
+
+ :type value: string
+ :param value: New name for the changeset.
+ """
+        if not isinstance(value, six.string_types):
+            raise ValueError("Pass a string")
+        self._properties['id'] = value
+
+ @property
+    def status(self):
+ """Status of the change set.
+
+ :rtype: string or ``NoneType``
+ :returns: Status, as set by the back-end, or None.
+ """
+        return self._properties.get('status')
+
+ @property
+    def started(self):
+ """Time when the change set was started.
+
+ :rtype: ``datetime.datetime`` or ``NoneType``
+ :returns: Time, as set by the back-end, or None.
+ """
+        stamp = self._properties.get('startTime')
+        if stamp is not None:
+            return _rfc3339_to_datetime(stamp)
+
+ @property
+    def additions(self):
+ """Resource record sets to be added to the zone.
+
+ :rtype: sequence of
+ :class:`gcloud.dns.resource_record_set.ResourceRecordSet`.
+ :returns: record sets appended via :meth:`add_record_set`
+ """
+        return self._additions
+
+ @property
+    def deletions(self):
+ """Resource record sets to be deleted from the zone.
+
+ :rtype: sequence of
+ :class:`gcloud.dns.resource_record_set.ResourceRecordSet`.
+ :returns: record sets appended via :meth:`delete_record_set`
+ """
+        return self._deletions
+
+
+    def add_record_set(self, record_set):
+ """Append a record set to the 'additions' for the change set.
+
+ :type record_set:
+ :class:`gcloud.dns.resource_record_set.ResourceRecordSet`
+ :param record_set: the record set to append
+
+ :raises: ``ValueError`` if ``record_set`` is not of the required type.
+ """
+        if not isinstance(record_set, ResourceRecordSet):
+            raise ValueError("Pass a ResourceRecordSet")
+        self._additions += (record_set,)
+
+
+    def delete_record_set(self, record_set):
+ """Append a record set to the 'deletions' for the change set.
+
+ :type record_set:
+ :class:`gcloud.dns.resource_record_set.ResourceRecordSet`
+ :param record_set: the record set to append
+
+ :raises: ``ValueError`` if ``record_set`` is not of the required type.
+ """
+        if not isinstance(record_set, ResourceRecordSet):
+            raise ValueError("Pass a ResourceRecordSet")
+        self._deletions += (record_set,)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+
+ :rtype: :class:`gcloud.dns.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self.zone._client
+        return client
+
+    def _build_resource(self):
+        """Generate a resource for ``create``."""
+        additions = [{
+            'name': added.name,
+            'type': added.record_type,
+            'ttl': str(added.ttl),
+            'rrdatas': added.rrdatas,
+        } for added in self.additions]
+
+        deletions = [{
+            'name': deleted.name,
+            'type': deleted.record_type,
+            'ttl': str(deleted.ttl),
+            'rrdatas': deleted.rrdatas,
+        } for deleted in self.deletions]
+
+        return {
+            'additions': additions,
+            'deletions': deletions,
+        }
+
+
+    def create(self, client=None):
+ """API call: create the change set via a POST request
+
+ See:
+ https://cloud.google.com/dns/api/v1/changes/create
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+ """
+        if len(self.additions) == 0 and len(self.deletions) == 0:
+            raise ValueError("No record sets added or deleted")
+        client = self._require_client(client)
+        path = '/projects/%s/managedZones/%s/changes' % (
+            self.zone.project, self.zone.name)
+        api_response = client.connection.api_request(
+            method='POST', path=path, data=self._build_resource())
+        self._set_properties(api_response)
+
+
+    def exists(self, client=None):
+ """API call: test for the existence of the change set via a GET request
+
+ See
+ https://cloud.google.com/dns/api/v1/changes/get
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the changes.
+ """
+        client = self._require_client(client)
+        try:
+            client.connection.api_request(method='GET', path=self.path,
+                                          query_params={'fields': 'id'})
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def reload(self, client=None):
+ """API call: refresh zone properties via a GET request
+
+ See
+ https://cloud.google.com/dns/api/v1/changes/get
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+ """
+        client = self._require_client(client)
+
+        api_response = client.connection.api_request(
+            method='GET', path=self.path)
+        self._set_properties(api_response)
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud DNS API."""
+
+
+from gcloud.client import JSONClient
+from gcloud.dns.connection import Connection
+from gcloud.dns.zone import ManagedZone
+
+
+
+class Client(JSONClient):
+ """Client to bundle configuration needed for API requests.
+
+ :type project: string
+ :param project: the project which the client acts on behalf of. Will be
+ passed when creating a zone. If not passed,
+ falls back to the default inferred from the environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ """
+
+    _connection_class = Connection
+
+
+    def quotas(self):
+ """Return DNS quots for the project associated with this client.
+
+ See:
+ https://cloud.google.com/dns/api/v1/projects/get
+
+ :rtype: mapping
+ :returns: keys for the mapping correspond to those of the ``quota``
+ sub-mapping of the project resource.
+ """
+        path = '/projects/%s' % (self.project,)
+        resp = self.connection.api_request(method='GET', path=path)
+
+        return dict([(key, int(value))
+                     for key, value in resp['quota'].items()
+                     if key != 'kind'])
+
+
+    def list_zones(self, max_results=None, page_token=None):
+ """List zones for the project associated with this client.
+
+ See:
+ https://cloud.google.com/dns/api/v1/managedZones/list
+
+ :type max_results: int
+        :param max_results: maximum number of zones to return. If not
+ passed, defaults to a value set by the API.
+
+ :type page_token: string
+ :param page_token: opaque marker for the next "page" of zones. If
+ not passed, the API will return the first page of
+ zones.
+
+ :rtype: tuple, (list, str)
+ :returns: list of :class:`gcloud.dns.zone.ManagedZone`, plus a
+ "next page token" string: if the token is not None,
+ indicates that more zones can be retrieved with another
+ call (pass that value as ``page_token``).
+ """
+        params = {}
+
+        if max_results is not None:
+            params['maxResults'] = max_results
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/managedZones' % (self.project,)
+        resp = self.connection.api_request(method='GET', path=path,
+                                           query_params=params)
+        zones = [ManagedZone.from_api_repr(resource, self)
+                 for resource in resp['managedZones']]
+        return zones, resp.get('nextPageToken')
+
+
+    def zone(self, name, dns_name=None, description=None):
+ """Construct a zone bound to this client.
+
+ :type name: string
+ :param name: Name of the zone.
+
+ :type dns_name: string or :class:`NoneType`
+ :param dns_name: DNS name of the zone. If not passed, then calls
+ to :meth:`zone.create` will fail.
+
+ :type description: string or :class:`NoneType`
+ :param description: the description for the zone. If not passed,
+ defaults to the value of 'dns_name'.
+
+ :rtype: :class:`gcloud.dns.zone.ManagedZone`
+ :returns: a new ``ManagedZone`` instance
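+
+        Example (hypothetical names; note the trailing dot the API
+        requires on DNS names)::
+
+            >>> zone = client.zone('acme-co', 'example.com.')
+            >>> zone.create()  # API request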
+ """
+        return ManagedZone(name, dns_name, client=self,
+                           description=description)
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud dns connections."""
+
+from gcloud import connection as base_connection
+
+
+
+class Connection(base_connection.JSONConnection):
+ """A connection to Google Cloud DNS via the JSON REST API."""
+
+    API_BASE_URL = 'https://www.googleapis.com'
+ """The base of the API call URL."""
+
+    API_VERSION = 'v1'
+ """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = '{api_base_url}/dns/{api_version}{path}'
+ """A template for the URL of a particular API call."""
+
+    SCOPE = ('https://www.googleapis.com/auth/ndev.clouddns.readwrite',)
+ """The scopes required for authenticating as a Cloud DNS consumer."""
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API ResourceRecordSets."""
+
+
+
+class ResourceRecordSet(object):
+ """ResourceRecordSets are DNS resource records.
+
+ RRS are owned by a :class:`gcloud.dns.zone.ManagedZone` instance.
+
+ See:
+ https://cloud.google.com/dns/api/v1/resourceRecordSets
+
+ :type name: string
+ :param name: the name of the record set
+
+ :type record_type: string
+    :param record_type: the RR type of the record set
+
+ :type ttl: integer
+ :param ttl: TTL (in seconds) for caching the record sets
+
+ :type rrdatas: list of string
+ :param rrdatas: one or more lines containing the resource data
+
+ :type zone: :class:`gcloud.dns.zone.ManagedZone`
+ :param zone: A zone which holds one or more record sets.
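+
+    A construction sketch with hypothetical values::
+
+        >>> rrs = ResourceRecordSet('www.example.com.', 'A', 3600,
+        ...                         ['1.2.3.4'], zone=zone)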
+ """
+
+    def __init__(self, name, record_type, ttl, rrdatas, zone):
+        self.name = name
+        self.record_type = record_type
+        self.ttl = ttl
+        self.rrdatas = rrdatas
+        self.zone = zone
+
+    @classmethod
+    def from_api_repr(cls, resource, zone):
+ """Factory: construct a record set given its API representation
+
+ :type resource: dict
+ :param resource: record sets representation returned from the API
+
+ :type zone: :class:`gcloud.dns.zone.ManagedZone`
+ :param zone: A zone which holds one or more record sets.
+
+        :rtype: :class:`gcloud.dns.resource_record_set.ResourceRecordSet`
+ :returns: RRS parsed from ``resource``.
+ """
+        name = resource['name']
+        record_type = resource['type']
+        ttl = int(resource['ttl'])
+        rrdatas = resource['rrdatas']
+        return cls(name, record_type, ttl, rrdatas, zone=zone)
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API ManagedZones."""
+import six
+
+from gcloud._helpers import _rfc3339_to_datetime
+from gcloud.exceptions import NotFound
+from gcloud.dns.changes import Changes
+from gcloud.dns.resource_record_set import ResourceRecordSet
+
+
+
+class ManagedZone(object):
+ """ManagedZones are containers for DNS resource records.
+
+ See:
+ https://cloud.google.com/dns/api/v1/managedZones
+
+ :type name: string
+ :param name: the name of the zone
+
+ :type dns_name: string or :class:`NoneType`
+ :param dns_name: the DNS name of the zone. If not passed, then calls
+ to :meth:`create` will fail.
+
+ :type client: :class:`gcloud.dns.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the zone (which requires a project).
+
+ :type description: string or :class:`NoneType`
+ :param description: the description for the zone. If not passed, defaults
+ to the value of 'dns_name'.
+ """
+
+    def __init__(self, name, dns_name=None, client=None, description=None):
+        self.name = name
+        self.dns_name = dns_name
+        self._client = client
+        self._properties = {}
+        if description is None:
+            description = dns_name
+        self.description = description
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+ """Factory: construct a zone given its API representation
+
+ :type resource: dict
+ :param resource: zone resource representation returned from the API
+
+ :type client: :class:`gcloud.dns.client.Client`
+ :param client: Client which holds credentials and project
+ configuration for the zone.
+
+ :rtype: :class:`gcloud.dns.zone.ManagedZone`
+ :returns: Zone parsed from ``resource``.
+ """
+        name = resource.get('name')
+        dns_name = resource.get('dnsName')
+        if name is None or dns_name is None:
+            raise KeyError('Resource lacks required identity information: '
+                           '["name"]["dnsName"]')
+        zone = cls(name, dns_name, client=client)
+        zone._set_properties(resource)
+        return zone
+
+ @property
+    def project(self):
+ """Project bound to the zone.
+
+ :rtype: string
+ :returns: the project (derived from the client).
+ """
+        return self._client.project
+
+ @property
+    def path(self):
+ """URL path for the zone's APIs.
+
+ :rtype: string
+        :returns: the path based on project and zone name.
+ """
+        return '/projects/%s/managedZones/%s' % (self.project, self.name)
+
+ @property
+    def created(self):
+ """Datetime at which the zone was created.
+
+ :rtype: ``datetime.datetime``, or ``NoneType``
+ :returns: the creation time (None until set from the server).
+ """
+        return self._properties.get('creationTime')
+
+ @property
+    def name_servers(self):
+ """Datetime at which the zone was created.
+
+ :rtype: list of strings, or ``NoneType``.
+ :returns: the assigned name servers (None until set from the server).
+ """
+        return self._properties.get('nameServers')
+
+ @property
+    def zone_id(self):
+ """ID for the zone resource.
+
+ :rtype: string, or ``NoneType``
+ :returns: the ID (None until set from the server).
+ """
+        return self._properties.get('id')
+
+ @property
+    def description(self):
+ """Description of the zone.
+
+ :rtype: string, or ``NoneType``
+ :returns: The description as set by the user, or None (the default).
+ """
+        return self._properties.get('description')
+
+ @description.setter
+    def description(self, value):
+ """Update description of the zone.
+
+ :type value: string, or ``NoneType``
+ :param value: new description
+
+ :raises: ValueError for invalid value types.
+ """
+        if not isinstance(value, six.string_types) and value is not None:
+            raise ValueError("Pass a string, or None")
+        self._properties['description'] = value
+
+ @property
+    def name_server_set(self):
+ """Named set of DNS name servers that all host the same ManagedZones.
+
+ Most users will leave this blank.
+
+ See:
+ https://cloud.google.com/dns/api/v1/managedZones#nameServerSet
+
+ :rtype: string, or ``NoneType``
+ :returns: The name as set by the user, or None (the default).
+ """
+        return self._properties.get('nameServerSet')
+
+ @name_server_set.setter
+    def name_server_set(self, value):
+ """Update named set of DNS name servers.
+
+ :type value: string, or ``NoneType``
+        :param value: new name server set
+
+ :raises: ValueError for invalid value types.
+ """
+        if not isinstance(value, six.string_types) and value is not None:
+            raise ValueError("Pass a string, or None")
+        self._properties['nameServerSet'] = value
+
+
+    def resource_record_set(self, name, record_type, ttl, rrdatas):
+ """Construct a resource record set bound to this zone.
+
+ :type name: string
+ :param name: Name of the record set.
+
+ :type record_type: string
+ :param record_type: RR type
+
+ :type ttl: integer
+ :param ttl: TTL for the RR, in seconds
+
+ :type rrdatas: list of string
+ :param rrdatas: resource data for the RR
+
+ :rtype: :class:`gcloud.dns.resource_record_set.ResourceRecordSet`
+ :returns: a new ``ResourceRecordSet`` instance
+ """
+        return ResourceRecordSet(name, record_type, ttl, rrdatas, zone=self)
+
+
+    def changes(self):
+ """Construct a change set bound to this zone.
+
+ :rtype: :class:`gcloud.dns.changes.Changes`
+ :returns: a new ``Changes`` instance
+ """
+        return Changes(zone=self)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+
+ :rtype: :class:`gcloud.dns.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self._client
+        return client
+
+    def _set_properties(self, api_response):
+ """Update properties from resource in body of ``api_response``
+
+        :type api_response: dict
+ :param api_response: response returned from an API call
+ """
+        self._properties.clear()
+        cleaned = api_response.copy()
+        self.dns_name = cleaned.pop('dnsName', None)
+        if 'creationTime' in cleaned:
+            cleaned['creationTime'] = _rfc3339_to_datetime(
+                cleaned['creationTime'])
+        self._properties.update(cleaned)
+
+    def _build_resource(self):
+        """Generate a resource for ``create`` or ``update``."""
+        resource = {
+            'name': self.name,
+        }
+
+        if self.dns_name is not None:
+            resource['dnsName'] = self.dns_name
+
+        if self.description is not None:
+            resource['description'] = self.description
+
+        if self.name_server_set is not None:
+            resource['nameServerSet'] = self.name_server_set
+
+        return resource
+
+
+    def create(self, client=None):
+ """API call: create the zone via a PUT request
+
+ See:
+ https://cloud.google.com/dns/api/v1/managedZones/create
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+ """
+        client = self._require_client(client)
+        path = '/projects/%s/managedZones' % (self.project,)
+        api_response = client.connection.api_request(
+            method='POST', path=path, data=self._build_resource())
+        self._set_properties(api_response)
+
+
+    def exists(self, client=None):
+ """API call: test for the existence of the zone via a GET request
+
+ See
+ https://cloud.google.com/dns/api/v1/managedZones/get
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the managed zone.
+ """
+        client = self._require_client(client)
+
+        try:
+            client.connection.api_request(method='GET', path=self.path,
+                                          query_params={'fields': 'id'})
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def reload(self, client=None):
+ """API call: refresh zone properties via a GET request
+
+ See
+ https://cloud.google.com/dns/api/v1/managedZones/get
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+ """
+        client = self._require_client(client)
+
+        api_response = client.connection.api_request(
+            method='GET', path=self.path)
+        self._set_properties(api_response)
+
+
+    def delete(self, client=None):
+ """API call: delete the zone via a DELETE request
+
+ See:
+ https://cloud.google.com/dns/api/v1/managedZones/delete
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+ """
+        client = self._require_client(client)
+        client.connection.api_request(method='DELETE', path=self.path)
+
+
+    def list_resource_record_sets(self, max_results=None, page_token=None,
+                                  client=None):
+ """List resource record sets for this zone.
+
+ See:
+ https://cloud.google.com/dns/api/v1/resourceRecordSets/list
+
+        :type max_results: int
+        :param max_results: maximum number of record sets to return. If not
+                            passed, defaults to a value set by the API.
+
+        :type page_token: string
+        :param page_token: opaque marker for the next "page" of record sets.
+                           If not passed, the API will return the first page
+                           of record sets.
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+
+ :rtype: tuple, (list, str)
+ :returns: list of
+ :class:`gcloud.dns.resource_record_set.ResourceRecordSet`,
+ plus a "next page token" string: if the token is not None,
+                  indicates that more record sets can be retrieved with
+                  another call (pass that value as ``page_token``).
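+
+        A sketch of fetching the first page of record sets::
+
+            >>> record_sets, token = zone.list_resource_record_sets()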
+ """
+        params = {}
+
+        if max_results is not None:
+            params['maxResults'] = max_results
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/managedZones/%s/rrsets' % (
+            self.project, self.name)
+        client = self._require_client(client)
+        conn = client.connection
+        resp = conn.api_request(method='GET', path=path, query_params=params)
+        record_sets = [ResourceRecordSet.from_api_repr(resource, self)
+                       for resource in resp['rrsets']]
+        return record_sets, resp.get('nextPageToken')
+
+
+    def list_changes(self, max_results=None, page_token=None, client=None):
+ """List change sets for this zone.
+
+ See:
+        https://cloud.google.com/dns/api/v1/changes/list
+
+        :type max_results: int
+        :param max_results: maximum number of change sets to return. If not
+                            passed, defaults to a value set by the API.
+
+        :type page_token: string
+        :param page_token: opaque marker for the next "page" of change sets.
+                           If not passed, the API will return the first page
+                           of change sets.
+
+ :type client: :class:`gcloud.dns.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current zone.
+
+ :rtype: tuple, (list, str)
+        :returns: list of :class:`gcloud.dns.changes.Changes`, plus a
+                  "next page token" string: if the token is not None,
+                  indicates that more change sets can be retrieved with
+                  another call (pass that value as ``page_token``).
+ """
+        params = {}
+
+        if max_results is not None:
+            params['maxResults'] = max_results
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/managedZones/%s/changes' % (
+            self.project, self.name)
+        client = self._require_client(client)
+        conn = client.connection
+        resp = conn.api_request(method='GET', path=path, query_params=params)
+        changes = [Changes.from_api_repr(resource, self)
+                   for resource in resp['changes']]
+        return changes, resp.get('nextPageToken')
+# Copyright 2016 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Stackdriver Logging API"""
+
+import traceback
+
+import gcloud.logging.client
+import six
+
+
+
+class HTTPContext(object):
+ """HTTPContext defines an object that captures the parameter for the
+ httpRequest part of Error Reporting API
+
+ :type method: string
+ :param method: The type of HTTP request, such as GET, POST, etc.
+
+ :type url: string
+ :param url: The URL of the request
+
+ :type user_agent: string
+ :param user_agent: The user agent information that is provided with the
+ request.
+
+ :type referrer: string
+ :param referrer: The referrer information that is provided with the
+ request.
+
+ :type response_status_code: int
+ :param response_status_code: The HTTP response status code for the request.
+
+ :type remote_ip: string
+ :param remote_ip: The IP address from which the request originated. This
+ can be IPv4, IPv6, or a token which is derived from
+ the IP address, depending on the data that has been
+ provided in the error report.
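+
+    A construction sketch with hypothetical values (all fields are
+    optional)::
+
+        >>> http_context = HTTPContext(
+        ...     method='GET', url='/', response_status_code=500)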
+ """
+
+    def __init__(self, method=None, url=None,
+                 user_agent=None, referrer=None,
+                 response_status_code=None, remote_ip=None):
+        self.method = method
+        self.url = url
+        # Intentionally camelCase: these names map directly onto what
+        # the JSON API expects.
+        # pylint: disable=invalid-name
+        self.userAgent = user_agent
+        self.referrer = referrer
+        self.responseStatusCode = response_status_code
+        self.remoteIp = remote_ip
+
+
+
+class Client(object):
+ """Error Reporting client. Currently Error Reporting is done by creating
+ a Logging client.
+
+ :type project: string
+ :param project: the project which the client acts on behalf of. If not
+ passed falls back to the default inferred from the
+ environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+
+ :type service: str
+ :param service: An identifier of the service, such as the name of the
+ executable, job, or Google App Engine service name. This
+ field is expected to have a low number of values that are
+ relatively stable over time, as opposed to version,
+ which can be changed whenever new code is deployed.
+
+
+ :type version: str
+ :param version: Represents the source code version that the developer
+ provided, which could represent a version label or a Git
+ SHA-1 hash, for example. If the developer did not provide
+ a version, the value is set to default.
+
+ :raises: :class:`ValueError` if the project is neither passed in nor
+ set in the environment.
+ """
+
+    DEFAULT_SERVICE = 'python'
+
+    def __init__(self, project=None,
+                 credentials=None,
+                 http=None,
+                 service=None,
+                 version=None):
+        self.logging_client = gcloud.logging.client.Client(
+            project, credentials, http)
+        self.service = service if service else self.DEFAULT_SERVICE
+        self.version = version
+
+    def _send_error_report(self, message,
+                           report_location=None, http_context=None,
+                           user=None):
+ """Makes the call to the Error Reporting API via the log stream.
+
+        This is the lower-level interface to build the payload; generally,
+        users will call either :meth:`report` or :meth:`report_exception`,
+        which gather the parameters for this method automatically.
+
+ Currently this method sends the Error Report by formatting a structured
+ log message according to
+
+ https://cloud.google.com/error-reporting/docs/formatting-error-messages
+
+ :type message: string
+ :param message: The stack trace that was reported or logged by the
+ service.
+
+ :type report_location: dict
+ :param report_location: The location in the source code where the
+ decision was made to report the error, usually the place
+ where it was logged. For a logged exception this would be the
+ source line where the exception is logged, usually close to
+ the place where it was caught.
+
+ This should be a Python dict that contains the keys 'filePath',
+ 'lineNumber', and 'functionName'
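+
+            A hypothetical example::
+
+                {'filePath': 'app.py',
+                 'lineNumber': 42,
+                 'functionName': 'handle_request'}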
+
+        :type http_context: :class:`gcloud.error_reporting.HTTPContext`
+ :param http_context: The HTTP request which was processed when the
+ error was triggered.
+
+ :type user: string
+ :param user: The user who caused or was affected by the crash. This can
+ be a user ID, an email address, or an arbitrary token that
+ uniquely identifies the user. When sending an error
+ report, leave this field empty if the user was not
+ logged in. In this case the Error Reporting system will
+ use other data, such as remote IP address,
+ to distinguish affected users.
+ """
+        payload = {
+            'serviceContext': {
+                'service': self.service,
+            },
+            'message': '{0}'.format(message),
+        }
+
+        if self.version:
+            payload['serviceContext']['version'] = self.version
+
+        if report_location or http_context or user:
+            payload['context'] = {}
+
+        if report_location:
+            payload['context']['reportLocation'] = report_location
+
+        if http_context:
+            http_context_dict = http_context.__dict__
+            # Strip out None values.
+            # Once py26 support is dropped, this can use a dict
+            # comprehension.
+            payload['context']['httpContext'] = dict(
+                (k, v) for (k, v) in six.iteritems(http_context_dict)
+                if v is not None
+            )
+
+        if user:
+            payload['context']['user'] = user
+
+        logger = self.logging_client.logger('errors')
+        logger.log_struct(payload)
+
+
+    def report(self, message, http_context=None, user=None):
+ """ Reports a message to Stackdriver Error Reporting
+ https://cloud.google.com/error-reporting/docs/formatting-error-messages
+
+ :type message: str
+ :param message: A user-supplied message to report
+
+
+        :type http_context: :class:`gcloud.error_reporting.HTTPContext`
+ :param http_context: The HTTP request which was processed when the
+ error was triggered.
+
+ :type user: string
+ :param user: The user who caused or was affected by the crash. This
+ can be a user ID, an email address, or an arbitrary
+ token that uniquely identifies the user. When sending
+ an error report, leave this field empty if the user
+ was not logged in. In this case the Error Reporting
+ system will use other data, such as remote IP address,
+ to distinguish affected users.
+
+        Example::
+
+            >>> client.report("Something went wrong!")
+ """
+        stack = traceback.extract_stack()
+        last_call = stack[-2]
+        file_path = last_call[0]
+        line_number = last_call[1]
+        function_name = last_call[2]
+        report_location = {
+            'filePath': file_path,
+            'lineNumber': line_number,
+            'functionName': function_name,
+        }
+
+        self._send_error_report(message,
+                                http_context=http_context,
+                                user=user,
+                                report_location=report_location)
+
+
+    def report_exception(self, http_context=None, user=None):
+ """ Reports the details of the latest exceptions to Stackdriver Error
+ Reporting.
+
+        :type http_context: :class:`gcloud.error_reporting.HTTPContext`
+ :param http_context: The HTTP request which was processed when the
+ error was triggered.
+
+ :type user: string
+ :param user: The user who caused or was affected by the crash. This
+ can be a user ID, an email address, or an arbitrary
+ token that uniquely identifies the user. When sending an
+ error report, leave this field empty if the user was
+ not logged in. In this case the Error Reporting system
+ will use other data, such as remote IP address,
+ to distinguish affected users.
+
+ Example::
+
+            >>> try:
+            ...     raise NameError
+            ... except Exception:
+            ...     client.report_exception()
+ """
+        self._send_error_report(traceback.format_exc(),
+                                http_context=http_context,
+                                user=user)
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Custom exceptions for :mod:`gcloud` package.
+
+See: https://cloud.google.com/storage/docs/json_api/v1/status-codes
+"""
+
+import copy
+import json
+import six
+
+_HTTP_CODE_TO_EXCEPTION = {}  # populated at end of module
+
+
+
+class GCloudError(Exception):
+ """Base error class for gcloud errors (abstract).
+
+ Each subclass represents a single type of HTTP error response.
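+
+    A hedged usage sketch (concrete subclasses are raised for error
+    responses; ``path`` is a hypothetical request path)::
+
+        >>> try:
+        ...     client.connection.api_request(method='GET', path=path)
+        ... except GCloudError as exc:
+        ...     print(exc)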
+ """
+    code = None
+ """HTTP status code. Concrete subclasses *must* define.
+
+ See: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
+ """
+
+    def __init__(self, message, errors=()):
+        super(GCloudError, self).__init__()
+        # suppress deprecation warning under 2.6.x
+        self.message = message
+        self._errors = errors
+
+    def __str__(self):
+        return '%d %s' % (self.code, self.message)
+
+ @property
+    def errors(self):
+ """Detailed error information.
+
+ :rtype: list(dict)
+ :returns: a list of mappings describing each error.
+ """
+        return [copy.deepcopy(error) for error in self._errors]
+
+
+
+class Redirection(GCloudError):
+ """Base for 3xx responses
+
+ This class is abstract.
+ """
+
+
+
+class MovedPermanently(Redirection):
+    """Exception mapping a '301 Moved Permanently' response."""
+    code = 301
+
+
+
+class NotModified(Redirection):
+    """Exception mapping a '304 Not Modified' response."""
+    code = 304
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Stackdriver Logging API."""
+
+import os
+
+try:
+    from google.cloud.logging.v2.config_service_v2_api import (
+        ConfigServiceV2Api as GeneratedSinksAPI)
+    from google.cloud.logging.v2.logging_service_v2_api import (
+        LoggingServiceV2Api as GeneratedLoggingAPI)
+    from google.cloud.logging.v2.metrics_service_v2_api import (
+        MetricsServiceV2Api as GeneratedMetricsAPI)
+    from gcloud.logging._gax import _LoggingAPI as GAXLoggingAPI
+    from gcloud.logging._gax import _MetricsAPI as GAXMetricsAPI
+    from gcloud.logging._gax import _SinksAPI as GAXSinksAPI
+except ImportError:  # pragma: NO COVER
+    _HAVE_GAX = False
+    GeneratedLoggingAPI = GAXLoggingAPI = None
+    GeneratedMetricsAPI = GAXMetricsAPI = None
+    GeneratedSinksAPI = GAXSinksAPI = None
+else:
+    _HAVE_GAX = True
+
+from gcloud.client import JSONClient
+from gcloud.logging.connection import Connection
+from gcloud.logging.connection import _LoggingAPI as JSONLoggingAPI
+from gcloud.logging.connection import _MetricsAPI as JSONMetricsAPI
+from gcloud.logging.connection import _SinksAPI as JSONSinksAPI
+from gcloud.logging.entries import ProtobufEntry
+from gcloud.logging.entries import StructEntry
+from gcloud.logging.entries import TextEntry
+from gcloud.logging.logger import Logger
+from gcloud.logging.metric import Metric
+from gcloud.logging.sink import Sink
+
+
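+# Use the GAX (gRPC) transport only when its dependencies are importable
+# *and* the 'GCLOUD_ENABLE_GAX' environment variable is set.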
+_USE_GAX = _HAVE_GAX and (os.environ.get('GCLOUD_ENABLE_GAX') is not None)
+
+
+
+class Client(JSONClient):
+ """Client to bundle configuration needed for API requests.
+
+ :type project: str
+ :param project: the project which the client acts on behalf of.
+ If not passed, falls back to the default inferred
+ from the environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ """
+
+    _connection_class = Connection
+    _logging_api = _sinks_api = _metrics_api = None
+
+ @property
+    def logging_api(self):
+ """Helper for logging-related API calls.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
+ """
+        if self._logging_api is None:
+            if _USE_GAX:
+                generated = GeneratedLoggingAPI()
+                self._logging_api = GAXLoggingAPI(generated)
+            else:
+                self._logging_api = JSONLoggingAPI(self.connection)
+        return self._logging_api
+
+ @property
+    def sinks_api(self):
+ """Helper for log sink-related API calls.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
+ """
+        if self._sinks_api is None:
+            if _USE_GAX:
+                generated = GeneratedSinksAPI()
+                self._sinks_api = GAXSinksAPI(generated)
+            else:
+                self._sinks_api = JSONSinksAPI(self.connection)
+        return self._sinks_api
+
+ @property
+    def metrics_api(self):
+ """Helper for log metric-related API calls.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
+ """
+        if self._metrics_api is None:
+            if _USE_GAX:
+                generated = GeneratedMetricsAPI()
+                self._metrics_api = GAXMetricsAPI(generated)
+            else:
+                self._metrics_api = JSONMetricsAPI(self.connection)
+        return self._metrics_api
+
+
+    def logger(self, name):
+ """Creates a logger bound to the current client.
+
+ :type name: str
+ :param name: the name of the logger to be constructed.
+
+ :rtype: :class:`gcloud.logging.logger.Logger`
+ :returns: Logger created with the current client.
+ """
+        return Logger(name, client=self)
+
+    def _entry_from_resource(self, resource, loggers):
+ """Detect correct entry type from resource and instantiate.
+
+ :type resource: dict
+ :param resource: one entry resource from API response
+
+ :type loggers: dict or None
+ :param loggers: A mapping of logger fullnames -> loggers. If not
+ passed, the entry will have a newly-created logger.
+
+ :rtype: One of:
+ :class:`gcloud.logging.entries.TextEntry`,
+ :class:`gcloud.logging.entries.StructEntry`,
+ :class:`gcloud.logging.entries.ProtobufEntry`
+ :returns: the entry instance, constructed via the resource
+ """
+        if 'textPayload' in resource:
+            return TextEntry.from_api_repr(resource, self, loggers)
+        elif 'jsonPayload' in resource:
+            return StructEntry.from_api_repr(resource, self, loggers)
+        elif 'protoPayload' in resource:
+            return ProtobufEntry.from_api_repr(resource, self, loggers)
+        raise ValueError('Cannot parse log entry resource')
+
+
+    def list_entries(self, projects=None, filter_=None, order_by=None,
+                     page_size=None, page_token=None):
+ """Return a page of log entries.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list
+
+ :type projects: list of strings
+ :param projects: project IDs to include. If not passed,
+ defaults to the project bound to the client.
+
+ :type filter_: str
+ :param filter_: a filter expression. See:
+ https://cloud.google.com/logging/docs/view/advanced_filters
+
+ :type order_by: str
+ :param order_by: One of :data:`gcloud.logging.ASCENDING` or
+ :data:`gcloud.logging.DESCENDING`.
+
+ :type page_size: int
+        :param page_size: maximum number of entries to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: str
+ :param page_token: opaque marker for the next "page" of entries. If not
+ passed, the API will return the first page of
+ entries.
+
+ :rtype: tuple, (list, str)
+        :returns: list of :class:`gcloud.logging.entries.TextEntry`, plus a
+ "next page token" string: if not None, indicates that
+ more entries can be retrieved with another call (pass that
+ value as ``page_token``).
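+
+        Example (an assumed advanced-filter expression)::
+
+            >>> entries, token = client.list_entries(
+            ...     filter_='severity>=ERROR')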
+ """
+        if projects is None:
+            projects = [self.project]
+
+        resources, token = self.logging_api.list_entries(
+            projects=projects, filter_=filter_, order_by=order_by,
+            page_size=page_size, page_token=page_token)
+        loggers = {}
+        entries = [self._entry_from_resource(resource, loggers)
+                   for resource in resources]
+        return entries, token
+
+
+    def sink(self, name, filter_=None, destination=None):
+ """Creates a sink bound to the current client.
+
+ :type name: str
+ :param name: the name of the sink to be constructed.
+
+ :type filter_: str
+ :param filter_: (optional) the advanced logs filter expression
+ defining the entries exported by the sink. If not
+ passed, the instance should already exist, to be
+ refreshed via :meth:`Sink.reload`.
+
+ :type destination: str
+ :param destination: destination URI for the entries exported by
+ the sink. If not passed, the instance should
+ already exist, to be refreshed via
+ :meth:`Sink.reload`.
+
+ :rtype: :class:`gcloud.logging.sink.Sink`
+ :returns: Sink created with the current client.
+ """
+        return Sink(name, filter_, destination, client=self)
+
+
+    def list_sinks(self, page_size=None, page_token=None):
+ """List sinks for the project associated with this client.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list
+
+ :type page_size: int
+        :param page_size: maximum number of sinks to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: str
+ :param page_token: opaque marker for the next "page" of sinks. If not
+ passed, the API will return the first page of
+ sinks.
+
+ :rtype: tuple, (list, str)
+ :returns: list of :class:`gcloud.logging.sink.Sink`, plus a
+ "next page token" string: if not None, indicates that
+ more sinks can be retrieved with another call (pass that
+ value as ``page_token``).
+ """
+        resources, token = self.sinks_api.list_sinks(
+            self.project, page_size, page_token)
+        sinks = [Sink.from_api_repr(resource, self)
+                 for resource in resources]
+        return sinks, token
+
+
+    def metric(self, name, filter_=None, description=''):
+ """Creates a metric bound to the current client.
+
+ :type name: str
+ :param name: the name of the metric to be constructed.
+
+ :type filter_: str
+ :param filter_: the advanced logs filter expression defining the
+ entries tracked by the metric. If not
+ passed, the instance should already exist, to be
+ refreshed via :meth:`Metric.reload`.
+
+ :type description: str
+ :param description: the description of the metric to be constructed.
+ If not passed, the instance should already exist,
+ to be refreshed via :meth:`Metric.reload`.
+
+ :rtype: :class:`gcloud.logging.metric.Metric`
+ :returns: Metric created with the current client.
+ """
+        return Metric(name, filter_, client=self, description=description)
+
+
+    def list_metrics(self, page_size=None, page_token=None):
+ """List metrics for the project associated with this client.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list
+
+ :type page_size: int
+        :param page_size: maximum number of metrics to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: str
+ :param page_token: opaque marker for the next "page" of metrics. If not
+ passed, the API will return the first page of
+ metrics.
+
+ :rtype: tuple, (list, str)
+ :returns: list of :class:`gcloud.logging.metric.Metric`, plus a
+ "next page token" string: if not None, indicates that
+ more metrics can be retrieved with another call (pass that
+ value as ``page_token``).
+ """
+        resources, token = self.metrics_api.list_metrics(
+            self.project, page_size, page_token)
+        metrics = [Metric.from_api_repr(resource, self)
+                   for resource in resources]
+        return metrics, token
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Stackdriver Logging connections."""
+
+from gcloud import connection as base_connection
+
+
+
+class Connection(base_connection.JSONConnection):
+ """A connection to Google Stackdriver Logging via the JSON REST API.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ connection.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: (Optional) HTTP object to make requests.
+
+ :type api_base_url: string
+ :param api_base_url: The base of the API call URL. Defaults to the value
+ :attr:`Connection.API_BASE_URL`.
+ """
+
+    API_BASE_URL = 'https://logging.googleapis.com'
+ """The base of the API call URL."""
+
+    API_VERSION = 'v2beta1'
+ """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
+ """A template for the URL of a particular API call."""
+
+    SCOPE = ('https://www.googleapis.com/auth/logging.read',
+             'https://www.googleapis.com/auth/logging.write',
+             'https://www.googleapis.com/auth/logging.admin',
+             'https://www.googleapis.com/auth/cloud-platform')
+ """The scopes required for authenticating as a Logging consumer."""
+
+
+class _LoggingAPI(object):
+ """Helper mapping logging-related APIs.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
+
+ :type connection: :class:`gcloud.logging.connection.Connection`
+ :param connection: the connection used to make API requests.
+ """
+    def __init__(self, connection):
+        self._connection = connection
+
+    def list_entries(self, projects, filter_=None, order_by=None,
+                     page_size=None, page_token=None):
+ """Return a page of log entry resources.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list
+
+ :type projects: list of strings
+ :param projects: project IDs to include. If not passed,
+ defaults to the project bound to the client.
+
+ :type filter_: str
+ :param filter_: a filter expression. See:
+ https://cloud.google.com/logging/docs/view/advanced_filters
+
+ :type order_by: str
+ :param order_by: One of :data:`gcloud.logging.ASCENDING` or
+ :data:`gcloud.logging.DESCENDING`.
+
+ :type page_size: int
+        :param page_size: maximum number of entries to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: str
+ :param page_token: opaque marker for the next "page" of entries. If not
+ passed, the API will return the first page of
+ entries.
+
+ :rtype: tuple, (list, str)
+ :returns: list of mappings, plus a "next page token" string:
+ if not None, indicates that more entries can be retrieved
+ with another call (pass that value as ``page_token``).
+ """
+        params = {'projectIds': projects}
+
+        if filter_ is not None:
+            params['filter'] = filter_
+
+        if order_by is not None:
+            params['orderBy'] = order_by
+
+        if page_size is not None:
+            params['pageSize'] = page_size
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        resp = self._connection.api_request(
+            method='POST', path='/entries:list', data=params)
+
+        return resp.get('entries', ()), resp.get('nextPageToken')
+
+    def write_entries(self, entries, logger_name=None, resource=None,
+                      labels=None):
+ """API call: log an entry resource via a POST request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write
+
+ :type entries: sequence of mapping
+ :param entries: the log entry resources to log.
+
+ :type logger_name: string
+ :param logger_name: name of default logger to which to log the entries;
+ individual entries may override.
+
+ :type resource: mapping
+ :param resource: default resource to associate with entries;
+ individual entries may override.
+
+ :type labels: mapping
+ :param labels: default labels to associate with entries;
+ individual entries may override.
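+
+        A sketch of writing one minimal text entry (hypothetical names;
+        here ``api`` is an instance of this class)::
+
+            >>> api.write_entries(
+            ...     [{'textPayload': 'hello, world',
+            ...       'logName': 'projects/my-project/logs/syslog',
+            ...       'resource': {'type': 'global'}}])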
+ """
+        data = {'entries': list(entries)}
+
+        if logger_name is not None:
+            data['logName'] = logger_name
+
+        if resource is not None:
+            data['resource'] = resource
+
+        if labels is not None:
+            data['labels'] = labels
+
+        self._connection.api_request(method='POST', path='/entries:write',
+                                     data=data)
+
+    def logger_delete(self, project, logger_name):
+ """API call: delete all entries in a logger via a DELETE request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete
+
+ :type project: string
+ :param project: ID of project containing the log entries to delete
+
+ :type logger_name: string
+ :param logger_name: name of logger containing the log entries to delete
+ """
+        path = '/projects/%s/logs/%s' % (project, logger_name)
+        self._connection.api_request(method='DELETE', path=path)
+
+
+class _SinksAPI(object):
+ """Helper mapping sink-related APIs.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
+
+ :type connection: :class:`gcloud.logging.connection.Connection`
+ :param connection: the connection used to make API requests.
+ """
+    def __init__(self, connection):
+        self._connection = connection
+
+    def list_sinks(self, project, page_size=None, page_token=None):
+ """List sinks for the project associated with this client.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list
+
+ :type project: string
+ :param project: ID of the project whose sinks are to be listed.
+
+ :type page_size: int
+        :param page_size: maximum number of sinks to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: str
+ :param page_token: opaque marker for the next "page" of sinks. If not
+ passed, the API will return the first page of
+ sinks.
+
+ :rtype: tuple, (list, str)
+ :returns: list of mappings, plus a "next page token" string:
+ if not None, indicates that more sinks can be retrieved
+ with another call (pass that value as ``page_token``).
+ """
+        params = {}
+
+        if page_size is not None:
+            params['pageSize'] = page_size
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/sinks' % (project,)
+        resp = self._connection.api_request(
+            method='GET', path=path, query_params=params)
+        sinks = resp.get('sinks', ())
+        return sinks, resp.get('nextPageToken')
+
+    def sink_create(self, project, sink_name, filter_, destination):
+ """API call: create a sink resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create
+
+ :type project: string
+ :param project: ID of the project in which to create the sink.
+
+ :type sink_name: string
+ :param sink_name: the name of the sink
+
+ :type filter_: string
+ :param filter_: the advanced logs filter expression defining the
+ entries exported by the sink.
+
+ :type destination: string
+ :param destination: destination URI for the entries exported by
+ the sink.
+ """
+        target = '/projects/%s/sinks' % (project,)
+        data = {
+            'name': sink_name,
+            'filter': filter_,
+            'destination': destination,
+        }
+        self._connection.api_request(method='POST', path=target, data=data)
+
+    def sink_get(self, project, sink_name):
+ """API call: retrieve a sink resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get
+
+ :type project: string
+ :param project: ID of the project containing the sink.
+
+ :type sink_name: string
+ :param sink_name: the name of the sink
+
+ :rtype: dict
+ :returns: The JSON sink object returned from the API.
+ """
+        target = '/projects/%s/sinks/%s' % (project, sink_name)
+        return self._connection.api_request(method='GET', path=target)
+
+    def sink_update(self, project, sink_name, filter_, destination):
+ """API call: update a sink resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update
+
+ :type project: string
+ :param project: ID of the project containing the sink.
+
+ :type sink_name: string
+ :param sink_name: the name of the sink
+
+ :type filter_: string
+ :param filter_: the advanced logs filter expression defining the
+ entries exported by the sink.
+
+ :type destination: string
+ :param destination: destination URI for the entries exported by
+ the sink.
+ """
+        target = '/projects/%s/sinks/%s' % (project, sink_name)
+        data = {
+            'name': sink_name,
+            'filter': filter_,
+            'destination': destination,
+        }
+        self._connection.api_request(method='PUT', path=target, data=data)
+
+    def sink_delete(self, project, sink_name):
+ """API call: delete a sink resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete
+
+ :type project: string
+ :param project: ID of the project containing the sink.
+
+ :type sink_name: string
+ :param sink_name: the name of the sink
+ """
+        target = '/projects/%s/sinks/%s' % (project, sink_name)
+        self._connection.api_request(method='DELETE', path=target)
+
+
+class _MetricsAPI(object):
+    """Helper mapping metric-related APIs.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
+
+ :type connection: :class:`gcloud.logging.connection.Connection`
+ :param connection: the connection used to make API requests.
+ """
+    def __init__(self, connection):
+        self._connection = connection
+
+    def list_metrics(self, project, page_size=None, page_token=None):
+ """List metrics for the project associated with this client.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list
+
+ :type project: string
+ :param project: ID of the project whose metrics are to be listed.
+
+ :type page_size: int
+        :param page_size: maximum number of metrics to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: str
+ :param page_token: opaque marker for the next "page" of metrics. If not
+ passed, the API will return the first page of
+ metrics.
+
+ :rtype: tuple, (list, str)
+ :returns: list of mappings, plus a "next page token" string:
+ if not None, indicates that more metrics can be retrieved
+ with another call (pass that value as ``page_token``).
+ """
+        params = {}
+
+        if page_size is not None:
+            params['pageSize'] = page_size
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/metrics' % (project,)
+        resp = self._connection.api_request(
+            method='GET', path=path, query_params=params)
+        metrics = resp.get('metrics', ())
+        return metrics, resp.get('nextPageToken')
+
+    def metric_create(self, project, metric_name, filter_, description=None):
+ """API call: create a metric resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create
+
+ :type project: string
+ :param project: ID of the project in which to create the metric.
+
+ :type metric_name: string
+ :param metric_name: the name of the metric
+
+ :type filter_: string
+ :param filter_: the advanced logs filter expression defining the
+ entries exported by the metric.
+
+ :type description: string
+ :param description: description of the metric.
+ """
+        target = '/projects/%s/metrics' % (project,)
+        data = {
+            'name': metric_name,
+            'filter': filter_,
+            'description': description,
+        }
+        self._connection.api_request(method='POST', path=target, data=data)
+
+    def metric_get(self, project, metric_name):
+ """API call: retrieve a metric resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get
+
+ :type project: string
+ :param project: ID of the project containing the metric.
+
+ :type metric_name: string
+ :param metric_name: the name of the metric
+
+ :rtype: dict
+ :returns: The JSON metric object returned from the API.
+ """
+        target = '/projects/%s/metrics/%s' % (project, metric_name)
+        return self._connection.api_request(method='GET', path=target)
+
+    def metric_update(self, project, metric_name, filter_, description):
+ """API call: update a metric resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update
+
+ :type project: string
+ :param project: ID of the project containing the metric.
+
+ :type metric_name: string
+ :param metric_name: the name of the metric
+
+ :type filter_: string
+ :param filter_: the advanced logs filter expression defining the
+ entries exported by the metric.
+
+ :type description: string
+ :param description: description of the metric.
+ """
+        target = '/projects/%s/metrics/%s' % (project, metric_name)
+        data = {
+            'name': metric_name,
+            'filter': filter_,
+            'description': description,
+        }
+        self._connection.api_request(method='PUT', path=target, data=data)
+
+    def metric_delete(self, project, metric_name):
+ """API call: delete a metric resource.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete
+
+ :type project: string
+ :param project: ID of the project containing the metric.
+
+ :type metric_name: string
+ :param metric_name: the name of the metric
+ """
+        target = '/projects/%s/metrics/%s' % (project, metric_name)
+        self._connection.api_request(method='DELETE', path=target)
+
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Log entries within the Google Stackdriver Logging API."""
+
+import json
+import re
+
+from google.protobuf.json_format import Parse
+
+from gcloud._helpers import _name_from_project_path
+from gcloud._helpers import _rfc3339_nanos_to_datetime
+
+
+_LOGGER_TEMPLATE = re.compile(r"""
+    projects/            # static prefix
+    (?P<project>[^/]+)   # project ID
+    /logs/               # static midfix
+    (?P<name>[^/]+)      # logger name
+""", re.VERBOSE)
+
+
+
+def logger_name_from_path(path):
+ """Validate a logger URI path and get the logger name.
+
+ :type path: str
+ :param path: URI path for a logger API request.
+
+ :rtype: str
+ :returns: Logger name parsed from ``path``.
+    :raises: :class:`ValueError` if the ``path`` is ill-formed.
+    """
+    return _name_from_project_path(path, None, _LOGGER_TEMPLATE)
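+
+# Illustrative sketch (not part of the original source): the parser pulls
+# the trailing segment out of a fully-qualified logger resource name:
+#
+#     >>> logger_name_from_path('projects/my-project/logs/my-log')
+#     'my-log'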
+
+
+class _BaseEntry(object):
+    """Base class for TextEntry, StructEntry, ProtobufEntry.
+
+ :type payload: text or dict
+ :param payload: The payload passed as ``textPayload``, ``jsonPayload``,
+ or ``protoPayload``.
+
+ :type logger: :class:`gcloud.logging.logger.Logger`
+ :param logger: the logger used to write the entry.
+
+ :type insert_id: text, or :class:`NoneType`
+ :param insert_id: (optional) the ID used to identify an entry uniquely.
+
+ :type timestamp: :class:`datetime.datetime`, or :class:`NoneType`
+ :param timestamp: (optional) timestamp for the entry
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of labels for the entry
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry
+ """
+    def __init__(self, payload, logger, insert_id=None, timestamp=None,
+                 labels=None, severity=None, http_request=None):
+        self.payload = payload
+        self.logger = logger
+        self.insert_id = insert_id
+        self.timestamp = timestamp
+        self.labels = labels
+        self.severity = severity
+        self.http_request = http_request
+
+    @classmethod
+    def from_api_repr(cls, resource, client, loggers=None):
+ """Factory: construct an entry given its API representation
+
+ :type resource: dict
+ :param resource: text entry resource representation returned from
+ the API
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: Client which holds credentials and project
+ configuration.
+
+ :type loggers: dict or None
+ :param loggers: A mapping of logger fullnames -> loggers. If not
+ passed, the entry will have a newly-created logger.
+
+ :rtype: :class:`gcloud.logging.entries.TextEntry`
+ :returns: Text entry parsed from ``resource``.
+ """
+        if loggers is None:
+            loggers = {}
+        logger_fullname = resource['logName']
+        logger = loggers.get(logger_fullname)
+        if logger is None:
+            logger_name = logger_name_from_path(logger_fullname)
+            logger = loggers[logger_fullname] = client.logger(logger_name)
+        payload = resource[cls._PAYLOAD_KEY]
+        insert_id = resource.get('insertId')
+        timestamp = resource.get('timestamp')
+        if timestamp is not None:
+            timestamp = _rfc3339_nanos_to_datetime(timestamp)
+        labels = resource.get('labels')
+        severity = resource.get('severity')
+        http_request = resource.get('httpRequest')
+        return cls(payload, logger, insert_id=insert_id, timestamp=timestamp,
+                   labels=labels, severity=severity, http_request=http_request)
+
+
+
+class TextEntry(_BaseEntry):
+ """Entry created with ``textPayload``.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry
+ """
+    _PAYLOAD_KEY = 'textPayload'
+
+
+
+class StructEntry(_BaseEntry):
+ """Entry created with ``jsonPayload``.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry
+ """
+    _PAYLOAD_KEY = 'jsonPayload'
+
+
+
+class ProtobufEntry(_BaseEntry):
+ """Entry created with ``protoPayload``.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry
+ """
+    _PAYLOAD_KEY = 'protoPayload'
+
+
+    def parse_message(self, message):
+ """Parse payload into a protobuf message.
+
+ Mutates the passed-in ``message`` in place.
+
+ :type message: Protobuf message
+ :param message: the message to be logged
+ """
+        Parse(json.dumps(self.payload), message)
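+
+# Illustrative sketch (not part of the original source): recovering a
+# protobuf payload from a fetched ProtobufEntry ``entry``; the message
+# class here is just an example, any matching generated type works:
+#
+#     from google.protobuf import struct_pb2
+#     message = struct_pb2.Struct()
+#     entry.parse_message(message)   # fills ``message`` in place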
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Loggers."""
+
+import json
+
+from google.protobuf.json_format import MessageToJson
+
+
+
+class Logger(object):
+ """Loggers represent named targets for log entries.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
+
+ :type name: string
+ :param name: the name of the logger
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the logger (which requires a project).
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of default labels for entries written
+ via this logger.
+ """
+    def __init__(self, name, client, labels=None):
+        self.name = name
+        self._client = client
+        self.labels = labels
+
+    @property
+    def client(self):
+        """Client bound to the logger."""
+        return self._client
+
+    @property
+    def project(self):
+        """Project bound to the logger."""
+        return self._client.project
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in logging APIs"""
+        return 'projects/%s/logs/%s' % (self.project, self.name)
+
+    @property
+    def path(self):
+        """URI path for use in logging APIs"""
+        return '/%s' % (self.full_name,)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current logger.
+
+ :rtype: :class:`gcloud.logging.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self._client
+        return client
+
+
+    def batch(self, client=None):
+ """Return a batch to use as a context manager.
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+                       ``client`` stored on the current logger.
+
+ :rtype: :class:`Batch`
+ :returns: A batch to use as a context manager.
+ """
+        client = self._require_client(client)
+        return Batch(self, client)
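+
+    # Illustrative sketch (not part of the original source): batching
+    # several entries into a single ``write_entries`` call, assuming
+    # ``logger`` is a Logger instance:
+    #
+    #     with logger.batch() as batch:
+    #         batch.log_text('starting')
+    #         batch.log_struct({'step': 1})
+    #     # the batch commits on clean exit from the ``with`` block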
+
+    def _make_entry_resource(self, text=None, info=None, message=None,
+                             labels=None, insert_id=None, severity=None,
+                             http_request=None):
+ """Return a log entry resource of the appropriate type.
+
+ Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`.
+
+ Only one of ``text``, ``info``, or ``message`` should be passed.
+
+ :type text: string or :class:`NoneType`
+ :param text: text payload
+
+ :type info: dict or :class:`NoneType`
+ :param info: struct payload
+
+ :type message: Protobuf message or :class:`NoneType`
+ :param message: protobuf payload
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: labels passed in to calling method.
+
+ :type insert_id: string or :class:`NoneType`
+ :param insert_id: (optional) unique ID for log entry.
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry
+
+ :rtype: dict
+ :returns: The JSON resource created.
+ """
+        resource = {
+            'logName': self.full_name,
+            'resource': {'type': 'global'},
+        }
+
+        if text is not None:
+            resource['textPayload'] = text
+
+        if info is not None:
+            resource['jsonPayload'] = info
+
+        if message is not None:
+            as_json_str = MessageToJson(message)
+            as_json = json.loads(as_json_str)
+            resource['protoPayload'] = as_json
+
+        if labels is None:
+            labels = self.labels
+
+        if labels is not None:
+            resource['labels'] = labels
+
+        if insert_id is not None:
+            resource['insertId'] = insert_id
+
+        if severity is not None:
+            resource['severity'] = severity
+
+        if http_request is not None:
+            resource['httpRequest'] = http_request
+
+        return resource
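+
+    # For reference (not part of the original source), a text entry built
+    # by this helper looks roughly like:
+    #
+    #     {'logName': 'projects/my-project/logs/my-log',
+    #      'resource': {'type': 'global'},
+    #      'textPayload': 'hello',
+    #      'severity': 'INFO'}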
+
+
+    def log_text(self, text, client=None, labels=None, insert_id=None,
+                 severity=None, http_request=None):
+ """API call: log a text message via a POST request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write
+
+ :type text: text
+ :param text: the log message.
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current logger.
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of labels for the entry.
+
+ :type insert_id: string or :class:`NoneType`
+ :param insert_id: (optional) unique ID for log entry.
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry
+ """
+        client = self._require_client(client)
+        entry_resource = self._make_entry_resource(
+            text=text, labels=labels, insert_id=insert_id, severity=severity,
+            http_request=http_request)
+        client.logging_api.write_entries([entry_resource])
+
+
+    def log_struct(self, info, client=None, labels=None, insert_id=None,
+                   severity=None, http_request=None):
+ """API call: log a structured message via a POST request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write
+
+ :type info: dict
+ :param info: the log entry information
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current logger.
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of labels for the entry.
+
+ :type insert_id: string or :class:`NoneType`
+ :param insert_id: (optional) unique ID for log entry.
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry
+ """
+        client = self._require_client(client)
+        entry_resource = self._make_entry_resource(
+            info=info, labels=labels, insert_id=insert_id, severity=severity,
+            http_request=http_request)
+        client.logging_api.write_entries([entry_resource])
+
+
+    def log_proto(self, message, client=None, labels=None, insert_id=None,
+                  severity=None, http_request=None):
+ """API call: log a protobuf message via a POST request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write
+
+ :type message: Protobuf message
+ :param message: the message to be logged
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current logger.
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of labels for the entry.
+
+ :type insert_id: string or :class:`NoneType`
+ :param insert_id: (optional) unique ID for log entry.
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry
+ """
+        client = self._require_client(client)
+        entry_resource = self._make_entry_resource(
+            message=message, labels=labels, insert_id=insert_id,
+            severity=severity, http_request=http_request)
+        client.logging_api.write_entries([entry_resource])
+
+
+    def delete(self, client=None):
+ """API call: delete all entries in a logger via a DELETE request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current logger.
+ """
+        client = self._require_client(client)
+        client.logging_api.logger_delete(self.project, self.name)
+
+
+    def list_entries(self, projects=None, filter_=None, order_by=None,
+                     page_size=None, page_token=None):
+ """Return a page of log entries.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list
+
+ :type projects: list of strings
+ :param projects: project IDs to include. If not passed,
+ defaults to the project bound to the client.
+
+ :type filter_: string
+ :param filter_: a filter expression. See:
+ https://cloud.google.com/logging/docs/view/advanced_filters
+
+ :type order_by: string
+ :param order_by: One of :data:`gcloud.logging.ASCENDING` or
+ :data:`gcloud.logging.DESCENDING`.
+
+ :type page_size: int
+        :param page_size: maximum number of entries to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: string
+ :param page_token: opaque marker for the next "page" of entries. If not
+ passed, the API will return the first page of
+ entries.
+
+ :rtype: tuple, (list, str)
+        :returns: list of :class:`gcloud.logging.entries.TextEntry`, plus a
+ "next page token" string: if not None, indicates that
+ more entries can be retrieved with another call (pass that
+ value as ``page_token``).
+ """
+        log_filter = 'logName=%s' % (self.full_name,)
+        if filter_ is not None:
+            filter_ = '%s AND %s' % (filter_, log_filter)
+        else:
+            filter_ = log_filter
+        return self.client.list_entries(
+            projects=projects, filter_=filter_, order_by=order_by,
+            page_size=page_size, page_token=page_token)
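+
+    # Illustrative sketch (not part of the original source): the method
+    # scopes the caller's filter to this logger before delegating to the
+    # client, e.g.:
+    #
+    #     entries, token = logger.list_entries(filter_='severity>=ERROR')
+    #     # effective filter:
+    #     #     'severity>=ERROR AND logName=projects/<project>/logs/<name>'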
+
+
+
+class Batch(object):
+ """Context manager: collect entries to log via a single API call.
+
+ Helper returned by :meth:`Logger.batch`
+
+ :type logger: :class:`gcloud.logging.logger.Logger`
+ :param logger: the logger to which entries will be logged.
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: The client to use.
+ """
+    def __init__(self, logger, client):
+        self.logger = logger
+        self.entries = []
+        self.client = client
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is None:
+            self.commit()
+
+
+    def log_text(self, text, labels=None, insert_id=None, severity=None,
+                 http_request=None):
+ """Add a text entry to be logged during :meth:`commit`.
+
+ :type text: string
+ :param text: the text entry
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of labels for the entry.
+
+ :type insert_id: string or :class:`NoneType`
+ :param insert_id: (optional) unique ID for log entry.
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry.
+ """
+        self.entries.append(
+            ('text', text, labels, insert_id, severity, http_request))
+
+
+    def log_struct(self, info, labels=None, insert_id=None, severity=None,
+                   http_request=None):
+ """Add a struct entry to be logged during :meth:`commit`.
+
+ :type info: dict
+ :param info: the struct entry
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of labels for the entry.
+
+ :type insert_id: string or :class:`NoneType`
+ :param insert_id: (optional) unique ID for log entry.
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry.
+ """
+        self.entries.append(
+            ('struct', info, labels, insert_id, severity, http_request))
+
+
+    def log_proto(self, message, labels=None, insert_id=None, severity=None,
+                  http_request=None):
+ """Add a protobuf entry to be logged during :meth:`commit`.
+
+ :type message: protobuf message
+ :param message: the protobuf entry
+
+ :type labels: dict or :class:`NoneType`
+ :param labels: (optional) mapping of labels for the entry.
+
+ :type insert_id: string or :class:`NoneType`
+ :param insert_id: (optional) unique ID for log entry.
+
+ :type severity: string or :class:`NoneType`
+ :param severity: (optional) severity of event being logged.
+
+ :type http_request: dict or :class:`NoneType`
+ :param http_request: (optional) info about HTTP request associated with
+ the entry.
+ """
+        self.entries.append(
+            ('proto', message, labels, insert_id, severity, http_request))
+
+
+    def commit(self, client=None):
+ """Send saved log entries as a single API call.
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current batch.
+ """
+        if client is None:
+            client = self.client
+
+        kwargs = {
+            'logger_name': self.logger.path,
+            'resource': {'type': 'global'},
+        }
+        if self.logger.labels is not None:
+            kwargs['labels'] = self.logger.labels
+
+        entries = []
+        for entry_type, entry, labels, iid, severity, http_req in self.entries:
+            if entry_type == 'text':
+                info = {'textPayload': entry}
+            elif entry_type == 'struct':
+                info = {'jsonPayload': entry}
+            elif entry_type == 'proto':
+                as_json_str = MessageToJson(entry)
+                as_json = json.loads(as_json_str)
+                info = {'protoPayload': as_json}
+            else:
+                raise ValueError('Unknown entry type: %s' % (entry_type,))
+            if labels is not None:
+                info['labels'] = labels
+            if iid is not None:
+                info['insertId'] = iid
+            if severity is not None:
+                info['severity'] = severity
+            if http_req is not None:
+                info['httpRequest'] = http_req
+            entries.append(info)
+
+        client.logging_api.write_entries(entries, **kwargs)
+        del self.entries[:]
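+
+# Illustrative sketch (not part of the original source): committing a
+# batch explicitly instead of via the context manager:
+#
+#     batch = logger.batch()
+#     batch.log_text('one')
+#     batch.log_text('two')
+#     batch.commit()   # single write_entries call; clears batch.entries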
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define Stackdriver Logging API Metrics."""
+
+from gcloud.exceptions import NotFound
+
+
+
+class Metric(object):
+ """Metrics represent named filters for log entries.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
+
+ :type name: string
+ :param name: the name of the metric
+
+ :type filter_: string
+ :param filter_: the advanced logs filter expression defining the entries
+ tracked by the metric. If not passed, the instance should
+ already exist, to be refreshed via :meth:`reload`.
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the metric (which requires a project).
+
+ :type description: string
+ :param description: an optional description of the metric.
+ """
+    def __init__(self, name, filter_=None, client=None, description=''):
+        self.name = name
+        self._client = client
+        self.filter_ = filter_
+        self.description = description
+
+    @property
+    def client(self):
+        """Client bound to the metric."""
+        return self._client
+
+    @property
+    def project(self):
+        """Project bound to the metric."""
+        return self._client.project
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in metric APIs"""
+        return 'projects/%s/metrics/%s' % (self.project, self.name)
+
+    @property
+    def path(self):
+        """URL path for the metric's APIs"""
+        return '/%s' % (self.full_name,)
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+ """Factory: construct a metric given its API representation
+
+ :type resource: dict
+ :param resource: metric resource representation returned from the API
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: Client which holds credentials and project
+ configuration for the metric.
+
+ :rtype: :class:`gcloud.logging.metric.Metric`
+ :returns: Metric parsed from ``resource``.
+ """
+        metric_name = resource['name']
+        filter_ = resource['filter']
+        description = resource.get('description', '')
+        return cls(metric_name, filter_, client=client,
+                   description=description)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current metric.
+
+ :rtype: :class:`gcloud.logging.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self._client
+        return client
+
+
+    def create(self, client=None):
+        """API call: create the metric via a POST request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current metric.
+ """
+        client = self._require_client(client)
+        client.metrics_api.metric_create(
+            self.project, self.name, self.filter_, self.description)
+
+
+    def exists(self, client=None):
+ """API call: test for the existence of the metric via a GET request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current metric.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the metric.
+ """
+        client = self._require_client(client)
+
+        try:
+            client.metrics_api.metric_get(self.project, self.name)
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def reload(self, client=None):
+ """API call: sync local metric configuration via a GET request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current metric.
+ """
+        client = self._require_client(client)
+        data = client.metrics_api.metric_get(self.project, self.name)
+        self.description = data.get('description', '')
+        self.filter_ = data['filter']
+
+
+    def update(self, client=None):
+ """API call: update metric configuration via a PUT request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current metric.
+ """
+        client = self._require_client(client)
+        client.metrics_api.metric_update(
+            self.project, self.name, self.filter_, self.description)
+
+
+    def delete(self, client=None):
+ """API call: delete a metric via a DELETE request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current metric.
+ """
+        client = self._require_client(client)
+        client.metrics_api.metric_delete(self.project, self.name)
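+
+# Illustrative sketch (not part of the original source): a typical metric
+# lifecycle with this class, assuming ``client`` is a logging Client:
+#
+#     metric = Metric('error-count', 'severity>=ERROR', client=client)
+#     if not metric.exists():
+#         metric.create()
+#     metric.description = 'Count of error entries'
+#     metric.update()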
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define Stackdriver Logging API Sinks."""
+
+from gcloud.exceptions import NotFound
+
+
+
+class Sink(object):
+ """Sinks represent filtered exports for log entries.
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
+
+ :type name: string
+ :param name: the name of the sink
+
+ :type filter_: string
+ :param filter_: the advanced logs filter expression defining the entries
+ exported by the sink. If not passed, the instance should
+ already exist, to be refreshed via :meth:`reload`.
+
+ :type destination: string
+ :param destination: destination URI for the entries exported by the sink.
+ If not passed, the instance should already exist, to
+ be refreshed via :meth:`reload`.
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the sink (which requires a project).
+ """
+    def __init__(self, name, filter_=None, destination=None, client=None):
+        self.name = name
+        self.filter_ = filter_
+        self.destination = destination
+        self._client = client
+
+    @property
+    def client(self):
+        """Client bound to the sink."""
+        return self._client
+
+    @property
+    def project(self):
+        """Project bound to the sink."""
+        return self._client.project
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in sink APIs"""
+        return 'projects/%s/sinks/%s' % (self.project, self.name)
+
+    @property
+    def path(self):
+        """URL path for the sink's APIs"""
+        return '/%s' % (self.full_name,)
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+ """Factory: construct a sink given its API representation
+
+ :type resource: dict
+ :param resource: sink resource representation returned from the API
+
+ :type client: :class:`gcloud.logging.client.Client`
+ :param client: Client which holds credentials and project
+ configuration for the sink.
+
+ :rtype: :class:`gcloud.logging.sink.Sink`
+ :returns: Sink parsed from ``resource``.
+ """
+        sink_name = resource['name']
+        filter_ = resource['filter']
+        destination = resource['destination']
+        return cls(sink_name, filter_, destination, client=client)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current sink.
+
+ :rtype: :class:`gcloud.logging.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self._client
+        return client
+
+
+    def create(self, client=None):
+        """API call: create the sink via a POST request
+
+ See:
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current sink.
+ """
+        client = self._require_client(client)
+        client.sinks_api.sink_create(
+            self.project, self.name, self.filter_, self.destination)
+
+
+    def exists(self, client=None):
+ """API call: test for the existence of the sink via a GET request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current sink.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the sink.
+ """
+        client = self._require_client(client)
+
+        try:
+            client.sinks_api.sink_get(self.project, self.name)
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def reload(self, client=None):
+ """API call: sync local sink configuration via a GET request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current sink.
+ """
+        client = self._require_client(client)
+        data = client.sinks_api.sink_get(self.project, self.name)
+        self.filter_ = data['filter']
+        self.destination = data['destination']
+
+
+    def update(self, client=None):
+ """API call: update sink configuration via a PUT request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current sink.
+ """
+        client = self._require_client(client)
+        client.sinks_api.sink_update(
+            self.project, self.name, self.filter_, self.destination)
+
+
+    def delete(self, client=None):
+ """API call: delete a sink via a DELETE request
+
+ See
+ https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete
+
+ :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current sink.
+ """
+        client = self._require_client(client)
+        client.sinks_api.sink_delete(self.project, self.name)
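+
+# Illustrative sketch (not part of the original source): exporting error
+# entries to a Cloud Storage bucket via this class, assuming ``client``
+# is a logging Client and the bucket already grants the sink permission:
+#
+#     sink = Sink('error-export', 'severity>=ERROR',
+#                 'storage.googleapis.com/my-bucket', client=client)
+#     if not sink.exists():
+#         sink.create()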
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the `Google Stackdriver Monitoring API (V3)`_.
+
+Example::
+
+ >>> from gcloud import monitoring
+ >>> client = monitoring.Client()
+ >>> query = client.query(minutes=5)
+ >>> print(query.as_dataframe()) # Requires pandas.
+
+At present, the client supports querying of time series, metric descriptors,
+and monitored resource descriptors.
+
+.. _Google Stackdriver Monitoring API (V3):
+ https://cloud.google.com/monitoring/api/v3/
+"""
+
+from gcloud.client import JSONClient
+from gcloud.monitoring.connection import Connection
+from gcloud.monitoring.metric import MetricDescriptor
+from gcloud.monitoring.metric import MetricKind
+from gcloud.monitoring.metric import ValueType
+from gcloud.monitoring.query import Query
+from gcloud.monitoring.resource import ResourceDescriptor
+
+
+
+class Client(JSONClient):
+ """Client to bundle configuration needed for API requests.
+
+ :type project: string
+ :param project: The target project. If not passed, falls back to the
+ default inferred from the environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ """
+
+    _connection_class = Connection
+
+
+    def query(self,
+              metric_type=Query.DEFAULT_METRIC_TYPE,
+              end_time=None,
+              days=0, hours=0, minutes=0):
+ """Construct a query object for retrieving metric data.
+
+ Example::
+
+ >>> query = client.query(minutes=5)
+ >>> print(query.as_dataframe()) # Requires pandas.
+
+ :type metric_type: string
+ :param metric_type: The metric type name. The default value is
+ :data:`Query.DEFAULT_METRIC_TYPE
+ <gcloud.monitoring.query.Query.DEFAULT_METRIC_TYPE>`,
+ but please note that this default value is provided only for
+ demonstration purposes and is subject to change. See the
+ `supported metrics`_.
+
+ :type end_time: :class:`datetime.datetime` or None
+ :param end_time: The end time (inclusive) of the time interval
+ for which results should be returned, as a datetime object.
+ The default is the start of the current minute.
+
+ The start time (exclusive) is determined by combining the
+ values of ``days``, ``hours``, and ``minutes``, and
+ subtracting the resulting duration from the end time.
+
+ It is also allowed to omit the end time and duration here,
+ in which case
+ :meth:`~gcloud.monitoring.query.Query.select_interval`
+ must be called before the query is executed.
+
+ :type days: integer
+ :param days: The number of days in the time interval.
+
+ :type hours: integer
+ :param hours: The number of hours in the time interval.
+
+ :type minutes: integer
+ :param minutes: The number of minutes in the time interval.
+
+ :rtype: :class:`~gcloud.monitoring.query.Query`
+ :returns: The query object.
+
+ :raises: :exc:`ValueError` if ``end_time`` is specified but
+ ``days``, ``hours``, and ``minutes`` are all zero.
+ If you really want to specify a point in time, use
+ :meth:`~gcloud.monitoring.query.Query.select_interval`.
+
+ .. _supported metrics: https://cloud.google.com/monitoring/api/metrics
+ """
+        return Query(self, metric_type,
+                     end_time=end_time,
+                     days=days, hours=hours, minutes=minutes)
+
+
+    def metric_descriptor(self, type_,
+                          metric_kind=MetricKind.METRIC_KIND_UNSPECIFIED,
+                          value_type=ValueType.VALUE_TYPE_UNSPECIFIED,
+                          labels=(), unit='', description='', display_name=''):
+ """Construct a metric descriptor object.
+
+ Metric descriptors specify the schema for a particular metric type.
+
+ This factory method is used most often in conjunction with the metric
+ descriptor :meth:`~gcloud.monitoring.metric.MetricDescriptor.create`
+ method to define custom metrics::
+
+ >>> descriptor = client.metric_descriptor(
+ ... 'custom.googleapis.com/my_metric',
+ ... metric_kind=MetricKind.GAUGE,
+ ... value_type=ValueType.DOUBLE,
+ ... description='This is a simple example of a custom metric.')
+ >>> descriptor.create()
+
+ Here is an example where the custom metric is parameterized by a
+ metric label::
+
+ >>> label = LabelDescriptor('response_code', LabelValueType.INT64,
+ ... description='HTTP status code')
+ >>> descriptor = client.metric_descriptor(
+ ... 'custom.googleapis.com/my_app/response_count',
+ ... metric_kind=MetricKind.CUMULATIVE,
+ ... value_type=ValueType.INT64,
+ ... labels=[label],
+ ... description='Cumulative count of HTTP responses.')
+ >>> descriptor.create()
+
+ :type type_: string
+ :param type_:
+ The metric type including a DNS name prefix. For example:
+ ``"custom.googleapis.com/my_metric"``
+
+ :type metric_kind: string
+ :param metric_kind:
+ The kind of measurement. It must be one of
+ :data:`MetricKind.GAUGE`, :data:`MetricKind.DELTA`,
+ or :data:`MetricKind.CUMULATIVE`.
+ See :class:`~gcloud.monitoring.metric.MetricKind`.
+
+ :type value_type: string
+ :param value_type:
+ The value type of the metric. It must be one of
+ :data:`ValueType.BOOL`, :data:`ValueType.INT64`,
+ :data:`ValueType.DOUBLE`, :data:`ValueType.STRING`,
+ or :data:`ValueType.DISTRIBUTION`.
+ See :class:`ValueType`.
+
+ :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor`
+ :param labels:
+ A sequence of zero or more label descriptors specifying the labels
+ used to identify a specific instance of this metric.
+
+ :type unit: string
+ :param unit: An optional unit in which the metric value is reported.
+
+ :type description: string
+ :param description: An optional detailed description of the metric.
+
+ :type display_name: string
+ :param display_name: An optional concise name for the metric.
+
+ :rtype: :class:`MetricDescriptor`
+ :returns: The metric descriptor created with the passed-in arguments.
+ """
+        return MetricDescriptor(
+            self, type_,
+            metric_kind=metric_kind,
+            value_type=value_type,
+            labels=labels,
+            unit=unit,
+            description=description,
+            display_name=display_name,
+        )
+
+
+    def fetch_metric_descriptor(self, metric_type):
+ """Look up a metric descriptor by type.
+
+ Example::
+
+ >>> METRIC = 'compute.googleapis.com/instance/cpu/utilization'
+ >>> print(client.fetch_metric_descriptor(METRIC))
+
+ :type metric_type: string
+ :param metric_type: The metric type name.
+
+ :rtype: :class:`~gcloud.monitoring.metric.MetricDescriptor`
+ :returns: The metric descriptor instance.
+
+ :raises: :class:`gcloud.exceptions.NotFound` if the metric descriptor
+ is not found.
+ """
+        return MetricDescriptor._fetch(self, metric_type)
+
+
+    def list_metric_descriptors(self, filter_string=None, type_prefix=None):
+ """List all metric descriptors for the project.
+
+ Examples::
+
+ >>> for descriptor in client.list_metric_descriptors():
+ ... print(descriptor.type)
+
+ >>> for descriptor in client.list_metric_descriptors(
+ ... type_prefix='custom.'):
+ ... print(descriptor.type)
+
+ :type filter_string: string or None
+ :param filter_string:
+ An optional filter expression describing the metric descriptors
+ to be returned. See the `filter documentation`_.
+
+ :type type_prefix: string or None
+ :param type_prefix: An optional prefix constraining the selected
+ metric types. This adds ``metric.type = starts_with("<prefix>")``
+ to the filter.
+
+ :rtype: list of :class:`~gcloud.monitoring.metric.MetricDescriptor`
+ :returns: A list of metric descriptor instances.
+
+ .. _filter documentation:
+ https://cloud.google.com/monitoring/api/v3/filters
+ """
+        return MetricDescriptor._list(self, filter_string,
+                                      type_prefix=type_prefix)
+
+
+    def fetch_resource_descriptor(self, resource_type):
+ """Look up a monitored resource descriptor by type.
+
+ Example::
+
+ >>> print(client.fetch_resource_descriptor('gce_instance'))
+
+ :type resource_type: string
+ :param resource_type: The resource type name.
+
+ :rtype: :class:`~gcloud.monitoring.resource.ResourceDescriptor`
+ :returns: The resource descriptor instance.
+
+ :raises: :class:`gcloud.exceptions.NotFound` if the resource descriptor
+ is not found.
+ """
+        return ResourceDescriptor._fetch(self, resource_type)
+
+
+    def list_resource_descriptors(self, filter_string=None):
+ """List all monitored resource descriptors for the project.
+
+ Example::
+
+ >>> for descriptor in client.list_resource_descriptors():
+ ... print(descriptor.type)
+
+ :type filter_string: string or None
+ :param filter_string:
+ An optional filter expression describing the resource descriptors
+ to be returned. See the `filter documentation`_.
+
+ :rtype: list of :class:`~gcloud.monitoring.resource.ResourceDescriptor`
+ :returns: A list of resource descriptor instances.
+
+ .. _filter documentation:
+ https://cloud.google.com/monitoring/api/v3/filters
+ """
+        return ResourceDescriptor._list(self, filter_string)
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Stackdriver Monitoring connections."""
+
+from gcloud import connection as base_connection
+
+
+
+class Connection(base_connection.JSONConnection):
+ """A connection to Google Stackdriver Monitoring via the JSON REST API.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ connection.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``
+ :param http: (Optional) HTTP object to make requests.
+
+ :type api_base_url: string
+ :param api_base_url: The base of the API call URL. Defaults to the value
+ :attr:`Connection.API_BASE_URL`.
+ """
+
+    API_BASE_URL = 'https://monitoring.googleapis.com'
+    """The base of the API call URL."""
+
+    API_VERSION = 'v3'
+    """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
+    """A template for the URL of a particular API call."""
+
+    SCOPE = ('https://www.googleapis.com/auth/monitoring.read',
+             'https://www.googleapis.com/auth/monitoring',
+             'https://www.googleapis.com/auth/cloud-platform')
+    """The scopes required for authenticating as a Monitoring consumer."""
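+
+    # For reference (not part of the original source): request URLs are
+    # built from the pieces above, e.g.:
+    #
+    #     API_URL_TEMPLATE.format(api_base_url=API_BASE_URL,
+    #                             api_version=API_VERSION,
+    #                             path='/projects/my-project/timeSeries')
+    #     # -> 'https://monitoring.googleapis.com/v3/projects/my-project/timeSeries'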
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Label Descriptors for the `Google Monitoring API (V3)`_.
+
+.. _Google Monitoring API (V3):
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/LabelDescriptor
+"""
+
+
+
+class LabelValueType(object):
+ """Allowed values for the `type of a label`_.
+
+ .. _type of a label:
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ LabelDescriptor#ValueType
+ """
+
+    STRING = 'STRING'
+    BOOL = 'BOOL'
+    INT64 = 'INT64'
+
+
+
+class LabelDescriptor(object):
+ """Schema specification and documentation for a single label.
+
+ :type key: string
+ :param key: The name of the label.
+
+ :type value_type: string
+ :param value_type:
+ The type of the label. It must be one of :data:`LabelValueType.STRING`,
+ :data:`LabelValueType.BOOL`, or :data:`LabelValueType.INT64`.
+ See :class:`LabelValueType`.
+
+ :type description: string
+ :param description: A human-readable description for the label.
+ """
+
+    def __init__(self, key, value_type=LabelValueType.STRING, description=''):
+        self.key = key
+        self.value_type = value_type
+        self.description = description
+
+    @classmethod
+    def _from_dict(cls, info):
+ """Construct a label descriptor from the parsed JSON representation.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+
+ :rtype: :class:`LabelDescriptor`
+ :returns: A label descriptor.
+ """
+        return cls(
+            info['key'],
+            info.get('valueType', LabelValueType.STRING),
+            info.get('description', ''),
+        )
+
+    def _to_dict(self):
+ """Build a dictionary ready to be serialized to the JSON wire format.
+
+ :rtype: dict
+ :returns: A dictionary.
+ """
+        info = {
+            'key': self.key,
+            'valueType': self.value_type,
+        }
+
+        if self.description:
+            info['description'] = self.description
+
+        return info
+
+    def __eq__(self, other):
+        return self.__dict__ == other.__dict__
+
+    def __ne__(self, other):
+        return self.__dict__ != other.__dict__
+
+    def __repr__(self):
+        return (
+            'LabelDescriptor(key={key!r}, value_type={value_type!r},'
+            ' description={description!r})'
+        ).format(**self.__dict__)
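+
+# Illustrative sketch (not part of the original source): round-tripping a
+# descriptor through the wire format:
+#
+#     >>> label = LabelDescriptor('response_code', LabelValueType.INT64,
+#     ...                         description='HTTP status code')
+#     >>> label._to_dict()
+#     {'key': 'response_code', 'valueType': 'INT64',
+#      'description': 'HTTP status code'}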
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Metric Descriptors for the `Google Stackdriver Monitoring API (V3)`_.
+
+.. _Google Stackdriver Monitoring API (V3):
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ projects.metricDescriptors
+"""
+
+import collections
+
+from gcloud.monitoring.label import LabelDescriptor
+
+
+
+class MetricKind(object):
+ """Choices for the `kind of measurement`_.
+
+ .. _kind of measurement:
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ projects.metricDescriptors#MetricKind
+ """
+
+    METRIC_KIND_UNSPECIFIED = 'METRIC_KIND_UNSPECIFIED'
+ """.. note:: An unspecified kind is not allowed in metric descriptors."""
+
+    GAUGE = 'GAUGE'
+    DELTA = 'DELTA'
+    CUMULATIVE = 'CUMULATIVE'
+
+
+
+class ValueType(object):
+ """Choices for the `metric value type`_.
+
+ .. _metric value type:
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ projects.metricDescriptors#ValueType
+ """
+
+    VALUE_TYPE_UNSPECIFIED = 'VALUE_TYPE_UNSPECIFIED'
+ """.. note:: An unspecified type is not allowed in metric descriptors."""
+
+    BOOL = 'BOOL'
+    INT64 = 'INT64'
+    DOUBLE = 'DOUBLE'
+    STRING = 'STRING'
+    DISTRIBUTION = 'DISTRIBUTION'
+
+
+
+class MetricDescriptor(object):
+ """Specification of a metric type and its schema.
+
+ The preferred way to construct a metric descriptor object is using the
+ :meth:`~gcloud.monitoring.client.Client.metric_descriptor` factory method
+ of the :class:`~gcloud.monitoring.client.Client` class.
+
+ :type client: :class:`gcloud.monitoring.client.Client`
+ :param client: A client for operating on the metric descriptor.
+
+ :type type_: string
+ :param type_:
+ The metric type including a DNS name prefix. For example:
+ ``"compute.googleapis.com/instance/cpu/utilization"``
+
+ :type metric_kind: string
+ :param metric_kind:
+ The kind of measurement. It must be one of
+ :data:`MetricKind.GAUGE`, :data:`MetricKind.DELTA`,
+ or :data:`MetricKind.CUMULATIVE`. See :class:`MetricKind`.
+
+ :type value_type: string
+ :param value_type:
+ The value type of the metric. It must be one of
+ :data:`ValueType.BOOL`, :data:`ValueType.INT64`,
+ :data:`ValueType.DOUBLE`, :data:`ValueType.STRING`,
+ or :data:`ValueType.DISTRIBUTION`.
+ See :class:`ValueType`.
+
+ :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor`
+ :param labels:
+ A sequence of zero or more label descriptors specifying the labels
+ used to identify a specific instance of this metric.
+
+ :type unit: string
+ :param unit: An optional unit in which the metric value is reported.
+
+ :type description: string
+ :param description: An optional detailed description of the metric.
+
+ :type display_name: string
+ :param display_name: An optional concise name for the metric.
+
+ :type name: string or None
+ :param name:
+ The "resource name" of the metric descriptor. For example:
+ ``"projects/<project_id>/metricDescriptors/<type>"``. As
+ retrieved from the service, this will always be specified.
+ You can and should omit it when constructing an instance for
+ the purpose of creating a new metric descriptor.
+ """
+
+    def __init__(self, client, type_,
+                 metric_kind=MetricKind.METRIC_KIND_UNSPECIFIED,
+                 value_type=ValueType.VALUE_TYPE_UNSPECIFIED,
+                 labels=(),
+                 unit='', description='', display_name='',
+                 name=None):
+        self.client = client
+        self.name = name
+        self.type = type_
+        self.labels = labels
+        self.metric_kind = metric_kind
+        self.value_type = value_type
+        self.unit = unit
+        self.description = description
+        self.display_name = display_name
+
+
+    def create(self):
+ """Create a new metric descriptor based on this object.
+
+ Example::
+
+ >>> descriptor = client.metric_descriptor(
+ ... 'custom.googleapis.com/my_metric',
+ ... metric_kind=MetricKind.GAUGE,
+ ... value_type=ValueType.DOUBLE,
+ ... description='This is a simple example of a custom metric.')
+ >>> descriptor.create()
+
+ The metric kind must not be :data:`MetricKind.METRIC_KIND_UNSPECIFIED`,
+ and the value type must not be
+ :data:`ValueType.VALUE_TYPE_UNSPECIFIED`.
+
+ The ``name`` attribute is ignored in preparing the creation request.
+ All attributes are overwritten by the values received in the response
+ (normally affecting only ``name``).
+ """
+        path = '/projects/{project}/metricDescriptors/'.format(
+            project=self.client.project)
+        response = self.client.connection.api_request(
+            method='POST', path=path, data=self._to_dict())
+        self._init_from_dict(response)
+
+
+    def delete(self):
+ """Delete the metric descriptor identified by this object.
+
+ Example::
+
+ >>> descriptor = client.metric_descriptor(
+ ... 'custom.googleapis.com/my_metric')
+ >>> descriptor.delete()
+
+ Only the ``client`` and ``type`` attributes are used.
+ """
+        path = '/projects/{project}/metricDescriptors/{type}'.format(
+            project=self.client.project,
+            type=self.type)
+        self.client.connection.api_request(method='DELETE', path=path)
+
+    @classmethod
+    def _fetch(cls, client, metric_type):
+ """Look up a metric descriptor by type.
+
+ :type client: :class:`gcloud.monitoring.client.Client`
+ :param client: The client to use.
+
+ :type metric_type: string
+ :param metric_type: The metric type name.
+
+ :rtype: :class:`MetricDescriptor`
+ :returns: The metric descriptor instance.
+
+ :raises: :class:`gcloud.exceptions.NotFound` if the metric descriptor
+ is not found.
+ """
+        path = '/projects/{project}/metricDescriptors/{type}'.format(
+            project=client.project,
+            type=metric_type)
+        info = client.connection.api_request(method='GET', path=path)
+        return cls._from_dict(client, info)
+
+    @classmethod
+    def _list(cls, client, filter_string=None, type_prefix=None):
+ """List all metric descriptors for the project.
+
+ :type client: :class:`gcloud.monitoring.client.Client`
+ :param client: The client to use.
+
+ :type filter_string: string or None
+ :param filter_string:
+ An optional filter expression describing the metric descriptors
+ to be returned. See the `filter documentation`_.
+
+ :type type_prefix: string or None
+ :param type_prefix: An optional prefix constraining the selected
+ metric types. This adds ``metric.type = starts_with("<prefix>")``
+ to the filter.
+
+ :rtype: list of :class:`MetricDescriptor`
+ :returns: A list of metric descriptor instances.
+
+ .. _filter documentation:
+ https://cloud.google.com/monitoring/api/v3/filters
+ """
+        path = '/projects/{project}/metricDescriptors/'.format(
+            project=client.project)
+
+        filters = []
+        if filter_string is not None:
+            filters.append(filter_string)
+
+        if type_prefix is not None:
+            filters.append('metric.type = starts_with("{prefix}")'.format(
+                prefix=type_prefix))
+
+        descriptors = []
+        page_token = None
+        while True:
+            params = {}
+
+            if filters:
+                params['filter'] = ' AND '.join(filters)
+
+            if page_token is not None:
+                params['pageToken'] = page_token
+
+            response = client.connection.api_request(
+                method='GET', path=path, query_params=params)
+            for info in response.get('metricDescriptors', ()):
+                descriptors.append(cls._from_dict(client, info))
+
+            page_token = response.get('nextPageToken')
+            if not page_token:
+                break
+
+        return descriptors
+
+    @classmethod
+    def _from_dict(cls, client, info):
+ """Construct a metric descriptor from the parsed JSON representation.
+
+ :type client: :class:`gcloud.monitoring.client.Client`
+ :param client: A client to be included in the returned object.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+
+ :rtype: :class:`MetricDescriptor`
+ :returns: A metric descriptor.
+ """
+        descriptor = cls(client, None)
+        descriptor._init_from_dict(info)
+        return descriptor
+
+    def _init_from_dict(self, info):
+ """Initialize attributes from the parsed JSON representation.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+ """
+        self.name = info['name']
+        self.type = info['type']
+        self.labels = tuple(LabelDescriptor._from_dict(label)
+                            for label in info.get('labels', []))
+        self.metric_kind = info['metricKind']
+        self.value_type = info['valueType']
+        self.unit = info.get('unit', '')
+        self.description = info.get('description', '')
+        self.display_name = info.get('displayName', '')
+
+    def _to_dict(self):
+ """Build a dictionary ready to be serialized to the JSON wire format.
+
+ :rtype: dict
+ :returns: A dictionary.
+ """
+        info = {
+            'type': self.type,
+            'metricKind': self.metric_kind,
+            'valueType': self.value_type,
+        }
+
+        if self.labels:
+            info['labels'] = [label._to_dict() for label in self.labels]
+        if self.unit:
+            info['unit'] = self.unit
+        if self.description:
+            info['description'] = self.description
+        if self.display_name:
+            info['displayName'] = self.display_name
+
+        return info
+
+    def __repr__(self):
+        return (
+            '<MetricDescriptor:\n'
+            ' name={name!r},\n'
+            ' type={type!r},\n'
+            ' metric_kind={metric_kind!r}, value_type={value_type!r},\n'
+            ' labels={labels!r},\n'
+            ' display_name={display_name!r}, unit={unit!r},\n'
+            ' description={description!r}>'
+        ).format(**self.__dict__)
+
+
+
+class Metric(collections.namedtuple('Metric', 'type labels')):
+ """A specific metric identified by specifying values for all labels.
+
+ :type type: string
+ :param type: The metric type name.
+
+ :type labels: dict
+ :param labels: A mapping from label names to values for all labels
+ enumerated in the associated :class:`MetricDescriptor`.
+ """
+    __slots__ = ()
+
+    @classmethod
+    def _from_dict(cls, info):
+ """Construct a metric object from the parsed JSON representation.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+
+ :rtype: :class:`Metric`
+ :returns: A metric object.
+ """
+        return cls(
+            type=info['type'],
+            labels=info.get('labels', {}),
+        )
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Time series query for the `Google Stackdriver Monitoring API (V3)`_.
+
+.. _Google Stackdriver Monitoring API (V3):
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ projects.timeSeries/list
+"""
+
+import copy
+import datetime
+import itertools
+
+import six
+
+from gcloud._helpers import _datetime_to_rfc3339
+from gcloud.monitoring._dataframe import _build_dataframe
+from gcloud.monitoring.timeseries import TimeSeries
+
+_UTCNOW = datetime.datetime.utcnow  # To be replaced by tests.
+
+
+
+class Query(object):
+ """Query object for retrieving metric data.
+
+ The preferred way to construct a query object is using the
+ :meth:`~gcloud.monitoring.client.Client.query` method
+ of the :class:`~gcloud.monitoring.client.Client` class.
+
+ :type client: :class:`gcloud.monitoring.client.Client`
+ :param client: The client to use.
+
+ :type metric_type: string
+ :param metric_type: The metric type name. The default value is
+ :data:`Query.DEFAULT_METRIC_TYPE
+ <gcloud.monitoring.query.Query.DEFAULT_METRIC_TYPE>`,
+ but please note that this default value is provided only for
+ demonstration purposes and is subject to change. See the
+ `supported metrics`_.
+
+ :type end_time: :class:`datetime.datetime` or None
+ :param end_time: The end time (inclusive) of the time interval
+ for which results should be returned, as a datetime object.
+ The default is the start of the current minute.
+
+ The start time (exclusive) is determined by combining the
+ values of ``days``, ``hours``, and ``minutes``, and
+ subtracting the resulting duration from the end time.
+
+        Alternatively, you may omit both the end time and the duration,
+        in which case
+        :meth:`~gcloud.monitoring.query.Query.select_interval`
+        must be called before the query is executed.
+
+ :type days: integer
+ :param days: The number of days in the time interval.
+
+ :type hours: integer
+ :param hours: The number of hours in the time interval.
+
+ :type minutes: integer
+ :param minutes: The number of minutes in the time interval.
+
+ :raises: :exc:`ValueError` if ``end_time`` is specified but
+ ``days``, ``hours``, and ``minutes`` are all zero.
+ If you really want to specify a point in time, use
+ :meth:`~gcloud.monitoring.query.Query.select_interval`.
+
+ .. _supported metrics: https://cloud.google.com/monitoring/api/metrics
+ """
+
+    DEFAULT_METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization'
+
+    def __init__(self, client,
+                 metric_type=DEFAULT_METRIC_TYPE,
+                 end_time=None, days=0, hours=0, minutes=0):
+        start_time = None
+        if days or hours or minutes:
+            if end_time is None:
+                end_time = _UTCNOW().replace(second=0, microsecond=0)
+            start_time = end_time - datetime.timedelta(days=days,
+                                                       hours=hours,
+                                                       minutes=minutes)
+        elif end_time is not None:
+            raise ValueError('Non-zero duration required for time interval.')
+
+        self._client = client
+        self._end_time = end_time
+        self._start_time = start_time
+        self._filter = _Filter(metric_type)
+
+        self._per_series_aligner = None
+        self._alignment_period_seconds = None
+        self._cross_series_reducer = None
+        self._group_by_fields = ()
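+        # A usage sketch (illustrative; ``client`` is assumed to be an
+        # existing monitoring ``Client``): a query over the last hour
+        # places ``start_time`` exactly one hour before ``end_time``:
+        #
+        #   query = Query(client, hours=1)
+        #   assert query._end_time - query._start_time == \
+        #       datetime.timedelta(hours=1)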
+
+    def __iter__(self):
+        return self.iter()
+
+ @property
+    def metric_type(self):
+        """The metric type name."""
+        return self._filter.metric_type
+
+ @property
+    def filter(self):
+ """The filter string.
+
+ This is constructed from the metric type, the resource type, and
+ selectors for the group ID, monitored projects, resource labels,
+ and metric labels.
+ """
+        return str(self._filter)
+
+    def select_interval(self, end_time, start_time=None):
+ """Copy the query and set the query time interval.
+
+ Example::
+
+ import datetime
+
+ now = datetime.datetime.utcnow()
+ query = query.select_interval(
+ end_time=now,
+ start_time=now - datetime.timedelta(minutes=5))
+
+ As a convenience, you can alternatively specify the end time and
+ an interval duration when you create the query initially.
+
+ :type end_time: :class:`datetime.datetime`
+ :param end_time: The end time (inclusive) of the time interval
+ for which results should be returned, as a datetime object.
+
+ :type start_time: :class:`datetime.datetime` or None
+ :param start_time: The start time (exclusive) of the time interval
+ for which results should be returned, as a datetime object.
+ If not specified, the interval is a point in time.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+ """
+        new_query = self.copy()
+        new_query._end_time = end_time
+        new_query._start_time = start_time
+        return new_query
+
+    def select_group(self, group_id):
+ """Copy the query and add filtering by group.
+
+ Example::
+
+ query = query.select_group('1234567')
+
+ :type group_id: string
+ :param group_id: The ID of a group to filter by.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+ """
+        new_query = self.copy()
+        new_query._filter.group_id = group_id
+        return new_query
+
+    def select_projects(self, *args):
+ """Copy the query and add filtering by monitored projects.
+
+ This is only useful if the target project represents a Stackdriver
+ account containing the specified monitored projects.
+
+ Examples::
+
+ query = query.select_projects('project-1')
+ query = query.select_projects('project-1', 'project-2')
+
+ :type args: tuple
+ :param args: Project IDs limiting the resources to be included
+ in the query.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+ """
+        new_query = self.copy()
+        new_query._filter.projects = args
+        return new_query
+
+    def select_resources(self, *args, **kwargs):
+ """Copy the query and add filtering by resource labels.
+
+ Examples::
+
+ query = query.select_resources(zone='us-central1-a')
+ query = query.select_resources(zone_prefix='europe-')
+ query = query.select_resources(resource_type='gce_instance')
+
+ A keyword argument ``<label>=<value>`` ordinarily generates a filter
+ expression of the form::
+
+ resource.label.<label> = "<value>"
+
+ However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
+ you can specify a partial match.
+
+ ``<label>_prefix=<value>`` generates::
+
+ resource.label.<label> = starts_with("<value>")
+
+ ``<label>_suffix=<value>`` generates::
+
+ resource.label.<label> = ends_with("<value>")
+
+        As a special case, ``"resource_type"`` is treated as a
+        pseudo-label corresponding to the filter object ``resource.type``.
+ For example, ``resource_type=<value>`` generates::
+
+ resource.type = "<value>"
+
+ See the `defined resource types`_.
+
+ .. note::
+
+ The label ``"instance_name"`` is a metric label,
+ not a resource label. You would filter on it using
+ ``select_metrics(instance_name=...)``.
+
+ :type args: tuple
+ :param args: Raw filter expression strings to include in the
+ conjunction. If just one is provided and no keyword arguments
+ are provided, it can be a disjunction.
+
+ :type kwargs: dict
+ :param kwargs: Label filters to include in the conjunction as
+ described above.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+
+ .. _defined resource types:
+ https://cloud.google.com/monitoring/api/v3/monitored-resources
+ """
+        new_query = self.copy()
+        new_query._filter.select_resources(*args, **kwargs)
+        return new_query
+
+    def select_metrics(self, *args, **kwargs):
+ """Copy the query and add filtering by metric labels.
+
+ Examples::
+
+ query = query.select_metrics(instance_name='myinstance')
+ query = query.select_metrics(instance_name_prefix='mycluster-')
+
+ A keyword argument ``<label>=<value>`` ordinarily generates a filter
+ expression of the form::
+
+ metric.label.<label> = "<value>"
+
+ However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
+ you can specify a partial match.
+
+ ``<label>_prefix=<value>`` generates::
+
+ metric.label.<label> = starts_with("<value>")
+
+ ``<label>_suffix=<value>`` generates::
+
+ metric.label.<label> = ends_with("<value>")
+
+ :type args: tuple
+ :param args: Raw filter expression strings to include in the
+ conjunction. If just one is provided and no keyword arguments
+ are provided, it can be a disjunction.
+
+ :type kwargs: dict
+ :param kwargs: Label filters to include in the conjunction as
+ described above.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+ """
+        new_query = self.copy()
+        new_query._filter.select_metrics(*args, **kwargs)
+        return new_query
+
+    def align(self, per_series_aligner, seconds=0, minutes=0, hours=0):
+ """Copy the query and add temporal alignment.
+
+ If ``per_series_aligner`` is not :data:`Aligner.ALIGN_NONE`, each time
+ series will contain data points only on the period boundaries.
+
+ Example::
+
+ query = query.align(Aligner.ALIGN_MEAN, minutes=5)
+
+ It is also possible to specify the aligner as a literal string::
+
+ query = query.align('ALIGN_MEAN', minutes=5)
+
+ :type per_series_aligner: string
+ :param per_series_aligner: The approach to be used to align
+ individual time series. For example: :data:`Aligner.ALIGN_MEAN`.
+ See :class:`Aligner` and the descriptions of the `supported
+ aligners`_.
+
+ :type seconds: integer
+ :param seconds: The number of seconds in the alignment period.
+
+ :type minutes: integer
+ :param minutes: The number of minutes in the alignment period.
+
+ :type hours: integer
+ :param hours: The number of hours in the alignment period.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+
+ .. _supported aligners:
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ projects.timeSeries/list#Aligner
+ """
+        new_query = self.copy()
+        new_query._per_series_aligner = per_series_aligner
+        new_query._alignment_period_seconds = seconds + 60 * (minutes +
+                                                              60 * hours)
+        return new_query
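+    # For illustration: ``align('ALIGN_MEAN', minutes=5)`` stores an
+    # alignment period of 5 * 60 == 300 seconds, which is later sent to
+    # the API as ``aggregation.alignmentPeriod=300s``.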
+
+    def reduce(self, cross_series_reducer, *group_by_fields):
+ """Copy the query and add cross-series reduction.
+
+ Cross-series reduction combines time series by aggregating their
+ data points.
+
+ For example, you could request an aggregated time series for each
+ combination of project and zone as follows::
+
+ query = query.reduce(Reducer.REDUCE_MEAN,
+ 'resource.project_id', 'resource.zone')
+
+ :type cross_series_reducer: string
+ :param cross_series_reducer:
+ The approach to be used to combine time series. For example:
+ :data:`Reducer.REDUCE_MEAN`. See :class:`Reducer` and the
+ descriptions of the `supported reducers`_.
+
+ :type group_by_fields: strings
+ :param group_by_fields:
+ Fields to be preserved by the reduction. For example, specifying
+ just ``"resource.zone"`` will result in one time series per zone.
+ The default is to aggregate all of the time series into just one.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+
+ .. _supported reducers:
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ projects.timeSeries/list#Reducer
+ """
+        new_query = self.copy()
+        new_query._cross_series_reducer = cross_series_reducer
+        new_query._group_by_fields = group_by_fields
+        return new_query
+
+    def iter(self, headers_only=False, page_size=None):
+ """Yield all time series objects selected by the query.
+
+ The generator returned iterates over
+ :class:`~gcloud.monitoring.timeseries.TimeSeries` objects
+ containing points ordered from oldest to newest.
+
+ Note that the :class:`Query` object itself is an iterable, such that
+ the following are equivalent::
+
+ for timeseries in query:
+ ...
+
+ for timeseries in query.iter():
+ ...
+
+ :type headers_only: boolean
+ :param headers_only:
+ Whether to omit the point data from the time series objects.
+
+ :type page_size: integer or None
+ :param page_size:
+ An optional positive number specifying the maximum number of
+ points to return per page. This can be used to control how far
+ the iterator reads ahead.
+
+ :raises: :exc:`ValueError` if the query time interval has not been
+ specified.
+ """
+ # The following use of groupby() relies on equality comparison
+ # of time series as (named) tuples.
+        for timeseries, fragments in itertools.groupby(
+                self._iter_fragments(headers_only, page_size),
+                lambda fragment: fragment.header()):
+            points = list(itertools.chain.from_iterable(
+                fragment.points for fragment in fragments))
+            points.reverse()  # Order from oldest to newest.
+            yield timeseries.header(points=points)
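+    # A note on the reversal above (a sketch of the endpoint's documented
+    # behavior): the ``timeSeries.list`` endpoint returns points in
+    # reverse chronological order, so per-series fragments arrive as,
+    # e.g., [p3, p2, p1] and are reversed once to yield [p1, p2, p3].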
+
+    def _iter_fragments(self, headers_only=False, page_size=None):
+ """Yield all time series fragments selected by the query.
+
+ There may be multiple fragments per time series. These will be
+ contiguous.
+
+ The parameters and return value are as for :meth:`Query.iter`.
+ """
+        if self._end_time is None:
+            raise ValueError('Query time interval not specified.')
+
+        path = '/projects/{project}/timeSeries/'.format(
+            project=self._client.project)
+
+        page_token = None
+        while True:
+            params = list(self._build_query_params(
+                headers_only=headers_only,
+                page_size=page_size,
+                page_token=page_token,
+            ))
+            response = self._client.connection.api_request(
+                method='GET',
+                path=path,
+                query_params=params,
+            )
+            for info in response.get('timeSeries', ()):
+                yield TimeSeries._from_dict(info)
+
+            page_token = response.get('nextPageToken')
+            if not page_token:
+                break
+
+    def _build_query_params(self, headers_only=False,
+                            page_size=None, page_token=None):
+ """Yield key-value pairs for the URL query string.
+
+ We use a series of key-value pairs (suitable for passing to
+ ``urlencode``) instead of a ``dict`` to allow for repeated fields.
+
+ :type headers_only: boolean
+ :param headers_only:
+ Whether to omit the point data from the
+ :class:`~gcloud.monitoring.timeseries.TimeSeries` objects.
+
+ :type page_size: integer or None
+ :param page_size: A limit on the number of points to return per page.
+
+ :type page_token: string or None
+ :param page_token: A token to continue the retrieval.
+ """
+        yield 'filter', self.filter
+
+        yield 'interval.endTime', _datetime_to_rfc3339(
+            self._end_time, ignore_zone=False)
+
+        if self._start_time is not None:
+            yield 'interval.startTime', _datetime_to_rfc3339(
+                self._start_time, ignore_zone=False)
+
+        if self._per_series_aligner is not None:
+            yield 'aggregation.perSeriesAligner', self._per_series_aligner
+
+        if self._alignment_period_seconds is not None:
+            alignment_period = '{period}s'.format(
+                period=self._alignment_period_seconds)
+            yield 'aggregation.alignmentPeriod', alignment_period
+
+        if self._cross_series_reducer is not None:
+            yield ('aggregation.crossSeriesReducer',
+                   self._cross_series_reducer)
+
+        for field in self._group_by_fields:
+            yield 'aggregation.groupByFields', field
+
+        if headers_only:
+            yield 'view', 'HEADERS'
+
+        if page_size is not None:
+            yield 'pageSize', page_size
+
+        if page_token is not None:
+            yield 'pageToken', page_token
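+    # A sketch of how the pairs above serialize (hypothetical values);
+    # repeated keys survive because the params are a list, not a dict:
+    #
+    #   >>> from six.moves.urllib.parse import urlencode
+    #   >>> urlencode([('aggregation.groupByFields', 'resource.zone'),
+    #   ...            ('aggregation.groupByFields', 'project')])
+    #   'aggregation.groupByFields=resource.zone&aggregation.groupByFields=project'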
+
+    def as_dataframe(self, label=None, labels=None):
+ """Return all the selected time series as a :mod:`pandas` dataframe.
+
+ .. note::
+
+ Use of this method requires that you have :mod:`pandas` installed.
+
+ Examples::
+
+ # Generate a dataframe with a multi-level column header including
+ # the resource type and all available resource and metric labels.
+ # This can be useful for seeing what labels are available.
+ dataframe = query.as_dataframe()
+
+ # Generate a dataframe using a particular label for the column
+ # names.
+ dataframe = query.as_dataframe(label='instance_name')
+
+ # Generate a dataframe with a multi-level column header.
+ dataframe = query.as_dataframe(labels=['zone', 'instance_name'])
+
+ # Generate a dataframe with a multi-level column header, assuming
+ # the metric is issued by more than one type of resource.
+ dataframe = query.as_dataframe(
+ labels=['resource_type', 'instance_id'])
+
+ :type label: string or None
+ :param label: The label name to use for the dataframe header.
+ This can be the name of a resource label or metric label
+ (e.g., ``"instance_name"``), or the string ``"resource_type"``.
+
+ :type labels: list of strings, or None
+ :param labels: A list or tuple of label names to use for the dataframe
+ header. If more than one label name is provided, the resulting
+ dataframe will have a multi-level column header. Providing values
+ for both ``label`` and ``labels`` is an error.
+
+ :rtype: :class:`pandas.DataFrame`
+ :returns: A dataframe where each column represents one time series.
+ """
+        return _build_dataframe(self, label, labels)  # pragma: NO COVER
+
+    def copy(self):
+ """Copy the query object.
+
+ :rtype: :class:`Query`
+ :returns: The new query object.
+ """
+ # Using copy.deepcopy() would be appropriate, except that we want
+ # to copy self._client only as a reference.
+        new_query = copy.copy(self)
+        new_query._filter = copy.copy(self._filter)
+        return new_query
+
+
+class _Filter(object):
+ """Helper for assembling a filter string."""
+
+    def __init__(self, metric_type):
+        self.metric_type = metric_type
+        self.group_id = None
+        self.projects = ()
+        self.resource_label_filter = None
+        self.metric_label_filter = None
+
+    def select_resources(self, *args, **kwargs):
+ """Select by resource labels.
+
+ See :meth:`Query.select_resources`.
+ """
+        self.resource_label_filter = _build_label_filter('resource',
+                                                         *args, **kwargs)
+
+    def select_metrics(self, *args, **kwargs):
+ """Select by metric labels.
+
+ See :meth:`Query.select_metrics`.
+ """
+        self.metric_label_filter = _build_label_filter('metric',
+                                                       *args, **kwargs)
+
+    def __str__(self):
+        filters = ['metric.type = "{type}"'.format(type=self.metric_type)]
+        if self.group_id is not None:
+            filters.append('group.id = "{id}"'.format(id=self.group_id))
+        if self.projects:
+            filters.append(
+                ' OR '.join('project = "{project}"'.format(project=project)
+                            for project in self.projects))
+        if self.resource_label_filter:
+            filters.append(self.resource_label_filter)
+        if self.metric_label_filter:
+            filters.append(self.metric_label_filter)
+
+ # Parentheses are never actually required, because OR binds more
+ # tightly than AND in the Monitoring API's filter syntax.
+        return ' AND '.join(filters)
+
+
+def _build_label_filter(category, *args, **kwargs):
+ """Construct a filter string to filter on metric or resource labels."""
+    terms = list(args)
+    for key, value in six.iteritems(kwargs):
+        if value is None:
+            continue
+
+        suffix = None
+        if key.endswith('_prefix') or key.endswith('_suffix'):
+            key, suffix = key.rsplit('_', 1)
+
+        if category == 'resource' and key == 'resource_type':
+            key = 'resource.type'
+        else:
+            key = '.'.join((category, 'label', key))
+
+        if suffix == 'prefix':
+            term = '{key} = starts_with("{value}")'
+        elif suffix == 'suffix':
+            term = '{key} = ends_with("{value}")'
+        else:
+            term = '{key} = "{value}"'
+
+        terms.append(term.format(key=key, value=value))
+
+    return ' AND '.join(sorted(terms))
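+# For illustration (hypothetical arguments); note the sorted conjunction:
+#
+#   >>> _build_label_filter('resource', resource_type='gce_instance',
+#   ...                     zone_prefix='us-')
+#   'resource.label.zone = starts_with("us-") AND resource.type = "gce_instance"'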
+
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Monitored Resource Descriptors for the
+`Google Stackdriver Monitoring API (V3)`_.
+
+.. _Google Stackdriver Monitoring API (V3):
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
+ projects.monitoredResourceDescriptors
+"""
+
+import collections
+
+from gcloud.monitoring.label import LabelDescriptor
+
+
+
+class ResourceDescriptor(object):
+ """Specification of a monitored resource type and its schema.
+
+ :type name: string
+ :param name:
+ The "resource name" of the monitored resource descriptor:
+ ``"projects/<project_id>/monitoredResourceDescriptors/<type>"``
+
+ :type type_: string
+ :param type_:
+ The monitored resource type. For example: ``"gce_instance"``
+
+ :type display_name: string
+ :param display_name:
+ A concise name that might be displayed in user interfaces.
+
+ :type description: string
+ :param description:
+ A detailed description that might be used in documentation.
+
+ :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor`
+ :param labels:
+ A sequence of label descriptors specifying the labels used
+ to identify a specific instance of this monitored resource.
+ """
+
+    def __init__(self, name, type_, display_name, description, labels):
+        self.name = name
+        self.type = type_
+        self.display_name = display_name
+        self.description = description
+        self.labels = labels
+
+ @classmethod
+    def _fetch(cls, client, resource_type):
+ """Look up a monitored resource descriptor by type.
+
+ :type client: :class:`gcloud.monitoring.client.Client`
+ :param client: The client to use.
+
+ :type resource_type: string
+ :param resource_type: The resource type name.
+
+ :rtype: :class:`ResourceDescriptor`
+ :returns: The resource descriptor instance.
+
+ :raises: :class:`gcloud.exceptions.NotFound` if the resource descriptor
+ is not found.
+ """
+        path = ('/projects/{project}/monitoredResourceDescriptors/{type}'
+                .format(project=client.project,
+                        type=resource_type))
+        info = client.connection.api_request(method='GET', path=path)
+        return cls._from_dict(info)
+
+ @classmethod
+    def _list(cls, client, filter_string=None):
+ """List all monitored resource descriptors for the project.
+
+ :type client: :class:`gcloud.monitoring.client.Client`
+ :param client: The client to use.
+
+ :type filter_string: string or None
+ :param filter_string:
+ An optional filter expression describing the resource descriptors
+ to be returned. See the `filter documentation`_.
+
+ :rtype: list of :class:`ResourceDescriptor`
+ :returns: A list of resource descriptor instances.
+
+ .. _filter documentation:
+ https://cloud.google.com/monitoring/api/v3/filters
+ """
+        path = '/projects/{project}/monitoredResourceDescriptors/'.format(
+            project=client.project)
+
+        descriptors = []
+
+        page_token = None
+        while True:
+            params = {}
+
+            if filter_string is not None:
+                params['filter'] = filter_string
+
+            if page_token is not None:
+                params['pageToken'] = page_token
+
+            response = client.connection.api_request(
+                method='GET', path=path, query_params=params)
+            for info in response.get('resourceDescriptors', ()):
+                descriptors.append(cls._from_dict(info))
+
+            page_token = response.get('nextPageToken')
+            if not page_token:
+                break
+
+        return descriptors
+
+ @classmethod
+    def _from_dict(cls, info):
+ """Construct a resource descriptor from the parsed JSON representation.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+
+ :rtype: :class:`ResourceDescriptor`
+ :returns: A resource descriptor.
+ """
+        return cls(
+            name=info['name'],
+            type_=info['type'],
+            display_name=info.get('displayName', ''),
+            description=info.get('description', ''),
+            labels=tuple(LabelDescriptor._from_dict(label)
+                         for label in info.get('labels', ())),
+        )
+
+    def __repr__(self):
+        return (
+            '<ResourceDescriptor:\n'
+            ' name={name!r},\n'
+            ' type={type!r},\n'
+            ' labels={labels!r},\n'
+            ' display_name={display_name!r},\n'
+            ' description={description!r}>'
+        ).format(**self.__dict__)
+
+
+
+class Resource(collections.namedtuple('Resource', 'type labels')):
+ """A monitored resource identified by specifying values for all labels.
+
+ :type type: string
+ :param type: The resource type name.
+
+ :type labels: dict
+ :param labels: A mapping from label names to values for all labels
+ enumerated in the associated :class:`ResourceDescriptor`.
+ """
+    __slots__ = ()
+
+ @classmethod
+    def _from_dict(cls, info):
+ """Construct a resource object from the parsed JSON representation.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+
+ :rtype: :class:`Resource`
+ :returns: A resource object.
+ """
+        return cls(
+            type=info['type'],
+            labels=info.get('labels', {}),
+        )
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Time series for the `Google Stackdriver Monitoring API (V3)`_.
+
+Features intentionally omitted from this first version of the client library:
+ * Writing time series.
+ * Natural representation of distribution values.
+
+.. _Google Stackdriver Monitoring API (V3):
+ https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries
+"""
+
+import collections
+
+from gcloud.monitoring.metric import Metric
+from gcloud.monitoring.resource import Resource
+
+
+
+class TimeSeries(collections.namedtuple(
+        'TimeSeries', 'metric resource metric_kind value_type points')):
+ """A single time series of metric values.
+
+ :type metric: :class:`~gcloud.monitoring.metric.Metric`
+ :param metric: A metric object.
+
+ :type resource: :class:`~gcloud.monitoring.resource.Resource`
+ :param resource: A resource object.
+
+ :type metric_kind: string
+ :param metric_kind:
+ The kind of measurement: :data:`MetricKind.GAUGE`,
+ :data:`MetricKind.DELTA`, or :data:`MetricKind.CUMULATIVE`.
+ See :class:`~gcloud.monitoring.metric.MetricKind`.
+
+ :type value_type: string
+ :param value_type:
+ The value type of the metric: :data:`ValueType.BOOL`,
+ :data:`ValueType.INT64`, :data:`ValueType.DOUBLE`,
+ :data:`ValueType.STRING`, or :data:`ValueType.DISTRIBUTION`.
+ See :class:`~gcloud.monitoring.metric.ValueType`.
+
+ :type points: list of :class:`Point`
+ :param points: A list of point objects.
+ """
+
+    _labels = None
+
+ @property
+    def labels(self):
+ """A single dictionary with values for all the labels.
+
+ This combines ``resource.labels`` and ``metric.labels`` and also
+ adds ``"resource_type"``.
+ """
+        if self._labels is None:
+            labels = {'resource_type': self.resource.type}
+            labels.update(self.resource.labels)
+            labels.update(self.metric.labels)
+            self._labels = labels
+
+        return self._labels
+
+    def header(self, points=None):
+ """Copy everything but the point data.
+
+ :type points: list of :class:`Point`, or None
+ :param points: An optional point list.
+
+ :rtype: :class:`TimeSeries`
+ :returns: The new time series object.
+ """
+        points = list(points) if points else []
+        return self._replace(points=points)
+
+ @classmethod
+    def _from_dict(cls, info):
+ """Construct a time series from the parsed JSON representation.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+
+ :rtype: :class:`TimeSeries`
+ :returns: A time series object.
+ """
+        metric = Metric._from_dict(info['metric'])
+        resource = Resource._from_dict(info['resource'])
+        metric_kind = info['metricKind']
+        value_type = info['valueType']
+        points = [Point._from_dict(p) for p in info.get('points', ())]
+        return cls(metric, resource, metric_kind, value_type, points)
+
+    def __repr__(self):
+        """Return a representation string with the points elided."""
+        return (
+            '<TimeSeries with {num} points:\n'
+            ' metric={metric!r},\n'
+            ' resource={resource!r},\n'
+            ' metric_kind={kind!r}, value_type={type!r}>'
+        ).format(
+            num=len(self.points),
+            metric=self.metric,
+            resource=self.resource,
+            kind=self.metric_kind,
+            type=self.value_type,
+        )
+
+
+
+class Point(collections.namedtuple('Point', 'end_time start_time value')):
+ """A single point in a time series.
+
+ :type end_time: string
+ :param end_time: The end time in RFC3339 UTC "Zulu" format.
+
+ :type start_time: string or None
+ :param start_time: An optional start time in RFC3339 UTC "Zulu" format.
+
+ :type value: object
+ :param value: The metric value. This can be a scalar or a distribution.
+ """
+    __slots__ = ()
+
+ @classmethod
+    def _from_dict(cls, info):
+ """Construct a Point from the parsed JSON representation.
+
+ :type info: dict
+ :param info:
+ A ``dict`` parsed from the JSON wire-format representation.
+
+ :rtype: :class:`Point`
+ :returns: A point object.
+ """
+        end_time = info['interval']['endTime']
+        start_time = info['interval'].get('startTime')
+        (value_type, value), = info['value'].items()
+        if value_type == 'int64Value':
+            value = int(value)  # Convert from string.
+
+        return cls(end_time, start_time, value)
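+    # A sketch of the wire format parsed above (values are made up);
+    # note the string-to-int conversion for int64Value:
+    #
+    #   >>> Point._from_dict({'interval': {'endTime': '2016-07-01T00:01:00Z'},
+    #   ...                   'value': {'int64Value': '42'}})
+    #   Point(end_time='2016-07-01T00:01:00Z', start_time=None, value=42)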
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud Pub/Sub API."""
+
+import os
+
+from gcloud.client import JSONClient
+from gcloud.pubsub.connection import Connection
+from gcloud.pubsub.connection import _PublisherAPI as JSONPublisherAPI
+from gcloud.pubsub.connection import _SubscriberAPI as JSONSubscriberAPI
+from gcloud.pubsub.connection import _IAMPolicyAPI
+from gcloud.pubsub.subscription import Subscription
+from gcloud.pubsub.topic import Topic
+
+# pylint: disable=ungrouped-imports
+try:
+    from google.cloud.pubsub.v1.publisher_api import (
+        PublisherApi as GeneratedPublisherAPI)
+    from google.cloud.pubsub.v1.subscriber_api import (
+        SubscriberApi as GeneratedSubscriberAPI)
+    from gcloud.pubsub._gax import _PublisherAPI as GAXPublisherAPI
+    from gcloud.pubsub._gax import _SubscriberAPI as GAXSubscriberAPI
+except ImportError:  # pragma: NO COVER
+    _HAVE_GAX = False
+    GeneratedPublisherAPI = GAXPublisherAPI = None
+    GeneratedSubscriberAPI = GAXSubscriberAPI = None
+else:
+    _HAVE_GAX = True
+# pylint: enable=ungrouped-imports
+
+
+_USE_GAX = _HAVE_GAX and (os.environ.get('GCLOUD_ENABLE_GAX') is not None)
+
+
+
+class Client(JSONClient):
+ """Client to bundle configuration needed for API requests.
+
+ :type project: string
+ :param project: the project which the client acts on behalf of. Will be
+ passed when creating a topic. If not passed,
+ falls back to the default inferred from the environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ """
+
+    _connection_class = Connection
+    _publisher_api = _subscriber_api = _iam_policy_api = None
+
+ @property
+    def publisher_api(self):
+        """Helper for publisher-related API calls."""
+        if self._publisher_api is None:
+            if _USE_GAX:
+                generated = GeneratedPublisherAPI()
+                self._publisher_api = GAXPublisherAPI(generated)
+            else:
+                self._publisher_api = JSONPublisherAPI(self.connection)
+        return self._publisher_api
+
+ @property
+    def subscriber_api(self):
+        """Helper for subscriber-related API calls."""
+        if self._subscriber_api is None:
+            if _USE_GAX:
+                generated = GeneratedSubscriberAPI()
+                self._subscriber_api = GAXSubscriberAPI(generated)
+            else:
+                self._subscriber_api = JSONSubscriberAPI(self.connection)
+        return self._subscriber_api
+
+ @property
+    def iam_policy_api(self):
+        """Helper for IAM policy-related API calls."""
+        if self._iam_policy_api is None:
+            self._iam_policy_api = _IAMPolicyAPI(self.connection)
+        return self._iam_policy_api
+
+    def list_topics(self, page_size=None, page_token=None):
+ """List topics for the project associated with this client.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/list
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START client_list_topics]
+ :end-before: [END client_list_topics]
+
+ :type page_size: int
+        :param page_size: maximum number of topics to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: string
+ :param page_token: opaque marker for the next "page" of topics. If not
+ passed, the API will return the first page of
+ topics.
+
+ :rtype: tuple, (list, str)
+ :returns: list of :class:`gcloud.pubsub.topic.Topic`, plus a
+ "next page token" string: if not None, indicates that
+ more topics can be retrieved with another call (pass that
+ value as ``page_token``).
+ """
+        api = self.publisher_api
+        resources, next_token = api.list_topics(
+            self.project, page_size, page_token)
+        topics = [Topic.from_api_repr(resource, self)
+                  for resource in resources]
+        return topics, next_token
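+    # A usage sketch (not part of the original source): draining every
+    # page of topics by feeding the returned token back in:
+    #
+    #   topics, token = client.list_topics()
+    #   while token is not None:
+    #       more, token = client.list_topics(page_token=token)
+    #       topics.extend(more)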
+
+    def list_subscriptions(self, page_size=None, page_token=None):
+ """List subscriptions for the project associated with this client.
+
+ See:
+        https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/list
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START client_list_subscriptions]
+ :end-before: [END client_list_subscriptions]
+
+ :type page_size: int
+        :param page_size: maximum number of subscriptions to return. If not
+                          passed, defaults to a value set by the API.
+
+        :type page_token: string
+        :param page_token: opaque marker for the next "page" of
+                           subscriptions. If not passed, the API will return
+                           the first page of subscriptions.
+
+        :rtype: tuple, (list, str)
+        :returns: list of :class:`gcloud.pubsub.subscription.Subscription`,
+                  plus a "next page token" string: if not None, indicates that
+                  more subscriptions can be retrieved with another call (pass
+                  that value as ``page_token``).
+ """
+        api = self.subscriber_api
+        resources, next_token = api.list_subscriptions(
+            self.project, page_size, page_token)
+        topics = {}
+        subscriptions = [Subscription.from_api_repr(resource, self,
+                                                    topics=topics)
+                         for resource in resources]
+        return subscriptions, next_token
+
+    def topic(self, name, timestamp_messages=False):
+ """Creates a topic bound to the current client.
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START client_topic]
+ :end-before: [END client_topic]
+
+ :type name: string
+ :param name: the name of the topic to be constructed.
+
+ :type timestamp_messages: boolean
+ :param timestamp_messages: To be passed to ``Topic`` constructor.
+
+ :rtype: :class:`gcloud.pubsub.topic.Topic`
+ :returns: Topic created with the current client.
+ """
+        return Topic(name, client=self, timestamp_messages=timestamp_messages)
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud pubsub connections."""
+
+import os
+
+from gcloud import connection as base_connection
+from gcloud.environment_vars import PUBSUB_EMULATOR
+
+
+
+class Connection(base_connection.JSONConnection):
+ """A connection to Google Cloud Pubsub via the JSON REST API.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ connection.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: (Optional) HTTP object to make requests.
+
+ :type api_base_url: string
+ :param api_base_url: The base of the API call URL. Defaults to the value
+ :attr:`Connection.API_BASE_URL`.
+ """
+
+    API_BASE_URL = 'https://pubsub.googleapis.com'
+ """The base of the API call URL."""
+
+    API_VERSION = 'v1'
+ """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
+ """A template for the URL of a particular API call."""
+
+    SCOPE = ('https://www.googleapis.com/auth/pubsub',
+             'https://www.googleapis.com/auth/cloud-platform')
+ """The scopes required for authenticating as a Cloud Pub/Sub consumer."""
+
+    def __init__(self, credentials=None, http=None, api_base_url=None):
+        super(Connection, self).__init__(credentials=credentials, http=http)
+        if api_base_url is None:
+            emulator_host = os.getenv(PUBSUB_EMULATOR)
+            if emulator_host is None:
+                api_base_url = self.__class__.API_BASE_URL
+            else:
+                api_base_url = 'http://' + emulator_host
+        self.api_base_url = api_base_url
+
+    def build_api_url(self, path, query_params=None,
+                      api_base_url=None, api_version=None):
+ """Construct an API url given a few components, some optional.
+
+ Typically, you shouldn't need to use this method.
+
+ :type path: string
+ :param path: The path to the resource.
+
+ :type query_params: dict or list
+ :param query_params: A dictionary of keys and values (or list of
+ key-value pairs) to insert into the query
+ string of the URL.
+
+ :type api_base_url: string
+ :param api_base_url: The base URL for the API endpoint.
+ Typically you won't have to provide this.
+
+ :type api_version: string
+ :param api_version: The version of the API to call.
+ Typically you shouldn't provide this and instead
+ use the default for the library.
+
+ :rtype: string
+ :returns: The URL assembled from the pieces provided.
+ """
+        if api_base_url is None:
+            api_base_url = self.api_base_url
+        return super(Connection, self.__class__).build_api_url(
+            path, query_params=query_params,
+            api_base_url=api_base_url, api_version=api_version)
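+    # For illustration (hypothetical host): with the environment variable
+    # named by ``gcloud.environment_vars.PUBSUB_EMULATOR`` set to
+    # ``localhost:8085``, ``api_base_url`` becomes
+    # ``http://localhost:8085``, so:
+    #
+    #   >>> conn.build_api_url('/projects/my-project/topics')
+    #   'http://localhost:8085/v1/projects/my-project/topics'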
+
+
+class _PublisherAPI(object):
+ """Helper mapping publisher-related APIs.
+
+ :type connection: :class:`Connection`
+ :param connection: the connection used to make API requests.
+ """
+
+    def __init__(self, connection):
+        self._connection = connection
+
+    def list_topics(self, project, page_size=None, page_token=None):
+ """API call: list topics for a given project
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/list
+
+ :type project: string
+ :param project: project ID
+
+ :type page_size: int
+        :param page_size: maximum number of topics to return. If not passed,
+ defaults to a value set by the API.
+
+ :type page_token: string
+ :param page_token: opaque marker for the next "page" of topics. If not
+ passed, the API will return the first page of
+ topics.
+
+ :rtype: tuple, (list, str)
+ :returns: list of ``Topic`` resource dicts, plus a
+ "next page token" string: if not None, indicates that
+ more topics can be retrieved with another call (pass that
+ value as ``page_token``).
+ """
+        conn = self._connection
+        params = {}
+
+        if page_size is not None:
+            params['pageSize'] = page_size
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/topics' % (project,)
+        resp = conn.api_request(method='GET', path=path, query_params=params)
+        return resp.get('topics', ()), resp.get('nextPageToken')
+
+    def topic_create(self, topic_path):
+ """API call: create a topic
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/create
+
+ :type topic_path: string
+ :param topic_path: the fully-qualified path of the new topic, in format
+ ``projects/<PROJECT>/topics/<TOPIC_NAME>``.
+
+ :rtype: dict
+ :returns: ``Topic`` resource returned from the API.
+ """
+        conn = self._connection
+        return conn.api_request(method='PUT', path='/%s' % (topic_path,))
+
+    def topic_get(self, topic_path):
+ """API call: retrieve a topic
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/get
+
+ :type topic_path: string
+ :param topic_path: the fully-qualified path of the topic, in format
+ ``projects/<PROJECT>/topics/<TOPIC_NAME>``.
+
+ :rtype: dict
+ :returns: ``Topic`` resource returned from the API.
+ """
+        conn = self._connection
+        return conn.api_request(method='GET', path='/%s' % (topic_path,))
+
+    def topic_delete(self, topic_path):
+ """API call: delete a topic
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/delete
+
+ :type topic_path: string
+ :param topic_path: the fully-qualified path of the topic, in format
+ ``projects/<PROJECT>/topics/<TOPIC_NAME>``.
+ """
+        conn = self._connection
+        conn.api_request(method='DELETE', path='/%s' % (topic_path,))
+
+    def topic_publish(self, topic_path, messages):
+ """API call: publish one or more messages to a topic
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/publish
+
+ :type topic_path: string
+ :param topic_path: the fully-qualified path of the topic, in format
+ ``projects/<PROJECT>/topics/<TOPIC_NAME>``.
+
+ :type messages: list of dict
+ :param messages: messages to be published.
+
+ :rtype: list of string
+ :returns: list of opaque IDs for published messages.
+ """
+        conn = self._connection
+        data = {'messages': messages}
+        response = conn.api_request(
+            method='POST', path='/%s:publish' % (topic_path,), data=data)
+        return response['messageIds']
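+    # A sketch of the request payload (values are illustrative): message
+    # data is base64-encoded, per the JSON wire format for ``bytes``:
+    #
+    #   {'messages': [{'data': 'aGVsbG8=',          # base64 of b'hello'
+    #                  'attributes': {'origin': 'example'}}]}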
+
+    def topic_list_subscriptions(self, topic_path, page_size=None,
+                                 page_token=None):
+ """API call: list subscriptions bound to a topic
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics.subscriptions/list
+
+ :type topic_path: string
+ :param topic_path: the fully-qualified path of the topic, in format
+ ``projects/<PROJECT>/topics/<TOPIC_NAME>``.
+
+ :type page_size: int
+        :param page_size: maximum number of subscriptions to return. If not
+ passed, defaults to a value set by the API.
+
+ :type page_token: string
+        :param page_token: opaque marker for the next "page" of
+                           subscriptions. If not passed, the API will return
+                           the first page of subscriptions.
+
+        :rtype: tuple, (list, str)
+        :returns: list of fully-qualified subscription names for the supplied
+                  topic, plus a "next page token" string (None when no more
+                  pages remain).
+ """
+        conn = self._connection
+        params = {}
+
+        if page_size is not None:
+            params['pageSize'] = page_size
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/%s/subscriptions' % (topic_path,)
+        resp = conn.api_request(method='GET', path=path, query_params=params)
+        return resp.get('subscriptions', ()), resp.get('nextPageToken')
+
+
+class _SubscriberAPI(object):
+ """Helper mapping subscriber-related APIs.
+
+ :type connection: :class:`Connection`
+ :param connection: the connection used to make API requests.
+ """
+
+    def __init__(self, connection):
+        self._connection = connection
+
+    def list_subscriptions(self, project, page_size=None, page_token=None):
+ """API call: list subscriptions for a given project
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/list
+
+ :type project: string
+ :param project: project ID
+
+ :type page_size: int
+        :param page_size: maximum number of subscriptions to return. If not
+ passed, defaults to a value set by the API.
+
+ :type page_token: string
+ :param page_token: opaque marker for the next "page" of subscriptions.
+ If not passed, the API will return the first page
+ of subscriptions.
+
+ :rtype: tuple, (list, str)
+ :returns: list of ``Subscription`` resource dicts, plus a
+ "next page token" string: if not None, indicates that
+ more subscriptions can be retrieved with another call (pass
+ that value as ``page_token``).
+ """
+        conn = self._connection
+        params = {}
+
+        if page_size is not None:
+            params['pageSize'] = page_size
+
+        if page_token is not None:
+            params['pageToken'] = page_token
+
+        path = '/projects/%s/subscriptions' % (project,)
+        resp = conn.api_request(method='GET', path=path, query_params=params)
+        return resp.get('subscriptions', ()), resp.get('nextPageToken')
+
+    def subscription_create(self, subscription_path, topic_path,
+                            ack_deadline=None, push_endpoint=None):
+ """API call: create a subscription
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/create
+
+ :type subscription_path: string
+ :param subscription_path: the fully-qualified path of the new
+ subscription, in format
+ ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.
+
+ :type topic_path: string
+ :param topic_path: the fully-qualified path of the topic being
+ subscribed, in format
+ ``projects/<PROJECT>/topics/<TOPIC_NAME>``.
+
+ :type ack_deadline: int, or ``NoneType``
+ :param ack_deadline: the deadline (in seconds) by which messages pulled
+ from the back-end must be acknowledged.
+
+ :type push_endpoint: string, or ``NoneType``
+ :param push_endpoint: URL to which messages will be pushed by the
+ back-end. If not set, the application must pull
+ messages.
+
+ :rtype: dict
+ :returns: ``Subscription`` resource returned from the API.
+ """
+        conn = self._connection
+        path = '/%s' % (subscription_path,)
+        resource = {'topic': topic_path}
+
+        if ack_deadline is not None:
+            resource['ackDeadlineSeconds'] = ack_deadline
+
+        if push_endpoint is not None:
+            resource['pushConfig'] = {'pushEndpoint': push_endpoint}
+
+        return conn.api_request(method='PUT', path=path, data=resource)
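+    # A sketch of the resource built above (paths and values are
+    # illustrative):
+    #
+    #   {'topic': 'projects/my-project/topics/my-topic',
+    #    'ackDeadlineSeconds': 30,
+    #    'pushConfig': {'pushEndpoint': 'https://example.com/push'}}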
+
+    def subscription_get(self, subscription_path):
+ """API call: retrieve a subscription
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/get
+
+ :type subscription_path: string
+ :param subscription_path: the fully-qualified path of the subscription,
+ in format
+ ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.
+
+ :rtype: dict
+ :returns: ``Subscription`` resource returned from the API.
+ """
+        conn = self._connection
+        path = '/%s' % (subscription_path,)
+        return conn.api_request(method='GET', path=path)
+
+    def subscription_delete(self, subscription_path):
+ """API call: delete a subscription
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/delete
+
+ :type subscription_path: string
+ :param subscription_path: the fully-qualified path of the subscription,
+ in format
+ ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.
+ """
+        conn = self._connection
+        path = '/%s' % (subscription_path,)
+        conn.api_request(method='DELETE', path=path)
+
+    def subscription_modify_push_config(self, subscription_path,
+                                        push_endpoint):
+ """API call: update push config of a subscription
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/modifyPushConfig
+
+ :type subscription_path: string
+        :param subscription_path: the fully-qualified path of the
+                                  subscription, in format
+                                  ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.
+
+ :type push_endpoint: string, or ``NoneType``
+ :param push_endpoint: URL to which messages will be pushed by the
+ back-end. If not set, the application must pull
+ messages.
+ """
+        conn = self._connection
+        path = '/%s:modifyPushConfig' % (subscription_path,)
+        resource = {'pushConfig': {'pushEndpoint': push_endpoint}}
+        conn.api_request(method='POST', path=path, data=resource)
+
+    def subscription_pull(self, subscription_path, return_immediately=False,
+                          max_messages=1):
+ """API call: retrieve messages for a subscription
+
+ See:
+        https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/pull
+
+ :type subscription_path: string
+        :param subscription_path: the fully-qualified path of the
+                                  subscription, in format
+                                  ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.
+
+ :type return_immediately: boolean
+ :param return_immediately: if True, the back-end returns even if no
+ messages are available; if False, the API
+ call blocks until one or more messages are
+ available.
+
+ :type max_messages: int
+ :param max_messages: the maximum number of messages to return.
+
+ :rtype: list of dict
+ :returns: the ``receivedMessages`` element of the response.
+ """
+        conn = self._connection
+        path = '/%s:pull' % (subscription_path,)
+        data = {
+            'returnImmediately': return_immediately,
+            'maxMessages': max_messages,
+        }
+        response = conn.api_request(method='POST', path=path, data=data)
+        return response.get('receivedMessages', ())
+
+    def subscription_acknowledge(self, subscription_path, ack_ids):
+ """API call: acknowledge retrieved messages
+
+ See:
+        https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/acknowledge
+
+ :type subscription_path: string
+        :param subscription_path: the fully-qualified path of the
+                                  subscription, in format
+                                  ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.
+
+ :type ack_ids: list of string
+ :param ack_ids: ack IDs of messages being acknowledged
+ """
+        conn = self._connection
+        path = '/%s:acknowledge' % (subscription_path,)
+        data = {
+            'ackIds': ack_ids,
+        }
+        conn.api_request(method='POST', path=path, data=data)
+
+    def subscription_modify_ack_deadline(self, subscription_path, ack_ids,
+                                         ack_deadline):
+ """API call: update ack deadline for retrieved messages
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/modifyAckDeadline
+
+ :type subscription_path: string
+        :param subscription_path: the fully-qualified path of the
+                                  subscription, in format
+                                  ``projects/<PROJECT>/subscriptions/<SUB_NAME>``.
+
+ :type ack_ids: list of string
+ :param ack_ids: ack IDs of messages being acknowledged
+
+ :type ack_deadline: int
+ :param ack_deadline: the deadline (in seconds) by which messages pulled
+ from the back-end must be acknowledged.
+ """
+        conn = self._connection
+        path = '/%s:modifyAckDeadline' % (subscription_path,)
+        data = {
+            'ackIds': ack_ids,
+            'ackDeadlineSeconds': ack_deadline,
+        }
+        conn.api_request(method='POST', path=path, data=data)
+
+
+class _IAMPolicyAPI(object):
+ """Helper mapping IAM policy-related APIs.
+
+ :type connection: :class:`Connection`
+ :param connection: the connection used to make API requests.
+ """
+
+    def __init__(self, connection):
+        self._connection = connection
+
+    def get_iam_policy(self, target_path):
+ """API call: fetch the IAM policy for the target
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/getIamPolicy
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/getIamPolicy
+
+ :type target_path: string
+ :param target_path: the path of the target object.
+
+ :rtype: dict
+ :returns: the resource returned by the ``getIamPolicy`` API request.
+ """
+        conn = self._connection
+        path = '/%s:getIamPolicy' % (target_path,)
+        return conn.api_request(method='GET', path=path)
+
+    def set_iam_policy(self, target_path, policy):
+ """API call: update the IAM policy for the target
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/setIamPolicy
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/setIamPolicy
+
+ :type target_path: string
+ :param target_path: the path of the target object.
+
+ :type policy: dict
+ :param policy: the new policy resource.
+
+ :rtype: dict
+ :returns: the resource returned by the ``setIamPolicy`` API request.
+ """
+        conn = self._connection
+        wrapped = {'policy': policy}
+        path = '/%s:setIamPolicy' % (target_path,)
+        return conn.api_request(method='POST', path=path, data=wrapped)
+
+    def test_iam_permissions(self, target_path, permissions):
+ """API call: test permissions
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/testIamPermissions
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/testIamPermissions
+
+ :type target_path: string
+ :param target_path: the path of the target object.
+
+ :type permissions: list of string
+ :param permissions: the permissions to check
+
+        :rtype: list of string
+        :returns: the subset of ``permissions`` allowed, as returned by the
+                  ``testIamPermissions`` API request.
+ """
+        conn = self._connection
+        wrapped = {'permissions': permissions}
+        path = '/%s:testIamPermissions' % (target_path,)
+        resp = conn.api_request(method='POST', path=path, data=wrapped)
+        return resp.get('permissions', [])
+
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""PubSub API IAM policy definitions
+
+For allowed roles / permissions, see:
+https://cloud.google.com/pubsub/access_control#permissions
+"""
+
+# Generic IAM roles
+
+OWNER_ROLE = 'roles/owner'
+"""Generic role implying all rights to an object."""
+
+EDITOR_ROLE = 'roles/editor'
+"""Generic role implying rights to modify an object."""
+
+VIEWER_ROLE = 'roles/viewer'
+"""Generic role implying rights to access an object."""
+
+# Pubsub-specific IAM roles
+
+PUBSUB_ADMIN_ROLE = 'roles/pubsub.admin'
+"""Role implying all rights to an object."""
+
+PUBSUB_EDITOR_ROLE = 'roles/pubsub.editor'
+"""Role implying rights to modify an object."""
+
+PUBSUB_VIEWER_ROLE = 'roles/pubsub.viewer'
+"""Role implying rights to access an object."""
+
+PUBSUB_PUBLISHER_ROLE = 'roles/pubsub.publisher'
+"""Role implying rights to publish to a topic."""
+
+PUBSUB_SUBSCRIBER_ROLE = 'roles/pubsub.subscriber'
+"""Role implying rights to subscribe to a topic."""
+
+
+# Pubsub-specific permissions
+
+PUBSUB_TOPICS_CONSUME = 'pubsub.topics.consume'
+"""Permission: consume messages from a topic."""
+
+PUBSUB_TOPICS_CREATE = 'pubsub.topics.create'
+"""Permission: create topics."""
+
+PUBSUB_TOPICS_DELETE = 'pubsub.topics.delete'
+"""Permission: delete topics."""
+
+PUBSUB_TOPICS_GET = 'pubsub.topics.get'
+"""Permission: retrieve topics."""
+
+PUBSUB_TOPICS_GET_IAM_POLICY = 'pubsub.topics.getIamPolicy'
+"""Permission: retrieve topic IAM policies."""
+
+PUBSUB_TOPICS_LIST = 'pubsub.topics.list'
+"""Permission: list topics."""
+
+PUBSUB_TOPICS_SET_IAM_POLICY = 'pubsub.topics.setIamPolicy'
+"""Permission: update topic IAM policies."""
+
+PUBSUB_SUBSCRIPTIONS_CONSUME = 'pubsub.subscriptions.consume'
+"""Permission: consume messages from a subscription."""
+
+PUBSUB_SUBSCRIPTIONS_CREATE = 'pubsub.subscriptions.create'
+"""Permission: create subscriptions."""
+
+PUBSUB_SUBSCRIPTIONS_DELETE = 'pubsub.subscriptions.delete'
+"""Permission: delete subscriptions."""
+
+PUBSUB_SUBSCRIPTIONS_GET = 'pubsub.subscriptions.get'
+"""Permission: retrieve subscriptions."""
+
+PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY = 'pubsub.subscriptions.getIamPolicy'
+"""Permission: retrieve subscription IAM policies."""
+
+PUBSUB_SUBSCRIPTIONS_LIST = 'pubsub.subscriptions.list'
+"""Permission: list subscriptions."""
+
+PUBSUB_SUBSCRIPTIONS_SET_IAM_POLICY = 'pubsub.subscriptions.setIamPolicy'
+"""Permission: update subscription IAM policies."""
+
+PUBSUB_SUBSCRIPTIONS_UPDATE = 'pubsub.subscriptions.update'
+"""Permission: update subscriptions."""
+
+
+
+class Policy(object):
+ """Combined IAM Policy / Bindings.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/Shared.Types/Policy
+ https://cloud.google.com/pubsub/reference/rest/Shared.Types/Binding
+
+ :type etag: string
+    :param etag: ETag used to identify a unique version of the policy
+
+ :type version: int
+ :param version: unique version of the policy
+ """
+    def __init__(self, etag=None, version=None):
+        self.etag = etag
+        self.version = version
+        self.owners = set()
+        self.editors = set()
+        self.viewers = set()
+        self.publishers = set()
+        self.subscribers = set()
+
+ @staticmethod
+    def user(email):
+ """Factory method for a user member.
+
+ :type email: string
+ :param email: E-mail for this particular user.
+
+ :rtype: string
+ :returns: A member string corresponding to the given user.
+ """
+        return 'user:%s' % (email,)
+
+ @staticmethod
+    def service_account(email):
+ """Factory method for a service account member.
+
+ :type email: string
+ :param email: E-mail for this particular service account.
+
+ :rtype: string
+ :returns: A member string corresponding to the given service account.
+ """
+        return 'serviceAccount:%s' % (email,)
+
+ @staticmethod
+    def group(email):
+ """Factory method for a group member.
+
+ :type email: string
+ :param email: An id or e-mail for this particular group.
+
+ :rtype: string
+ :returns: A member string corresponding to the given group.
+ """
+        return 'group:%s' % (email,)
+
+ @staticmethod
+    def domain(domain):
+ """Factory method for a domain member.
+
+ :type domain: string
+ :param domain: The domain for this member.
+
+ :rtype: string
+ :returns: A member string corresponding to the given domain.
+ """
+        return 'domain:%s' % (domain,)
+
+    @staticmethod
+    def all_users():
+ """Factory method for a member representing all users.
+
+ :rtype: string
+ :returns: A member string representing all users.
+ """
+        return 'allUsers'
+
+    @staticmethod
+    def authenticated_users():
+ """Factory method for a member representing all authenticated users.
+
+ :rtype: string
+ :returns: A member string representing all authenticated users.
+ """
+        return 'allAuthenticatedUsers'
+
+    @classmethod
+    def from_api_repr(cls, resource):
+ """Create a policy from the resource returned from the API.
+
+ :type resource: dict
+ :param resource: resource returned from the ``getIamPolicy`` API.
+
+ :rtype: :class:`Policy`
+ :returns: the parsed policy
+ """
+        version = resource.get('version')
+        etag = resource.get('etag')
+        policy = cls(etag, version)
+        for binding in resource.get('bindings', ()):
+            role = binding['role']
+            members = set(binding['members'])
+            if role in (OWNER_ROLE, PUBSUB_ADMIN_ROLE):
+                policy.owners |= members
+            elif role in (EDITOR_ROLE, PUBSUB_EDITOR_ROLE):
+                policy.editors |= members
+            elif role in (VIEWER_ROLE, PUBSUB_VIEWER_ROLE):
+                policy.viewers |= members
+            elif role == PUBSUB_PUBLISHER_ROLE:
+                policy.publishers |= members
+            elif role == PUBSUB_SUBSCRIBER_ROLE:
+                policy.subscribers |= members
+            else:
+                raise ValueError('Unknown role: %s' % (role,))
+        return policy
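+
+# A minimal sketch of building a Policy from an API-style resource (the
+# resource dict below is illustrative, not a captured API response):
+#
+#     resource = {
+#         'etag': 'DEADBEEF',
+#         'version': 1,
+#         'bindings': [
+#             {'role': PUBSUB_ADMIN_ROLE,
+#              'members': [Policy.user('alice@example.com')]},
+#         ],
+#     }
+#     policy = Policy.from_api_repr(resource)
+#     policy.viewers.add(Policy.all_users())
+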
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Messages."""
+
+import base64
+
+from gcloud._helpers import _rfc3339_to_datetime
+
+
+
+class Message(object):
+ """Messages can be published to a topic and received by subscribers.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/PubsubMessage
+
+ :type data: bytes
+ :param data: the payload of the message
+
+ :type message_id: string
+ :param message_id: An ID assigned to the message by the API.
+
+ :type attributes: dict or None
+ :param attributes: Extra metadata associated by the publisher with the
+ message.
+ """
+    _service_timestamp = None
+
+    def __init__(self, data, message_id, attributes=None):
+        self.data = data
+        self.message_id = message_id
+        self._attributes = attributes
+
+    @property
+    def attributes(self):
+        """Lazily-constructed attribute dictionary."""
+        if self._attributes is None:
+            self._attributes = {}
+        return self._attributes
+
+    @property
+    def timestamp(self):
+ """Return sortable timestamp from attributes, if passed.
+
+ Allows sorting messages in publication order (assuming consistent
+ clocks across all publishers).
+
+ :rtype: :class:`datetime.datetime`
+ :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp
+ :raises: ValueError if timestamp not in ``attributes``, or if it does
+ not match the RFC 3339 format.
+ """
+        stamp = self.attributes.get('timestamp')
+        if stamp is None:
+            raise ValueError('No timestamp')
+        return _rfc3339_to_datetime(stamp)
+
+    @property
+    def service_timestamp(self):
+ """Return server-set timestamp.
+
+ :rtype: string
+ :returns: timestamp (in UTC timezone) in RFC 3339 format
+ """
+        return self._service_timestamp
+
+    @classmethod
+    def from_api_repr(cls, api_repr):
+ """Factory: construct message from API representation.
+
+ :type api_repr: dict or None
+ :param api_repr: The API representation of the message
+
+ :rtype: :class:`Message`
+ :returns: The message created from the response.
+ """
+        data = base64.b64decode(api_repr.get('data', b''))
+        instance = cls(
+            data=data, message_id=api_repr['messageId'],
+            attributes=api_repr.get('attributes'))
+        instance._service_timestamp = api_repr.get('publishTimestamp')
+        return instance
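+
+# A minimal round-trip sketch (the dict below is illustrative, not a captured
+# API response):
+#
+#     payload = base64.b64encode(b'hello').decode('ascii')
+#     message = Message.from_api_repr(
+#         {'data': payload, 'messageId': '12345',
+#          'attributes': {'origin': 'example'}})
+#     assert message.data == b'hello'
+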
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Subscriptions."""
+
+from gcloud.exceptions import NotFound
+from gcloud.pubsub._helpers import topic_name_from_path
+from gcloud.pubsub.iam import Policy
+from gcloud.pubsub.message import Message
+
+
+
+class Subscription(object):
+ """Subscriptions receive messages published to their topics.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions
+
+ :type name: string
+ :param name: the name of the subscription
+
+ :type topic: :class:`gcloud.pubsub.topic.Topic` or ``NoneType``
+ :param topic: the topic to which the subscription belongs; if ``None``,
+ the subscription's topic has been deleted.
+
+ :type ack_deadline: int
+ :param ack_deadline: the deadline (in seconds) by which messages pulled
+ from the back-end must be acknowledged.
+
+ :type push_endpoint: string
+ :param push_endpoint: URL to which messages will be pushed by the back-end.
+ If not set, the application must pull messages.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the topic.
+ """
+
+    _DELETED_TOPIC_PATH = '_deleted-topic_'
+ """Value of ``projects.subscriptions.topic`` when topic has been deleted.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions#Subscription.FIELDS.topic
+ """
+
+    def __init__(self, name, topic=None, ack_deadline=None,
+                 push_endpoint=None, client=None):
+
+        if client is None and topic is None:
+            raise TypeError("Pass one of 'topic' or 'client'.")
+
+        if client is not None and topic is not None:
+            raise TypeError("Pass only one of 'topic' or 'client'.")
+
+        self.name = name
+        self.topic = topic
+        self._client = client or topic._client
+        self._project = self._client.project
+        self.ack_deadline = ack_deadline
+        self.push_endpoint = push_endpoint
+
+    @classmethod
+    def from_api_repr(cls, resource, client, topics=None):
+ """Factory: construct a subscription given its API representation
+
+ :type resource: dict
+ :param resource: subscription resource representation returned from
+ the API
+
+ :type client: :class:`gcloud.pubsub.client.Client`
+ :param client: Client which holds credentials and project
+ configuration for the subscription.
+
+ :type topics: dict or None
+ :param topics: A mapping of topic names -> topics. If not passed,
+ the subscription will have a newly-created topic.
+
+ :rtype: :class:`gcloud.pubsub.subscription.Subscription`
+ :returns: Subscription parsed from ``resource``.
+ """
+        if topics is None:
+            topics = {}
+        topic_path = resource['topic']
+        if topic_path == cls._DELETED_TOPIC_PATH:
+            topic = None
+        else:
+            topic = topics.get(topic_path)
+            if topic is None:
+                # NOTE: This duplicates behavior from Topic.from_api_repr to
+                # avoid an import cycle.
+                topic_name = topic_name_from_path(topic_path, client.project)
+                topic = topics[topic_path] = client.topic(topic_name)
+        _, _, _, name = resource['name'].split('/')
+        ack_deadline = resource.get('ackDeadlineSeconds')
+        push_config = resource.get('pushConfig', {})
+        push_endpoint = push_config.get('pushEndpoint')
+        if topic is None:
+            return cls(name, ack_deadline=ack_deadline,
+                       push_endpoint=push_endpoint, client=client)
+        return cls(name, topic, ack_deadline, push_endpoint)
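+
+    # A minimal parsing sketch (the resource dict is illustrative, not a
+    # captured API response; ``client`` is assumed to be configured):
+    #
+    #     resource = {
+    #         'name': 'projects/my-project/subscriptions/my-sub',
+    #         'topic': 'projects/my-project/topics/my-topic',
+    #         'ackDeadlineSeconds': 30,
+    #     }
+    #     sub = Subscription.from_api_repr(resource, client)
+    #     assert sub.name == 'my-sub'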
+
+    @property
+    def project(self):
+        """Project bound to the subscription."""
+        return self._client.project
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in subscription APIs."""
+        return 'projects/%s/subscriptions/%s' % (self.project, self.name)
+
+    @property
+    def path(self):
+        """URL path for the subscription's APIs."""
+        return '/%s' % (self.full_name,)
+
+
+    def auto_ack(self, return_immediately=False, max_messages=1, client=None):
+ """:class:`AutoAck` factory
+
+ :type return_immediately: boolean
+ :param return_immediately: passed through to :meth:`Subscription.pull`
+
+ :type max_messages: int
+ :param max_messages: passed through to :meth:`Subscription.pull`
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: passed through to :meth:`Subscription.pull` and
+ :meth:`Subscription.acknowledge`.
+
+ :rtype: :class:`AutoAck`
+ :returns: An :class:`AutoAck` bound to this subscription.
+ """
+        return AutoAck(self, return_immediately, max_messages, client)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the topic of the
+ current subscription.
+
+ :rtype: :class:`gcloud.pubsub.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self._client
+        return client
+
+
+    def create(self, client=None):
+ """API call: create the subscription via a PUT request
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/create
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_create]
+ :end-before: [END subscription_create]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        api.subscription_create(
+            self.full_name, self.topic.full_name, self.ack_deadline,
+            self.push_endpoint)
+
+
+    def exists(self, client=None):
+ """API call: test existence of the subscription via a GET request
+
+ See
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/get
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_exists]
+ :end-before: [END subscription_exists]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the subscription.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        try:
+            api.subscription_get(self.full_name)
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def reload(self, client=None):
+ """API call: sync local subscription configuration via a GET request
+
+ See
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/get
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_reload]
+ :end-before: [END subscription_reload]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        data = api.subscription_get(self.full_name)
+        self.ack_deadline = data.get('ackDeadlineSeconds')
+        push_config = data.get('pushConfig', {})
+        self.push_endpoint = push_config.get('pushEndpoint')
+
+
+    def delete(self, client=None):
+ """API call: delete the subscription via a DELETE request.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/delete
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_delete]
+ :end-before: [END subscription_delete]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        api.subscription_delete(self.full_name)
+
+
+    def modify_push_configuration(self, push_endpoint, client=None):
+ """API call: update the push endpoint for the subscription.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/modifyPushConfig
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_push_pull]
+ :end-before: [END subscription_push_pull]
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_pull_push]
+ :end-before: [END subscription_pull_push]
+
+ :type push_endpoint: string
+ :param push_endpoint: URL to which messages will be pushed by the
+ back-end. If None, the application must pull
+ messages.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        api.subscription_modify_push_config(self.full_name, push_endpoint)
+        self.push_endpoint = push_endpoint
+
+
+    def pull(self, return_immediately=False, max_messages=1, client=None):
+ """API call: retrieve messages for the subscription.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/pull
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_pull]
+ :end-before: [END subscription_pull]
+
+ :type return_immediately: boolean
+ :param return_immediately: if True, the back-end returns even if no
+ messages are available; if False, the API
+ call blocks until one or more messages are
+ available.
+
+ :type max_messages: int
+ :param max_messages: the maximum number of messages to return.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+
+ :rtype: list of (ack_id, message) tuples
+ :returns: sequence of tuples: ``ack_id`` is the ID to be used in a
+ subsequent call to :meth:`acknowledge`, and ``message``
+ is an instance of :class:`gcloud.pubsub.message.Message`.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        response = api.subscription_pull(
+            self.full_name, return_immediately, max_messages)
+        return [(info['ackId'], Message.from_api_repr(info['message']))
+                for info in response]
+
+
+    def acknowledge(self, ack_ids, client=None):
+ """API call: acknowledge retrieved messages for the subscription.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/acknowledge
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_acknowledge]
+ :end-before: [END subscription_acknowledge]
+
+ :type ack_ids: list of string
+ :param ack_ids: ack IDs of messages being acknowledged
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        api.subscription_acknowledge(self.full_name, ack_ids)
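+
+    # A minimal pull-and-acknowledge sketch (assumes ``subscription`` exists
+    # on the back-end; ``process`` is a placeholder for application code):
+    #
+    #     pulled = subscription.pull(return_immediately=True, max_messages=10)
+    #     for ack_id, message in pulled:
+    #         process(message.data)
+    #     if pulled:
+    #         subscription.acknowledge([ack_id for ack_id, _ in pulled])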
+
+
+    def modify_ack_deadline(self, ack_ids, ack_deadline, client=None):
+ """API call: update acknowledgement deadline for a retrieved message.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/modifyAckDeadline
+
+ :type ack_ids: list of string
+ :param ack_ids: ack IDs of messages being updated
+
+ :type ack_deadline: int
+ :param ack_deadline: new deadline for the message, in seconds
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+ """
+        client = self._require_client(client)
+        api = client.subscriber_api
+        api.subscription_modify_ack_deadline(
+            self.full_name, ack_ids, ack_deadline)
+
+
+    def get_iam_policy(self, client=None):
+ """Fetch the IAM policy for the subscription.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/getIamPolicy
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_get_iam_policy]
+ :end-before: [END subscription_get_iam_policy]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+
+ :rtype: :class:`gcloud.pubsub.iam.Policy`
+ :returns: policy created from the resource returned by the
+ ``getIamPolicy`` API request.
+ """
+        client = self._require_client(client)
+        api = client.iam_policy_api
+        resp = api.get_iam_policy(self.full_name)
+        return Policy.from_api_repr(resp)
+
+
+    def set_iam_policy(self, policy, client=None):
+ """Update the IAM policy for the subscription.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/setIamPolicy
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_set_iam_policy]
+ :end-before: [END subscription_set_iam_policy]
+
+ :type policy: :class:`gcloud.pubsub.iam.Policy`
+ :param policy: the new policy, typically fetched via
+ :meth:`get_iam_policy` and updated in place.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+
+ :rtype: :class:`gcloud.pubsub.iam.Policy`
+ :returns: updated policy created from the resource returned by the
+ ``setIamPolicy`` API request.
+ """
+        client = self._require_client(client)
+        api = client.iam_policy_api
+        resource = policy.to_api_repr()
+        resp = api.set_iam_policy(self.full_name, resource)
+        return Policy.from_api_repr(resp)
+
+
+    def check_iam_permissions(self, permissions, client=None):
+ """Verify permissions allowed for the current user.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.subscriptions/testIamPermissions
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START subscription_check_iam_permissions]
+ :end-before: [END subscription_check_iam_permissions]
+
+ :type permissions: list of string
+ :param permissions: list of permissions to be tested
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current subscription's topic.
+
+ :rtype: sequence of string
+ :returns: subset of ``permissions`` allowed by current IAM policy.
+ """
+        client = self._require_client(client)
+        api = client.iam_policy_api
+        return api.test_iam_permissions(
+            self.full_name, list(permissions))
+
+
+
+class AutoAck(dict):
+ """Wrapper for :meth:`Subscription.pull` results.
+
+ A mapping that tracks messages still to be acknowledged.
+
+ When used as a context manager, acknowledges all messages still in the
+ mapping on `__exit__`. When processing the pulled messages, application
+ code MUST delete messages from the :class:`AutoAck` mapping which are not
+ successfully processed, e.g.:
+
+ .. code-block:: python
+
+    with AutoAck(subscription) as ack:  # calls ``subscription.pull``
+        for ack_id, message in ack.items():
+            try:
+                do_something_with(message)
+            except Exception:
+                del ack[ack_id]
+
+ :type subscription: :class:`Subscription`
+ :param subscription: subscription to be pulled.
+
+ :type return_immediately: boolean
+ :param return_immediately: passed through to :meth:`Subscription.pull`
+
+ :type max_messages: int
+ :param max_messages: passed through to :meth:`Subscription.pull`
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: passed through to :meth:`Subscription.pull` and
+ :meth:`Subscription.acknowledge`.
+ """
+    def __init__(self, subscription,
+                 return_immediately=False, max_messages=1, client=None):
+        super(AutoAck, self).__init__()
+        self._subscription = subscription
+        self._return_immediately = return_immediately
+        self._max_messages = max_messages
+        self._client = client
+
+    def __enter__(self):
+        items = self._subscription.pull(
+            self._return_immediately, self._max_messages, self._client)
+        self.update(items)
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._subscription.acknowledge(list(self), self._client)
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Define API Topics."""
+
+import base64
+
+from gcloud._helpers import _datetime_to_rfc3339
+from gcloud._helpers import _NOW
+from gcloud.exceptions import NotFound
+from gcloud.pubsub._helpers import subscription_name_from_path
+from gcloud.pubsub._helpers import topic_name_from_path
+from gcloud.pubsub.iam import Policy
+from gcloud.pubsub.subscription import Subscription
+
+
+
+class Topic(object):
+ """Topics are targets to which messages can be published.
+
+ Subscribers then receive those messages.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics
+
+ :type name: string
+ :param name: the name of the topic
+
+ :type client: :class:`gcloud.pubsub.client.Client`
+ :param client: A client which holds credentials and project configuration
+ for the topic (which requires a project).
+
+ :type timestamp_messages: boolean
+ :param timestamp_messages: If true, the topic will add a ``timestamp`` key
+ to the attributes of each published message:
+ the value will be an RFC 3339 timestamp.
+ """
+    def __init__(self, name, client, timestamp_messages=False):
+        self.name = name
+        self._client = client
+        self.timestamp_messages = timestamp_messages
+
+
+    def subscription(self, name, ack_deadline=None, push_endpoint=None):
+ """Creates a subscription bound to the current topic.
+
+ Example: pull-mode subscription, default parameter values
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_subscription_defaults]
+ :end-before: [END topic_subscription_defaults]
+
+ Example: pull-mode subscription, override ``ack_deadline`` default
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_subscription_ack90]
+ :end-before: [END topic_subscription_ack90]
+
+ Example: push-mode subscription
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_subscription_push]
+ :end-before: [END topic_subscription_push]
+
+ :type name: string
+ :param name: the name of the subscription
+
+ :type ack_deadline: int
+ :param ack_deadline: the deadline (in seconds) by which messages pulled
+ from the back-end must be acknowledged.
+
+ :type push_endpoint: string
+ :param push_endpoint: URL to which messages will be pushed by the
+ back-end. If not set, the application must pull
+ messages.
+
+ :rtype: :class:`Subscription`
+ :returns: The subscription created with the passed in arguments.
+ """
+        return Subscription(name, self, ack_deadline=ack_deadline,
+                            push_endpoint=push_endpoint)
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+ """Factory: construct a topic given its API representation
+
+ :type resource: dict
+ :param resource: topic resource representation returned from the API
+
+ :type client: :class:`gcloud.pubsub.client.Client`
+ :param client: Client which holds credentials and project
+ configuration for the topic.
+
+ :rtype: :class:`gcloud.pubsub.topic.Topic`
+ :returns: Topic parsed from ``resource``.
+ :raises: :class:`ValueError` if ``client`` is not ``None`` and the
+ project from the resource does not agree with the project
+ from the client.
+ """
+        topic_name = topic_name_from_path(resource['name'], client.project)
+        return cls(topic_name, client=client)
+
+    @property
+    def project(self):
+        """Project bound to the topic."""
+        return self._client.project
+
+    @property
+    def full_name(self):
+        """Fully-qualified name used in topic / subscription APIs."""
+        return 'projects/%s/topics/%s' % (self.project, self.name)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :rtype: :class:`gcloud.pubsub.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self._client
+        return client
+
+
+    def create(self, client=None):
+ """API call: create the topic via a PUT request
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/create
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_create]
+ :end-before: [END topic_create]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+ """
+        client = self._require_client(client)
+        api = client.publisher_api
+        api.topic_create(topic_path=self.full_name)
+
+
+    def exists(self, client=None):
+ """API call: test for the existence of the topic via a GET request
+
+ See
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/get
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_exists]
+ :end-before: [END topic_exists]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the topic.
+ """
+        client = self._require_client(client)
+        api = client.publisher_api
+
+        try:
+            api.topic_get(topic_path=self.full_name)
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def delete(self, client=None):
+ """API call: delete the topic via a DELETE request
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/delete
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_delete]
+ :end-before: [END topic_delete]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+ """
+        client = self._require_client(client)
+        api = client.publisher_api
+        api.topic_delete(topic_path=self.full_name)
+
+    def _timestamp_message(self, attrs):
+ """Add a timestamp to ``attrs``, if the topic is so configured.
+
+ If ``attrs`` already has the key, do nothing.
+
+ Helper method for ``publish``/``Batch.publish``.
+ """
+        if self.timestamp_messages and 'timestamp' not in attrs:
+            attrs['timestamp'] = _datetime_to_rfc3339(_NOW())
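+
+    # A minimal sketch of the timestamping behavior (illustrative; assumes a
+    # configured ``client``; ``_timestamp_message`` is an internal helper):
+    #
+    #     topic = Topic('logs', client, timestamp_messages=True)
+    #     attrs = {}
+    #     topic._timestamp_message(attrs)
+    #     # attrs now holds an RFC 3339 string under the 'timestamp' key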
+
+
+    def publish(self, message, client=None, **attrs):
+ """API call: publish a message to a topic via a POST request
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/publish
+
+ Example without message attributes:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_publish_simple_message]
+ :end-before: [END topic_publish_simple_message]
+
+ With message attributes:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_publish_message_with_attrs]
+ :end-before: [END topic_publish_message_with_attrs]
+
+ :type message: bytes
+ :param message: the message payload
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :type attrs: dict (string -> string)
+ :param attrs: key-value pairs to send as message attributes
+
+ :rtype: str
+ :returns: message ID assigned by the server to the published message
+ """
+        client = self._require_client(client)
+        api = client.publisher_api
+
+        self._timestamp_message(attrs)
+        message_b = base64.b64encode(message).decode('ascii')
+        message_data = {'data': message_b, 'attributes': attrs}
+        message_ids = api.topic_publish(self.full_name, [message_data])
+        return message_ids[0]
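+
+    # A minimal publish sketch (assumes the topic exists; the payload must be
+    # bytes, and extra keyword arguments become message attributes):
+    #
+    #     message_id = topic.publish(b'hello', origin='example')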
+
+
+    def batch(self, client=None):
+ """Return a batch to use as a context manager.
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_batch]
+ :end-before: [END topic_batch]
+
+ .. note::
+
+ The only API request happens during the ``__exit__()`` of the batch
+ used as a context manager, and only if the block exits without
+ raising an exception.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :rtype: :class:`Batch`
+ :returns: A batch to use as a context manager.
+ """
+        client = self._require_client(client)
+        return Batch(self, client)
+
+
+    def list_subscriptions(self, page_size=None, page_token=None,
+                           client=None):
+ """List subscriptions bound to the current topic.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics.subscriptions/list
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_list_subscriptions]
+ :end-before: [END topic_list_subscriptions]
+
+ :type page_size: int
+ :param page_size: maximum number of subscriptions to return. If not
+ passed, defaults to a value set by the API.
+
+ :type page_token: string
+ :param page_token: opaque marker for the next "page" of subscriptions.
+ If not passed, the API will return the first page of
+ subscriptions.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :rtype: tuple, (list, str)
+ :returns: list of :class:`gcloud.pubsub.subscription.Subscription`,
+ plus a "next page token" string: if not None, indicates that
+ more subscriptions can be retrieved with another call (pass
+ that value as ``page_token``).
+ """
+        client = self._require_client(client)
+        api = client.publisher_api
+        sub_paths, next_token = api.topic_list_subscriptions(
+            self.full_name, page_size, page_token)
+        subscriptions = []
+        for sub_path in sub_paths:
+            sub_name = subscription_name_from_path(sub_path, self.project)
+            subscriptions.append(Subscription(sub_name, self))
+        return subscriptions, next_token
+
+
+    def get_iam_policy(self, client=None):
+ """Fetch the IAM policy for the topic.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/getIamPolicy
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_get_iam_policy]
+ :end-before: [END topic_get_iam_policy]
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :rtype: :class:`gcloud.pubsub.iam.Policy`
+ :returns: policy created from the resource returned by the
+ ``getIamPolicy`` API request.
+ """
+        client = self._require_client(client)
+        api = client.iam_policy_api
+        resp = api.get_iam_policy(self.full_name)
+        return Policy.from_api_repr(resp)
+
+
+    def set_iam_policy(self, policy, client=None):
+ """Update the IAM policy for the topic.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/setIamPolicy
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_set_iam_policy]
+ :end-before: [END topic_set_iam_policy]
+
+ :type policy: :class:`gcloud.pubsub.iam.Policy`
+ :param policy: the new policy, typically fetched via
+ :meth:`get_iam_policy` and updated in place.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :rtype: :class:`gcloud.pubsub.iam.Policy`
+ :returns: updated policy created from the resource returned by the
+ ``setIamPolicy`` API request.
+ """
+        client = self._require_client(client)
+        api = client.iam_policy_api
+        resource = policy.to_api_repr()
+        resp = api.set_iam_policy(self.full_name, resource)
+        return Policy.from_api_repr(resp)
+
+
+    def check_iam_permissions(self, permissions, client=None):
+ """Verify permissions allowed for the current user.
+
+ See:
+ https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/testIamPermissions
+
+ Example:
+
+ .. literalinclude:: pubsub_snippets.py
+ :start-after: [START topic_check_iam_permissions]
+ :end-before: [END topic_check_iam_permissions]
+
+ :type permissions: list of string
+ :param permissions: list of permissions to be tested
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current topic.
+
+ :rtype: sequence of string
+ :returns: subset of ``permissions`` allowed by current IAM policy.
+ """
+        client = self._require_client(client)
+        api = client.iam_policy_api
+        return api.test_iam_permissions(
+            self.full_name, list(permissions))
+
+
+
+class Batch(object):
+ """Context manager: collect messages to publish via a single API call.
+
+ Helper returned by :meth:`Topic.batch`.
+
+ :type topic: :class:`gcloud.pubsub.topic.Topic`
+ :param topic: the topic to which messages will be published
+
+ :type client: :class:`gcloud.pubsub.client.Client`
+ :param client: The client to use.
+ """
+    def __init__(self, topic, client):
+        self.topic = topic
+        self.messages = []
+        self.message_ids = []
+        self.client = client
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        if exc_type is None:
+            self.commit()
+
+    def __iter__(self):
+        return iter(self.message_ids)
+
+
+    def publish(self, message, **attrs):
+ """Emulate publishing a message, but save it.
+
+ :type message: bytes
+ :param message: the message payload
+
+ :type attrs: dict (string -> string)
+ :param attrs: key-value pairs to send as message attributes
+ """
+        self.topic._timestamp_message(attrs)
+        self.messages.append(
+            {'data': base64.b64encode(message).decode('ascii'),
+             'attributes': attrs})
+
+
+    def commit(self, client=None):
+ """Send saved messages as a single API call.
+
+ :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current batch.
+ """
+        if not self.messages:
+            return
+
+        if client is None:
+            client = self.client
+        api = client.publisher_api
+        message_ids = api.topic_publish(
+            self.topic.full_name, self.messages[:])
+        self.message_ids.extend(message_ids)
+        del self.messages[:]
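+
+# A minimal batch-publish sketch (assumes ``topic`` exists; the single API
+# request is made when the ``with`` block exits without an exception):
+#
+#     with topic.batch() as batch:
+#         batch.publish(b'payload-1')
+#         batch.publish(b'payload-2', stage='prod')
+#     message_ids = list(batch)
+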
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A Client for interacting with the Resource Manager API."""
+
+
+from gcloud.client import Client as BaseClient
+from gcloud.iterator import Iterator
+from gcloud.resource_manager.connection import Connection
+from gcloud.resource_manager.project import Project
+
+
+
+class Client(BaseClient):
+ """Client to bundle configuration needed for API requests.
+
+ See
+ https://cloud.google.com/resource-manager/reference/rest/
+ for more information on this API.
+
+ Automatically get credentials::
+
+ >>> from gcloud import resource_manager
+ >>> client = resource_manager.Client()
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ """
+
+    _connection_class = Connection
+
+
+    def new_project(self, project_id, name=None, labels=None):
+ """Creates a :class:`.Project` bound to the current client.
+
+ Use :meth:`Project.reload() \
+ <gcloud.resource_manager.project.Project.reload>` to retrieve
+ project metadata after creating a :class:`.Project` instance.
+
+ .. note::
+
+ This does not make an API call.
+
+ :type project_id: str
+ :param project_id: The ID for this project.
+
+ :type name: string
+ :param name: The display name of the project.
+
+ :type labels: dict
+ :param labels: A dictionary of labels associated with the project.
+
+ :rtype: :class:`.Project`
+ :returns: A new instance of a :class:`.Project` **without**
+ any metadata loaded.
+ """
+        return Project(project_id=project_id,
+                       client=self, name=name, labels=labels)
+
+
+    def fetch_project(self, project_id):
+ """Fetch an existing project and its relevant metadata by ID.
+
+ .. note::
+
+ If the project does not exist, this will raise a
+ :class:`NotFound <gcloud.exceptions.NotFound>` error.
+
+ :type project_id: str
+ :param project_id: The ID for this project.
+
+ :rtype: :class:`.Project`
+ :returns: A :class:`.Project` with metadata fetched from the API.
+ """
+        project = self.new_project(project_id)
+        project.reload()
+        return project
+
+
+    def list_projects(self, filter_params=None, page_size=None):
+ """List the projects visible to this client.
+
+ Example::
+
+ >>> from gcloud import resource_manager
+ >>> client = resource_manager.Client()
+ >>> for project in client.list_projects():
+ ... print project.project_id
+
+ List all projects with label ``'environment'`` set to ``'prod'``
+ (filtering by labels)::
+
+ >>> from gcloud import resource_manager
+ >>> client = resource_manager.Client()
+ >>> env_filter = {'labels.environment': 'prod'}
+ >>> for project in client.list_projects(env_filter):
+ ... print project.project_id
+
+ See:
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/list
+
+ Complete filtering example::
+
+ >>> project_filter = { # Return projects with...
+ ... 'name': 'My Project', # name set to 'My Project'.
+ ... 'id': 'my-project-id', # id set to 'my-project-id'.
+ ... 'labels.stage': 'prod', # the label 'stage' set to 'prod'
+ ... 'labels.color': '*' # a label 'color' set to anything.
+ ... }
+ >>> client.list_projects(project_filter)
+
+ :type filter_params: dict
+ :param filter_params: (Optional) A dictionary of filter options where
+ each key is a property to filter on, and each
+ value is the (case-insensitive) value to check
+ (or the glob ``*`` to check for existence of the
+ property). See the example above for more
+ details.
+
+ :type page_size: int
+ :param page_size: (Optional) Maximum number of projects to return in a
+ single page. If not passed, defaults to a value set
+ by the API.
+
+ :rtype: :class:`_ProjectIterator`
+ :returns: A project iterator. The iterator will make multiple API
+ requests if you continue iterating and there are more
+ pages of results. Each item returned will be a
+ :class:`.Project`.
+ """
+        extra_params = {}
+
+        if page_size is not None:
+            extra_params['pageSize'] = page_size
+
+        if filter_params is not None:
+            extra_params['filter'] = filter_params
+
+        return _ProjectIterator(self, extra_params=extra_params)
+
+
+class _ProjectIterator(Iterator):
+ """An iterator over a list of Project resources.
+
+ You shouldn't have to use this directly, but instead should use the
+ helper methods on :class:`gcloud.resource_manager.client.Client`
+ objects.
+
+ :type client: :class:`gcloud.resource_manager.client.Client`
+ :param client: The client to use for making connections.
+
+ :type extra_params: dict
+ :param extra_params: (Optional) Extra query string parameters for
+ the API call.
+ """
+
+    def __init__(self, client, extra_params=None):
+        super(_ProjectIterator, self).__init__(
+            client=client, path='/projects', extra_params=extra_params)
+
+    def get_items_from_response(self, response):
+ """Yield :class:`.Project` items from response.
+
+ :type response: dict
+ :param response: The JSON API response for a page of projects.
+ """
+        for resource in response.get('projects', []):
+            item = Project.from_api_repr(resource, client=self.client)
+            yield item
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud.resource_manager connections."""
+
+
+from gcloud import connection as base_connection
+
+
+
+class Connection(base_connection.JSONConnection):
+ """A connection to Google Cloud Resource Manager via the JSON REST API.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ connection.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: (Optional) HTTP object to make requests.
+ """
+
+    API_BASE_URL = 'https://cloudresourcemanager.googleapis.com'
+    """The base of the API call URL."""
+
+    API_VERSION = 'v1beta1'
+    """The version of the API, used in building the API call's URL."""
+
+    API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
+    """A template for the URL of a particular API call."""
+
+    SCOPE = ('https://www.googleapis.com/auth/cloud-platform',)
+    """The scopes required for authenticating as a Resource Manager
+    consumer."""
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility for managing projects via the Cloud Resource Manager API."""
+
+
+from gcloud.exceptions import NotFound
+
+
+
+class Project(object):
+ """Projects are containers for your work on Google Cloud Platform.
+
+ .. note::
+
+ A :class:`Project` can also be created via
+ :meth:`Client.new_project() \
+ <gcloud.resource_manager.client.Client.new_project>`
+
+ To manage labels on a :class:`Project`::
+
+ >>> from gcloud import resource_manager
+ >>> client = resource_manager.Client()
+ >>> project = client.new_project('purple-spaceship-123')
+ >>> project.labels = {'color': 'purple'}
+ >>> project.labels['environment'] = 'production'
+ >>> project.update()
+
+ See:
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects
+
+ :type project_id: string
+ :param project_id: The globally unique ID of the project.
+
+ :type client: :class:`gcloud.resource_manager.client.Client`
+ :param client: The Client used with this project.
+
+ :type name: string
+ :param name: The display name of the project.
+
+ :type labels: dict
+ :param labels: A dictionary of labels associated with the project.
+ """
+    def __init__(self, project_id, client, name=None, labels=None):
+        self._client = client
+        self.project_id = project_id
+        self.name = name
+        self.number = None
+        self.labels = labels or {}
+        self.status = None
+
+    def __repr__(self):
+        return '<Project: %r (%r)>' % (self.name, self.project_id)
+
+    @classmethod
+    def from_api_repr(cls, resource, client):
+ """Factory: construct a project given its API representation.
+
+ :type resource: dict
+ :param resource: project resource representation returned from the API
+
+ :type client: :class:`gcloud.resource_manager.client.Client`
+ :param client: The Client used with this project.
+
+ :rtype: :class:`gcloud.resource_manager.project.Project`
+ :returns: The project created.
+ """
+        project = cls(project_id=resource['projectId'], client=client)
+        project.set_properties_from_api_repr(resource)
+        return project
+
+
+    def set_properties_from_api_repr(self, resource):
+ """Update specific properties from its API representation."""
+        self.name = resource.get('name')
+        self.number = resource['projectNumber']
+        self.labels = resource.get('labels', {})
+        self.status = resource['lifecycleState']
+
+    @property
+    def full_name(self):
+        """Fully-qualified name (ie, ``'projects/purple-spaceship-123'``)."""
+        if not self.project_id:
+            raise ValueError('Missing project ID.')
+        return 'projects/%s' % (self.project_id,)
+
+    @property
+    def path(self):
+        """URL for the project (ie, ``'/projects/purple-spaceship-123'``)."""
+        return '/%s' % (self.full_name,)
+
+    def _require_client(self, client):
+ """Check client or verify over-ride.
+
+ :type client: :class:`gcloud.resource_manager.client.Client` or
+ ``NoneType``
+ :param client: the client to use. If not passed, falls back to the
+ ``client`` stored on the current project.
+
+ :rtype: :class:`gcloud.resource_manager.client.Client`
+ :returns: The client passed in or the currently bound client.
+ """
+        if client is None:
+            client = self._client
+        return client
+
+
+    def create(self, client=None):
+ """API call: create the project via a ``POST`` request.
+
+ See
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/create
+
+ :type client: :class:`gcloud.resource_manager.client.Client` or
+ :data:`NoneType <types.NoneType>`
+ :param client: the client to use. If not passed, falls back to
+ the client stored on the current project.
+ """
+        client = self._require_client(client)
+
+        data = {
+            'projectId': self.project_id,
+            'name': self.name,
+            'labels': self.labels,
+        }
+        resp = client.connection.api_request(method='POST', path='/projects',
+                                             data=data)
+        self.set_properties_from_api_repr(resource=resp)
+
+
+    def reload(self, client=None):
+ """API call: reload the project via a ``GET`` request.
+
+ This method will reload the newest metadata for the project. If you've
+ created a new :class:`Project` instance via
+ :meth:`Client.new_project() \
+ <gcloud.resource_manager.client.Client.new_project>`,
+ this method will retrieve project metadata.
+
+ .. warning::
+
+ This will overwrite any local changes you've made and not saved
+ via :meth:`update`.
+
+ See
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/get
+
+ :type client: :class:`gcloud.resource_manager.client.Client` or
+ :data:`NoneType <types.NoneType>`
+ :param client: the client to use. If not passed, falls back to
+ the client stored on the current project.
+ """
+        client = self._require_client(client)
+
+        # We assume the project exists. If it doesn't, the request will
+        # raise a NotFound exception.
+        resp = client.connection.api_request(method='GET', path=self.path)
+        self.set_properties_from_api_repr(resource=resp)
+
+
+    def exists(self, client=None):
+ """API call: test the existence of a project via a ``GET`` request.
+
+ See
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/get
+
+ :type client: :class:`gcloud.resource_manager.client.Client` or
+ :data:`NoneType <types.NoneType>`
+ :param client: the client to use. If not passed, falls back to
+ the client stored on the current project.
+
+ :rtype: bool
+ :returns: Boolean indicating existence of the project.
+ """
+        client = self._require_client(client)
+
+        try:
+            # Note that we have to request the entire resource as the API
+            # doesn't provide a way to check for existence only.
+            client.connection.api_request(method='GET', path=self.path)
+        except NotFound:
+            return False
+        else:
+            return True
+
+
+    def update(self, client=None):
+ """API call: update the project via a ``PUT`` request.
+
+ See
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/update
+
+ :type client: :class:`gcloud.resource_manager.client.Client` or
+ :data:`NoneType <types.NoneType>`
+ :param client: the client to use. If not passed, falls back to
+ the client stored on the current project.
+ """
+        client = self._require_client(client)
+
+        data = {'name': self.name, 'labels': self.labels}
+        resp = client.connection.api_request(method='PUT', path=self.path,
+                                             data=data)
+        self.set_properties_from_api_repr(resp)
+
+
+    def delete(self, client=None, reload_data=False):
+ """API call: delete the project via a ``DELETE`` request.
+
+ See:
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/delete
+
+ This actually changes the status (``lifecycleState``) from ``ACTIVE``
+ to ``DELETE_REQUESTED``.
+ Later (it's not specified when), the project will move into the
+ ``DELETE_IN_PROGRESS`` state, which means the deleting has actually
+ begun.
+
+ :type client: :class:`gcloud.resource_manager.client.Client` or
+ :data:`NoneType <types.NoneType>`
+ :param client: the client to use. If not passed, falls back to
+ the client stored on the current project.
+
+ :type reload_data: bool
+ :param reload_data: Whether to reload the project with the latest
+ state. If you want to get the updated status,
+ you'll want this set to :data:`True` as the DELETE
+ method doesn't send back the updated project.
+ Default: :data:`False`.
+ """
+        client = self._require_client(client)
+        client.connection.api_request(method='DELETE', path=self.path)
+
+        # If the reload flag is set, reload the project.
+        if reload_data:
+            self.reload()
+
+
+    def undelete(self, client=None, reload_data=False):
+ """API call: undelete the project via a ``POST`` request.
+
+ See
+ https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects/undelete
+
+ This actually changes the project status (``lifecycleState``) from
+ ``DELETE_REQUESTED`` to ``ACTIVE``.
+ If the project has already reached a status of ``DELETE_IN_PROGRESS``,
+ this request will fail and the project cannot be restored.
+
+ :type client: :class:`gcloud.resource_manager.client.Client` or
+ :data:`NoneType <types.NoneType>`
+ :param client: the client to use. If not passed, falls back to
+ the client stored on the current project.
+
+ :type reload_data: bool
+ :param reload_data: Whether to reload the project with the latest
+ state. If you want to get the updated status,
+ you'll want this set to :data:`True` as the undelete
+ method doesn't send back the updated project.
+ Default: :data:`False`.
+ """
+        client = self._require_client(client)
+        client.connection.api_request(method='POST',
+                                      path=self.path + ':undelete')
+
+        # If the reload flag is set, reload the project.
+        if reload_data:
+            self.reload()
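+
+# A minimal lifecycle sketch (assumes a configured
+# :class:`gcloud.resource_manager.client.Client`; the project ID below is
+# illustrative):
+#
+#     project = client.new_project('purple-spaceship-123',
+#                                  name='Purple Spaceship')
+#     project.create()
+#     project.labels['color'] = 'purple'
+#     project.update()
+#     project.delete(reload_data=True)  # status becomes 'DELETE_REQUESTED'
+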
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Manipulate access control lists that Cloud Storage provides.
+
+:class:`gcloud.storage.bucket.Bucket` has a getter that creates
+an ACL object under the hood, and you can interact with that using
+:func:`gcloud.storage.bucket.Bucket.acl`::
+
+ >>> from gcloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket(bucket_name)
+ >>> acl = bucket.acl
+
+Adding and removing permissions can be done with the following methods
+(in increasing order of granularity):
+
+- :func:`ACL.all`
+ corresponds to access for all users.
+- :func:`ACL.all_authenticated` corresponds
+ to access for all users that are signed into a Google account.
+- :func:`ACL.domain` corresponds to access on a
+ per Google Apps domain (ie, ``example.com``).
+- :func:`ACL.group` corresponds to access on a
+ per group basis (either by ID or e-mail address).
+- :func:`ACL.user` corresponds to access on a
+ per user basis (either by ID or e-mail address).
+
+And you are able to ``grant`` and ``revoke`` the following roles:
+
+- **Reading**:
+ :func:`_ACLEntity.grant_read` and :func:`_ACLEntity.revoke_read`
+- **Writing**:
+ :func:`_ACLEntity.grant_write` and :func:`_ACLEntity.revoke_write`
+- **Owning**:
+ :func:`_ACLEntity.grant_owner` and :func:`_ACLEntity.revoke_owner`
+
+You can use any of these like any other factory method (these happen to
+be :class:`_ACLEntity` factories)::
+
+ >>> acl.user('me@example.org').grant_read()
+ >>> acl.all_authenticated().grant_write()
+
+You can also chain these ``grant_*`` and ``revoke_*`` methods together
+for brevity::
+
+ >>> acl.all().grant_read().revoke_write()
+
+After that, you can save any changes you make with the
+:func:`gcloud.storage.acl.ACL.save` method::
+
+ >>> acl.save()
+
+You can alternatively save any existing :class:`gcloud.storage.acl.ACL`
+object (whether it was created by a factory method or not) from a
+:class:`gcloud.storage.bucket.Bucket`::
+
+ >>> bucket.acl.save(acl=acl)
+
+To get the list of ``entity`` and ``role`` for each unique pair, the
+:class:`ACL` class is iterable::
+
+ >>> print list(acl)
+ [{'role': 'OWNER', 'entity': 'allUsers'}, ...]
+
+This list of dictionaries can be used as the ``entity`` and ``role`` fields
+when sending metadata for ACLs to the API.
+"""
+
+
+class _ACLEntity(object):
+ """Class representing a set of roles for an entity.
+
+ This is a helper class that you likely won't ever construct
+ outside of using the factory methods on the :class:`ACL` object.
+
+ :type entity_type: string
+ :param entity_type: The type of entity (ie, 'group' or 'user').
+
+ :type identifier: string
+ :param identifier: The ID or e-mail of the entity. For the special
+ entity types (like 'allUsers') this is optional.
+ """
+
+    READER_ROLE = 'READER'
+    WRITER_ROLE = 'WRITER'
+    OWNER_ROLE = 'OWNER'
+
+    def __init__(self, entity_type, identifier=None):
+        self.identifier = identifier
+        self.roles = set([])
+        self.type = entity_type
+
+    def __str__(self):
+        if not self.identifier:
+            return str(self.type)
+        else:
+            return '{acl.type}-{acl.identifier}'.format(acl=self)
+
+    def __repr__(self):
+        return '<ACL Entity: {acl} ({roles})>'.format(
+            acl=self, roles=', '.join(self.roles))
+
+    def get_roles(self):
+ """Get the list of roles permitted by this entity.
+
+ :rtype: list of strings
+ :returns: The list of roles associated with this entity.
+ """
+        return self.roles
+
+    def grant(self, role):
+ """Add a role to the entity.
+
+ :type role: string
+ :param role: The role to add to the entity.
+ """
+        self.roles.add(role)
+
+    def revoke(self, role):
+ """Remove a role from the entity.
+
+ :type role: string
+ :param role: The role to remove from the entity.
+ """
+        if role in self.roles:
+            self.roles.remove(role)
+
+    def grant_read(self):
+        """Grant read access to the current entity."""
+        self.grant(_ACLEntity.READER_ROLE)
+
+    def grant_write(self):
+        """Grant write access to the current entity."""
+        self.grant(_ACLEntity.WRITER_ROLE)
+
+    def grant_owner(self):
+        """Grant owner access to the current entity."""
+        self.grant(_ACLEntity.OWNER_ROLE)
+
+    def revoke_read(self):
+        """Revoke read access from the current entity."""
+        self.revoke(_ACLEntity.READER_ROLE)
+
+    def revoke_write(self):
+        """Revoke write access from the current entity."""
+        self.revoke(_ACLEntity.WRITER_ROLE)
+
+    def revoke_owner(self):
+        """Revoke owner access from the current entity."""
+        self.revoke(_ACLEntity.OWNER_ROLE)
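+
+# A minimal sketch of manipulating roles on an entity directly (entities are
+# normally obtained via the factory methods on :class:`ACL`):
+#
+#     entity = _ACLEntity('user', 'me@example.org')
+#     entity.grant_read()
+#     entity.revoke_write()
+#     assert _ACLEntity.READER_ROLE in entity.get_roles()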
+
+
+
+class ACL(object):
+ """Container class representing a list of access controls."""
+
+ _URL_PATH_ELEM='acl'
+ _PREDEFINED_QUERY_PARAM='predefinedAcl'
+
+ PREDEFINED_XML_ACLS={
+ # XML API name -> JSON API name
+ 'project-private':'projectPrivate',
+ 'public-read':'publicRead',
+ 'public-read-write':'publicReadWrite',
+ 'authenticated-read':'authenticatedRead',
+ 'bucket-owner-read':'bucketOwnerRead',
+ 'bucket-owner-full-control':'bucketOwnerFullControl',
+ }
+
+ PREDEFINED_JSON_ACLS=frozenset([
+ 'private',
+ 'projectPrivate',
+ 'publicRead',
+ 'publicReadWrite',
+ 'authenticatedRead',
+ 'bucketOwnerRead',
+ 'bucketOwnerFullControl',
+ ])
+ """See:
+ https://cloud.google.com/storage/docs/access-control#predefined-acl
+ """
+
+ loaded=False
+
+ # Subclasses must override to provide these attributes (typically,
+ # as properties).
+ reload_path=None
+ save_path=None
+
+ def__init__(self):
+ self.entities={}
+
+ def_ensure_loaded(self):
+ """Load if not already loaded."""
+ ifnotself.loaded:
+ self.reload()
+
+
+    def reset(self):
+        """Remove all entities from the ACL, and clear the ``loaded`` flag."""
+        self.entities.clear()
+        self.loaded = False
+
+    def entity_from_dict(self, entity_dict):
+        """Build an _ACLEntity object from a dictionary of data.
+
+        An entity is a mutable object that represents a list of roles
+        belonging to either a user or group, or to one of the special
+        types for all users and all authenticated users.
+
+        :type entity_dict: dict
+        :param entity_dict: Dictionary full of data from an ACL lookup.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: An Entity constructed from the dictionary.
+        """
+        entity = entity_dict['entity']
+        role = entity_dict['role']
+
+        if entity == 'allUsers':
+            entity = self.all()
+
+        elif entity == 'allAuthenticatedUsers':
+            entity = self.all_authenticated()
+
+        elif '-' in entity:
+            entity_type, identifier = entity.split('-', 1)
+            entity = self.entity(entity_type=entity_type,
+                                 identifier=identifier)
+
+        if not isinstance(entity, _ACLEntity):
+            raise ValueError('Invalid dictionary: %s' % entity_dict)
+
+        entity.grant(role)
+        return entity
+
+
+    def has_entity(self, entity):
+        """Returns whether or not this ACL has any entries for an entity.
+
+        :type entity: :class:`_ACLEntity`
+        :param entity: The entity to check for existence in this ACL.
+
+        :rtype: boolean
+        :returns: True if the entity exists in the ACL.
+        """
+        self._ensure_loaded()
+        return str(entity) in self.entities
+
+
+    def get_entity(self, entity, default=None):
+        """Gets an entity object from the ACL.
+
+        :type entity: :class:`_ACLEntity` or string
+        :param entity: The entity to look up in the ACL.
+
+        :type default: anything
+        :param default: This value will be returned if the entity
+                        doesn't exist.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: The corresponding entity or the value provided
+                  to ``default``.
+        """
+        self._ensure_loaded()
+        return self.entities.get(str(entity), default)
+
+
+    def add_entity(self, entity):
+        """Add an entity to the ACL.
+
+        :type entity: :class:`_ACLEntity`
+        :param entity: The entity to add to this ACL.
+        """
+        self._ensure_loaded()
+        self.entities[str(entity)] = entity
+
+    def entity(self, entity_type, identifier=None):
+        """Factory method for creating an Entity.
+
+        If an entity with the same type and identifier already exists,
+        this will return a reference to that entity.  If not, it will
+        create a new one and add it to the list of known entities for
+        this ACL.
+
+        :type entity_type: string
+        :param entity_type: The type of entity to create
+                            (i.e., ``user``, ``group``, etc.)
+
+        :type identifier: string
+        :param identifier: The ID of the entity (if applicable).
+                           This can be either an ID or an e-mail address.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: A new Entity or a reference to an existing identical entity.
+        """
+        entity = _ACLEntity(entity_type=entity_type, identifier=identifier)
+        if self.has_entity(entity):
+            entity = self.get_entity(entity)
+        else:
+            self.add_entity(entity)
+        return entity
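+
+    # Illustrative sketch (not part of the original source): because
+    # ``entity`` is get-or-create, calling it twice with the same type and
+    # identifier hands back the same object, so granted roles accumulate:
+    #
+    #     acl = ACL()
+    #     acl.loaded = True  # skip the API reload for this local demo
+    #     first = acl.entity('user', 'me@example.org')
+    #     first.grant_read()
+    #     second = acl.entity('user', 'me@example.org')
+    #     assert first is second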
+
+
+    def user(self, identifier):
+        """Factory method for a user Entity.
+
+        :type identifier: string
+        :param identifier: An id or e-mail for this particular user.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: An Entity corresponding to this user.
+        """
+        return self.entity('user', identifier=identifier)
+
+    def group(self, identifier):
+        """Factory method for a group Entity.
+
+        :type identifier: string
+        :param identifier: An id or e-mail for this particular group.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: An Entity corresponding to this group.
+        """
+        return self.entity('group', identifier=identifier)
+
+    def domain(self, domain):
+        """Factory method for a domain Entity.
+
+        :type domain: string
+        :param domain: The domain for this entity.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: An entity corresponding to this domain.
+        """
+        return self.entity('domain', identifier=domain)
+
+    def all(self):
+        """Factory method for an Entity representing all users.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: An entity representing all users.
+        """
+        return self.entity('allUsers')
+
+    def all_authenticated(self):
+        """Factory method for an Entity representing all authenticated users.
+
+        :rtype: :class:`_ACLEntity`
+        :returns: An entity representing all authenticated users.
+        """
+        return self.entity('allAuthenticatedUsers')
+
+    def get_entities(self):
+        """Get a list of all Entity objects.
+
+        :rtype: list of :class:`_ACLEntity` objects
+        :returns: A list of all Entity objects.
+        """
+        self._ensure_loaded()
+        return list(self.entities.values())
+
+    @property
+    def client(self):
+        """Abstract getter for the object client."""
+        raise NotImplementedError
+
+    def _require_client(self, client):
+        """Check client or verify over-ride.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: the client to use.  If not passed, falls back to the
+                       ``client`` stored on the current ACL.
+
+        :rtype: :class:`gcloud.storage.client.Client`
+        :returns: The client passed in or the currently bound client.
+        """
+        if client is None:
+            client = self.client
+        return client
+
+    def reload(self, client=None):
+        """Reload the ACL data from Cloud Storage.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+        """
+        path = self.reload_path
+        client = self._require_client(client)
+
+        self.entities.clear()
+
+        found = client.connection.api_request(method='GET', path=path)
+        self.loaded = True
+        for entry in found.get('items', ()):
+            self.add_entity(self.entity_from_dict(entry))
+
+    def _save(self, acl, predefined, client):
+        """Helper for :meth:`save` and :meth:`save_predefined`.
+
+        :type acl: :class:`gcloud.storage.acl.ACL`, or a compatible list.
+        :param acl: The ACL object to save.  If left blank, this will save
+                    current entries.
+
+        :type predefined: string or None
+        :param predefined: An identifier for a predefined ACL.  Must be one
+                           of the keys in :attr:`PREDEFINED_JSON_ACLS`.
+                           If passed, ``acl`` must be None.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+        """
+        query_params = {'projection': 'full'}
+        if predefined is not None:
+            acl = []
+            query_params[self._PREDEFINED_QUERY_PARAM] = predefined
+
+        path = self.save_path
+        client = self._require_client(client)
+        result = client.connection.api_request(
+            method='PATCH',
+            path=path,
+            data={self._URL_PATH_ELEM: list(acl)},
+            query_params=query_params)
+        self.entities.clear()
+        for entry in result.get(self._URL_PATH_ELEM, ()):
+            self.add_entity(self.entity_from_dict(entry))
+        self.loaded = True
+
+    def save(self, acl=None, client=None):
+        """Save this ACL for the current bucket.
+
+        :type acl: :class:`gcloud.storage.acl.ACL`, or a compatible list.
+        :param acl: The ACL object to save.  If left blank, this will save
+                    current entries.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+        """
+        if acl is None:
+            acl = self
+            save_to_backend = acl.loaded
+        else:
+            save_to_backend = True
+
+        if save_to_backend:
+            self._save(acl, None, client)
+
+
+    def save_predefined(self, predefined, client=None):
+        """Save this ACL for the current bucket using a predefined ACL.
+
+        :type predefined: string
+        :param predefined: An identifier for a predefined ACL.  Must be one
+                           of the keys in :attr:`PREDEFINED_JSON_ACLS`
+                           or :attr:`PREDEFINED_XML_ACLS` (which will be
+                           aliased to the corresponding JSON name).
+                           If passed, ``acl`` must be None.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+        """
+        predefined = self.PREDEFINED_XML_ACLS.get(predefined, predefined)
+
+        if predefined not in self.PREDEFINED_JSON_ACLS:
+            raise ValueError("Invalid predefined ACL: %s" % (predefined,))
+
+        self._save(None, predefined, client)
+
+    def clear(self, client=None):
+        """Remove all ACL entries.
+
+        Note that this won't actually remove *ALL* the rules, but it
+        will remove all the non-default rules.  In short, you'll still
+        have access to a bucket that you created even after you clear
+        ACL rules with this method.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+        """
+        self.save([], client=client)
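+
+    # Illustrative sketch (not part of the original source): XML-style names
+    # are aliased to their JSON equivalents before validation, so both of
+    # these calls would send ``predefinedAcl=publicRead``:
+    #
+    #     bucket.acl.save_predefined('public-read')
+    #     bucket.acl.save_predefined('publicRead')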
+
+
+
+class BucketACL(ACL):
+    """An ACL specifically for a bucket.
+
+    :type bucket: :class:`gcloud.storage.bucket.Bucket`
+    :param bucket: The bucket to which this ACL relates.
+    """
+
+    def __init__(self, bucket):
+        super(BucketACL, self).__init__()
+        self.bucket = bucket
+
+    @property
+    def client(self):
+        """The client bound to this ACL's bucket."""
+        return self.bucket.client
+
+    @property
+    def reload_path(self):
+        """Compute the path for GET API requests for this ACL."""
+        return '%s/%s' % (self.bucket.path, self._URL_PATH_ELEM)
+
+    @property
+    def save_path(self):
+        """Compute the path for PATCH API requests for this ACL."""
+        return self.bucket.path
+
+
+
+class DefaultObjectACL(BucketACL):
+    """A class representing the default object ACL for a bucket."""
+
+    _URL_PATH_ELEM = 'defaultObjectAcl'
+    _PREDEFINED_QUERY_PARAM = 'predefinedDefaultObjectAcl'
+
+
+
+class ObjectACL(ACL):
+    """An ACL specifically for a Cloud Storage object / blob.
+
+    :type blob: :class:`gcloud.storage.blob.Blob`
+    :param blob: The blob that this ACL corresponds to.
+    """
+
+    def __init__(self, blob):
+        super(ObjectACL, self).__init__()
+        self.blob = blob
+
+    @property
+    def client(self):
+        """The client bound to this ACL's blob."""
+        return self.blob.client
+
+    @property
+    def reload_path(self):
+        """Compute the path for GET API requests for this ACL."""
+        return '%s/acl' % self.blob.path
+
+    @property
+    def save_path(self):
+        """Compute the path for PATCH API requests for this ACL."""
+        return self.blob.path
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Batch updates / deletes of storage buckets / blobs.
+
+See: https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch
+"""
+from email.encoders import encode_noop
+from email.generator import Generator
+from email.mime.application import MIMEApplication
+from email.mime.multipart import MIMEMultipart
+from email.parser import Parser
+import io
+import json
+
+import httplib2
+import six
+
+from gcloud.exceptions import make_exception
+from gcloud.storage.connection import Connection
+
+
+
+class MIMEApplicationHTTP(MIMEApplication):
+    """MIME type for ``application/http``.
+
+    Constructs the payload from headers and body.
+
+    :type method: str
+    :param method: HTTP method
+
+    :type uri: str
+    :param uri: URI for HTTP request
+
+    :type headers: dict
+    :param headers: HTTP headers
+
+    :type body: str or None
+    :param body: HTTP payload
+    """
+    def __init__(self, method, uri, headers, body):
+        if isinstance(body, dict):
+            body = json.dumps(body)
+            headers['Content-Type'] = 'application/json'
+            headers['Content-Length'] = len(body)
+        if body is None:
+            body = ''
+        lines = ['%s %s HTTP/1.1' % (method, uri)]
+        lines.extend(['%s: %s' % (key, value)
+                      for key, value in sorted(headers.items())])
+        lines.append('')
+        lines.append(body)
+        payload = '\r\n'.join(lines)
+        if six.PY2:
+            # email.message.Message is an old-style class, so we
+            # cannot use 'super()'.
+            MIMEApplication.__init__(self, payload, 'http', encode_noop)
+        else:  # pragma: NO COVER  Python3
+            super_init = super(MIMEApplicationHTTP, self).__init__
+            super_init(payload, 'http', encode_noop)
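+
+# Illustrative sketch (not part of the original source): serializing a single
+# subrequest shows the ``application/http`` payload format used in batches.
+# The bucket/object names here are placeholders.
+#
+#     part = MIMEApplicationHTTP('DELETE', '/b/my-bucket/o/my-file.txt',
+#                                {}, None)
+#     print(part.get_payload())
+#     # -> 'DELETE /b/my-bucket/o/my-file.txt HTTP/1.1' followed by a blank
+#     #    line separating the (empty) header block from the (empty) body.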
+
+
+
+class NoContent(object):
+    """Emulate an HTTP '204 No Content' response."""
+    status = 204
+
+
+class _FutureDict(object):
+    """Class to hold a future value for a deferred request.
+
+    Used for requests that get sent in a :class:`Batch`.
+    """
+
+    @staticmethod
+    def get(key, default=None):
+        """Stand-in for dict.get.
+
+        :type key: object
+        :param key: Hashable dictionary key.
+
+        :type default: object
+        :param default: Fallback value to dict.get.
+
+        :raises: :class:`KeyError` always since the future is intended to fail
+                 as a dictionary.
+        """
+        raise KeyError('Cannot get(%r, default=%r) on a future' % (
+            key, default))
+
+    def __getitem__(self, key):
+        """Stand-in for dict[key].
+
+        :type key: object
+        :param key: Hashable dictionary key.
+
+        :raises: :class:`KeyError` always since the future is intended to fail
+                 as a dictionary.
+        """
+        raise KeyError('Cannot get item %r from a future' % (key,))
+
+    def __setitem__(self, key, value):
+        """Stand-in for dict[key] = value.
+
+        :type key: object
+        :param key: Hashable dictionary key.
+
+        :type value: object
+        :param value: Dictionary value.
+
+        :raises: :class:`KeyError` always since the future is intended to fail
+                 as a dictionary.
+        """
+        raise KeyError('Cannot set %r -> %r on a future' % (key, value))
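+
+# Illustrative sketch (not part of the original source): any read or write on
+# the placeholder fails loudly until the batch response replaces it.
+#
+#     future = _FutureDict()
+#     future.get('name')       # raises KeyError
+#     future['name'] = 'x'     # raises KeyError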
+
+
+
+class Batch(Connection):
+    """Proxy an underlying connection, batching up change operations.
+
+    :type client: :class:`gcloud.storage.client.Client`
+    :param client: The client to use for making connections.
+    """
+    _MAX_BATCH_SIZE = 1000
+
+    def __init__(self, client):
+        super(Batch, self).__init__()
+        self._client = client
+        self._requests = []
+        self._target_objects = []
+
+    def _do_request(self, method, url, headers, data, target_object):
+        """Override Connection:  defer actual HTTP request.
+
+        Only allow up to ``_MAX_BATCH_SIZE`` requests to be deferred.
+
+        :type method: str
+        :param method: The HTTP method to use in the request.
+
+        :type url: str
+        :param url: The URL to send the request to.
+
+        :type headers: dict
+        :param headers: A dictionary of HTTP headers to send with the request.
+
+        :type data: str
+        :param data: The data to send as the body of the request.
+
+        :type target_object: object or :class:`NoneType`
+        :param target_object: This allows us to enable custom behavior in our
+                              batch connection.  Here we defer an HTTP request
+                              and complete initialization of the object at a
+                              later time.
+
+        :rtype: tuple of ``response`` (a dictionary of sorts)
+                and ``content`` (a string).
+        :returns: The HTTP response object and the content of the response.
+        """
+        if len(self._requests) >= self._MAX_BATCH_SIZE:
+            raise ValueError("Too many deferred requests (max %d)" %
+                             self._MAX_BATCH_SIZE)
+        self._requests.append((method, url, headers, data))
+        result = _FutureDict()
+        self._target_objects.append(target_object)
+        if target_object is not None:
+            target_object._properties = result
+        return NoContent(), result
+
+    def _prepare_batch_request(self):
+        """Prepares headers and body for a batch request.
+
+        :rtype: tuple (dict, str)
+        :returns: The pair of headers and body of the batch request to be
+                  sent.
+        :raises: :class:`ValueError` if no requests have been deferred.
+        """
+        if len(self._requests) == 0:
+            raise ValueError("No deferred requests")
+
+        multi = MIMEMultipart()
+
+        for method, uri, headers, body in self._requests:
+            subrequest = MIMEApplicationHTTP(method, uri, headers, body)
+            multi.attach(subrequest)
+
+        # The `email` package expects to deal with "native" strings
+        if six.PY3:  # pragma: NO COVER  Python3
+            buf = io.StringIO()
+        else:
+            buf = io.BytesIO()
+        generator = Generator(buf, False, 0)
+        generator.flatten(multi)
+        payload = buf.getvalue()
+
+        # Strip off redundant header text
+        _, body = payload.split('\n\n', 1)
+        return dict(multi._headers), body
+
+    def _finish_futures(self, responses):
+        """Apply all the batch responses to the futures created.
+
+        :type responses: list of (headers, payload) tuples.
+        :param responses: List of headers and payloads from each response in
+                          the batch.
+
+        :raises: :class:`ValueError` if no requests have been deferred.
+        """
+        # If a bad status occurs, we track it, but don't raise an exception
+        # until all futures have been populated.
+        exception_args = None
+
+        if len(self._target_objects) != len(responses):
+            raise ValueError('Expected a response for every request.')
+
+        for target_object, sub_response in zip(self._target_objects,
+                                               responses):
+            resp_headers, sub_payload = sub_response
+            if not 200 <= resp_headers.status < 300:
+                exception_args = exception_args or (resp_headers,
+                                                    sub_payload)
+            elif target_object is not None:
+                target_object._properties = sub_payload
+
+        if exception_args is not None:
+            raise make_exception(*exception_args)
+
+
+    def finish(self):
+        """Submit a single `multipart/mixed` request w/ deferred requests.
+
+        :rtype: list of tuples
+        :returns: one ``(headers, payload)`` tuple per deferred request.
+        """
+        headers, body = self._prepare_batch_request()
+
+        url = '%s/batch' % self.API_BASE_URL
+
+        # Use the private ``_connection`` rather than the public
+        # ``.connection``, since the public connection may be this
+        # current batch.
+        response, content = self._client._connection._make_request(
+            'POST', url, data=body, headers=headers)
+        responses = list(_unpack_batch_response(response, content))
+        self._finish_futures(responses)
+        return responses
+
+    def current(self):
+        """Return the topmost batch, or None."""
+        return self._client.current_batch
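+
+# Illustrative sketch (not part of the original source): deferred requests
+# collect in the batch until ``finish()`` sends one multipart/mixed call.
+# The paths are placeholders.
+#
+#     batch = Batch(client)
+#     batch.api_request(method='DELETE', path='/b/my-bucket/o/file-1')
+#     batch.api_request(method='DELETE', path='/b/my-bucket/o/file-2')
+#     batch.finish()  # both DELETEs go out in a single HTTP request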
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Google Cloud Storage blobs."""
+
+import base64
+import copy
+import hashlib
+from io import BytesIO
+from io import UnsupportedOperation
+import json
+import mimetypes
+import os
+import time
+
+import httplib2
+import six
+from six.moves.urllib.parse import quote
+
+from gcloud._helpers import _rfc3339_to_datetime
+from gcloud._helpers import _to_bytes
+from gcloud._helpers import _bytes_to_unicode
+from gcloud.credentials import generate_signed_url
+from gcloud.exceptions import NotFound
+from gcloud.exceptions import make_exception
+from gcloud.storage._helpers import _PropertyMixin
+from gcloud.storage._helpers import _scalar_property
+from gcloud.storage.acl import ObjectACL
+from gcloud.streaming.http_wrapper import Request
+from gcloud.streaming.http_wrapper import make_api_request
+from gcloud.streaming.transfer import Download
+from gcloud.streaming.transfer import RESUMABLE_UPLOAD
+from gcloud.streaming.transfer import Upload
+
+
+_API_ACCESS_ENDPOINT = 'https://storage.googleapis.com'
+
+
+
+class Blob(_PropertyMixin):
+    """A wrapper around Cloud Storage's concept of an ``Object``.
+
+    :type name: string
+    :param name: The name of the blob.  This corresponds to the
+                 unique path of the object in the bucket.
+
+    :type bucket: :class:`gcloud.storage.bucket.Bucket`
+    :param bucket: The bucket to which this blob belongs.
+
+    :type chunk_size: integer
+    :param chunk_size: The size of a chunk of data whenever iterating (1 MB).
+                       This must be a multiple of 256 KB per the API
+                       specification.
+    """
+
+    _chunk_size = None  # Default value for each instance.
+
+    _CHUNK_SIZE_MULTIPLE = 256 * 1024
+    """Number (256 KB, in bytes) that must divide the chunk size."""
+
+    def __init__(self, name, bucket, chunk_size=None):
+        super(Blob, self).__init__(name=name)
+
+        self.chunk_size = chunk_size  # Check that setter accepts value.
+        self.bucket = bucket
+        self._acl = ObjectACL(self)
+
+    @property
+    def chunk_size(self):
+        """Get the blob's default chunk size.
+
+        :rtype: integer or ``NoneType``
+        :returns: The current blob's chunk size, if it is set.
+        """
+        return self._chunk_size
+
+    @chunk_size.setter
+    def chunk_size(self, value):
+        """Set the blob's default chunk size.
+
+        :type value: integer or ``NoneType``
+        :param value: The current blob's chunk size, if it is set.
+
+        :raises: :class:`ValueError` if ``value`` is not ``None`` and is not a
+                 multiple of 256 KB.
+        """
+        if value is not None and value % self._CHUNK_SIZE_MULTIPLE != 0:
+            raise ValueError('Chunk size must be a multiple of %d.' % (
+                self._CHUNK_SIZE_MULTIPLE,))
+        self._chunk_size = value
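+
+    # Illustrative sketch (not part of the original source): chunk sizes are
+    # validated against the 256 KB multiple rule enforced by the setter.
+    #
+    #     blob = Blob('data.bin', bucket, chunk_size=256 * 1024)  # OK
+    #     blob.chunk_size = 300 * 1024                            # ValueError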
+
+    @staticmethod
+    def path_helper(bucket_path, blob_name):
+        """Relative URL path for a blob.
+
+        :type bucket_path: string
+        :param bucket_path: The URL path for a bucket.
+
+        :type blob_name: string
+        :param blob_name: The name of the blob.
+
+        :rtype: string
+        :returns: The relative URL path for ``blob_name``.
+        """
+        return bucket_path + '/o/' + quote(blob_name, safe='')
+
+    @property
+    def acl(self):
+        """Create our ACL on demand."""
+        return self._acl
+
+    def __repr__(self):
+        if self.bucket:
+            bucket_name = self.bucket.name
+        else:
+            bucket_name = None
+
+        return '<Blob: %s, %s>' % (bucket_name, self.name)
+
+    @property
+    def path(self):
+        """Getter property for the URL path to this Blob.
+
+        :rtype: string
+        :returns: The URL path to this Blob.
+        """
+        if not self.name:
+            raise ValueError('Cannot determine path without a blob name.')
+
+        return self.path_helper(self.bucket.path, self.name)
+
+    @property
+    def client(self):
+        """The client bound to this blob."""
+        return self.bucket.client
+
+    @property
+    def public_url(self):
+        """The public URL for this blob's object.
+
+        :rtype: `string`
+        :returns: The public URL for this blob.
+        """
+        return '{storage_base_url}/{bucket_name}/{quoted_name}'.format(
+            storage_base_url='https://storage.googleapis.com',
+            bucket_name=self.bucket.name,
+            quoted_name=quote(self.name, safe=''))
+
+
+    def generate_signed_url(self, expiration, method='GET',
+                            content_type=None,
+                            generation=None, response_disposition=None,
+                            response_type=None, client=None,
+                            credentials=None):
+        """Generates a signed URL for this blob.
+
+        .. note::
+
+            If you are on Google Compute Engine, you can't generate a signed
+            URL.  Follow `Issue 922`_ for updates on this.  If you'd like to
+            be able to generate a signed URL from GCE, you can use a standard
+            service account from a JSON file rather than a GCE service
+            account.
+
+        .. _Issue 922: https://github.com/GoogleCloudPlatform/\
+                       gcloud-python/issues/922
+
+        If you have a blob that you want to allow access to for a set
+        amount of time, you can use this method to generate a URL that
+        is only valid within a certain time period.
+
+        This is particularly useful if you don't want publicly
+        accessible blobs, but don't want to require users to explicitly
+        log in.
+
+        :type expiration: int, long, datetime.datetime, datetime.timedelta
+        :param expiration: When the signed URL should expire.
+
+        :type method: str
+        :param method: The HTTP verb that will be used when requesting the
+                       URL.
+
+        :type content_type: str
+        :param content_type: (Optional) The content type of the object
+                             referenced by ``resource``.
+
+        :type generation: str
+        :param generation: (Optional) A value that indicates which generation
+                           of the resource to fetch.
+
+        :type response_disposition: str
+        :param response_disposition: (Optional) Content disposition of
+                                     responses to requests for the signed URL.
+                                     For example, to enable the signed URL
+                                     to initiate a download of ``blob.png``,
+                                     use the value
+                                     ``'attachment; filename=blob.png'``.
+
+        :type response_type: str
+        :param response_type: (Optional) Content type of responses to requests
+                              for the signed URL.  Used to over-ride the
+                              content type of the underlying blob/object.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: (Optional) The client to use.  If not passed, falls
+                       back to the ``client`` stored on the blob's bucket.
+
+        :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+                           :class:`NoneType`
+        :param credentials: (Optional) The OAuth2 credentials to use to sign
+                            the URL.  Defaults to the credentials stored on
+                            the client used.
+
+        :rtype: str
+        :returns: A signed URL you can use to access the resource
+                  until expiration.
+        """
+        resource = '/{bucket_name}/{quoted_name}'.format(
+            bucket_name=self.bucket.name,
+            quoted_name=quote(self.name, safe=''))
+
+        if credentials is None:
+            client = self._require_client(client)
+            credentials = client._connection.credentials
+
+        return generate_signed_url(
+            credentials, resource=resource,
+            api_access_endpoint=_API_ACCESS_ENDPOINT,
+            expiration=expiration, method=method,
+            content_type=content_type,
+            response_type=response_type,
+            response_disposition=response_disposition,
+            generation=generation)
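+
+    # Illustrative sketch (not part of the original source): signing a URL
+    # that stays valid for one hour, using the client's stored credentials.
+    #
+    #     import datetime
+    #     url = blob.generate_signed_url(
+    #         expiration=datetime.timedelta(hours=1), method='GET')
+    #     # `url` can now be handed to users without requiring a login.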
+
+
+    def exists(self, client=None):
+        """Determines whether or not this blob exists.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+
+        :rtype: boolean
+        :returns: True if the blob exists in Cloud Storage.
+        """
+        client = self._require_client(client)
+        try:
+            # We only need the status code (200 or not) so we seek to
+            # minimize the returned payload.
+            query_params = {'fields': 'name'}
+            # We intentionally pass `_target_object=None` since fields=name
+            # would limit the local properties.
+            client.connection.api_request(method='GET', path=self.path,
+                                          query_params=query_params,
+                                          _target_object=None)
+            # NOTE: This will not fail immediately in a batch.  However, when
+            #       Batch.finish() is called, the resulting `NotFound` will be
+            #       raised.
+            return True
+        except NotFound:
+            return False
+
+    def delete(self, client=None):
+        """Deletes a blob from Cloud Storage.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+
+        :rtype: :class:`Blob`
+        :returns: The blob that was just deleted.
+        :raises: :class:`gcloud.exceptions.NotFound`
+                 (propagated from
+                 :meth:`gcloud.storage.bucket.Bucket.delete_blob`).
+        """
+        return self.bucket.delete_blob(self.name, client=client)
+
+
+    def download_to_file(self, file_obj, encryption_key=None, client=None):
+        """Download the contents of this blob into a file-like object.
+
+        .. note::
+
+           If the server-set property, :attr:`media_link`, is not yet
+           initialized, makes an additional API request to load it.
+
+        Downloading a file that has been encrypted with a
+        `customer-supplied`_ encryption key::
+
+            >>> from gcloud import storage
+            >>> from gcloud.storage import Blob
+
+            >>> client = storage.Client(project='my-project')
+            >>> bucket = client.get_bucket('my-bucket')
+            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
+            >>> blob = Blob('secure-data', bucket)
+            >>> with open('/tmp/my-secure-file', 'wb') as file_obj:
+            ...     blob.download_to_file(file_obj,
+            ...                           encryption_key=encryption_key)
+
+        The ``encryption_key`` should be a str or bytes with a length of at
+        least 32.
+
+        .. _customer-supplied: https://cloud.google.com/storage/docs/\
+                               encryption#customer-supplied
+
+        :type file_obj: file
+        :param file_obj: A file handle to which to write the blob's data.
+
+        :type encryption_key: str or bytes
+        :param encryption_key: Optional 32 byte encryption key for
+                               customer-supplied encryption.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+
+        :raises: :class:`gcloud.exceptions.NotFound`
+        """
+        client = self._require_client(client)
+        if self.media_link is None:  # not yet loaded
+            self.reload()
+
+        download_url = self.media_link
+
+        # Use apitools 'Download' facility.
+        download = Download.from_stream(file_obj)
+
+        if self.chunk_size is not None:
+            download.chunksize = self.chunk_size
+
+        headers = {}
+        if encryption_key:
+            _set_encryption_headers(encryption_key, headers)
+
+        request = Request(download_url, 'GET', headers)
+
+        # Use the private ``_connection`` rather than the public
+        # ``.connection``, since the public connection may be a batch.  A
+        # batch wraps a client's connection, but does not store the `http`
+        # object.  The rest (API_BASE_URL and build_api_url) are also defined
+        # on the Batch class, but we just use the wrapped connection since
+        # it has all three (http, API_BASE_URL and build_api_url).
+        download.initialize_download(request, client._connection.http)
+
+
+    def download_to_filename(self, filename, encryption_key=None,
+                             client=None):
+        """Download the contents of this blob into a named file.
+
+        :type filename: string
+        :param filename: A filename to be passed to ``open``.
+
+        :type encryption_key: str or bytes
+        :param encryption_key: Optional 32 byte encryption key for
+                               customer-supplied encryption.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+
+        :raises: :class:`gcloud.exceptions.NotFound`
+        """
+        with open(filename, 'wb') as file_obj:
+            self.download_to_file(file_obj, encryption_key=encryption_key,
+                                  client=client)
+
+        mtime = time.mktime(self.updated.timetuple())
+        os.utime(file_obj.name, (mtime, mtime))
+
+    def download_as_string(self, encryption_key=None, client=None):
+        """Download the contents of this blob as a string.
+
+        :type encryption_key: str or bytes
+        :param encryption_key: Optional 32 byte encryption key for
+                               customer-supplied encryption.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+
+        :rtype: bytes
+        :returns: The data stored in this blob.
+        :raises: :class:`gcloud.exceptions.NotFound`
+        """
+        string_buffer = BytesIO()
+        self.download_to_file(string_buffer, encryption_key=encryption_key,
+                              client=client)
+        return string_buffer.getvalue()
+
+    def upload_from_file(self, file_obj, rewind=False, size=None,
+                         encryption_key=None, content_type=None,
+                         num_retries=6, client=None):
+        """Upload the contents of this blob from a file-like object.
+
+        The content type of the upload will either be
+        - The value passed in to the function (if any)
+        - The value stored on the current blob
+        - The default value ('application/octet-stream')
+
+        .. note::
+           The effect of uploading to an existing blob depends on the
+           "versioning" and "lifecycle" policies defined on the blob's
+           bucket.  In the absence of those policies, upload will
+           overwrite any existing contents.
+
+           See the `object versioning
+           <https://cloud.google.com/storage/docs/object-versioning>`_ and
+           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
+           API documents for details.
+
+        Uploading a file with a `customer-supplied`_ encryption key::
+
+            >>> from gcloud import storage
+            >>> from gcloud.storage import Blob
+
+            >>> client = storage.Client(project='my-project')
+            >>> bucket = client.get_bucket('my-bucket')
+            >>> encryption_key = 'aa426195405adee2c8081bb9e7e74b19'
+            >>> blob = Blob('secure-data', bucket)
+            >>> with open('my-file', 'rb') as my_file:
+            ...     blob.upload_from_file(my_file,
+            ...                           encryption_key=encryption_key)
+
+        The ``encryption_key`` should be a str or bytes with a length of at
+        least 32.
+
+        .. _customer-supplied: https://cloud.google.com/storage/docs/\
+                               encryption#customer-supplied
+
+        :type file_obj: file
+        :param file_obj: A file handle open for reading.
+
+        :type rewind: boolean
+        :param rewind: If True, seek to the beginning of the file handle
+                       before writing the file to Cloud Storage.
+
+        :type size: int
+        :param size: The number of bytes to read from the file handle.
+                     If not provided, we'll try to guess the size using
+                     :func:`os.fstat`.  (If the file handle is not from the
+                     filesystem this won't be possible.)
+
+        :type encryption_key: str or bytes
+        :param encryption_key: Optional 32 byte encryption key for
+                               customer-supplied encryption.
+
+        :type content_type: string or ``NoneType``
+        :param content_type: Optional type of content being uploaded.
+
+        :type num_retries: integer
+        :param num_retries: Number of upload retries.  Defaults to 6.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+
+        :raises: :class:`ValueError` if size is not passed in and can not be
+                 determined; :class:`gcloud.exceptions.GCloudError` if the
+                 upload response returns an error status.
+        """
+        # pylint: disable=too-many-locals
+        client = self._require_client(client)
+        # Use the private ``_connection`` rather than the public
+        # ``.connection``, since the public connection may be a batch.  A
+        # batch wraps a client's connection, but does not store the `http`
+        # object.  The rest (API_BASE_URL and build_api_url) are also defined
+        # on the Batch class, but we just use the wrapped connection since
+        # it has all three (http, API_BASE_URL and build_api_url).
+        connection = client._connection
+        content_type = (content_type or self._properties.get('contentType') or
+                        'application/octet-stream')
+
+        # Rewind the file if desired.
+        if rewind:
+            file_obj.seek(0, os.SEEK_SET)
+
+        # Get the basic stats about the file.
+        total_bytes = size
+        if total_bytes is None:
+            if hasattr(file_obj, 'fileno'):
+                try:
+                    total_bytes = os.fstat(file_obj.fileno()).st_size
+                except (OSError, UnsupportedOperation):
+                    pass  # Assuming fd is not an actual file (maybe socket).
+
+        headers = {
+            'Accept': 'application/json',
+            'Accept-Encoding': 'gzip, deflate',
+            'User-Agent': connection.USER_AGENT,
+        }
+
+        if encryption_key:
+            _set_encryption_headers(encryption_key, headers)
+
+        upload = Upload(file_obj, content_type, total_bytes,
+                        auto_transfer=False)
+
+        if self.chunk_size is not None:
+            upload.chunksize = self.chunk_size
+
+            if total_bytes is None:
+                upload.strategy = RESUMABLE_UPLOAD
+        elif total_bytes is None:
+            raise ValueError('total bytes could not be determined. Please '
+                             'pass an explicit size, or supply a chunk size '
+                             'for a streaming transfer.')
+
+        url_builder = _UrlBuilder(bucket_name=self.bucket.name,
+                                  object_name=self.name)
+        upload_config = _UploadConfig()
+
+        # Temporary URL, until we know simple vs. resumable.
+        base_url = connection.API_BASE_URL + '/upload'
+        upload_url = connection.build_api_url(api_base_url=base_url,
+                                              path=self.bucket.path + '/o')
+
+        # Use apitools 'Upload' facility.
+        request = Request(upload_url, 'POST', headers)
+
+        upload.configure_request(upload_config, request, url_builder)
+        query_params = url_builder.query_params
+        base_url = connection.API_BASE_URL + '/upload'
+        request.url = connection.build_api_url(api_base_url=base_url,
+                                               path=self.bucket.path + '/o',
+                                               query_params=query_params)
+        upload.initialize_upload(request, connection.http)
+
+        if upload.strategy == RESUMABLE_UPLOAD:
+            http_response = upload.stream_file(use_chunks=True)
+        else:
+            http_response = make_api_request(connection.http, request,
+                                             retries=num_retries)
+
+        self._check_response_error(request, http_response)
+        response_content = http_response.content
+
+        if not isinstance(response_content,
+                          six.string_types):  # pragma: NO COVER  Python3
+            response_content = response_content.decode('utf-8')
+        self._set_properties(json.loads(response_content))
+        # pylint: enable=too-many-locals
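+
+    # Illustrative sketch (not part of the original source): uploading from a
+    # stream of unknown size requires a chunk size so the resumable strategy
+    # is selected (see the total_bytes / chunk_size checks above).  The
+    # stream name below is a placeholder.
+    #
+    #     blob = Blob('log-dump', bucket, chunk_size=1024 * 1024)  # 1 MB
+    #     blob.upload_from_file(some_socket_like_stream, size=None)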
+
+
+    def upload_from_filename(self, filename, content_type=None,
+                             encryption_key=None, client=None):
+        """Upload this blob's contents from the content of a named file.
+
+        The content type of the upload will either be
+        - The value passed in to the function (if any)
+        - The value stored on the current blob
+        - The value given by mimetypes.guess_type
+
+        .. note::
+           The effect of uploading to an existing blob depends on the
+           "versioning" and "lifecycle" policies defined on the blob's
+           bucket.  In the absence of those policies, upload will
+           overwrite any existing contents.
+
+           See the `object versioning
+           <https://cloud.google.com/storage/docs/object-versioning>`_ and
+           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
+           API documents for details.
+
+        :type filename: string
+        :param filename: The path to the file.
+
+        :type content_type: string or ``NoneType``
+        :param content_type: Optional type of content being uploaded.
+
+        :type encryption_key: str or bytes
+        :param encryption_key: Optional 32 byte encryption key for
+                               customer-supplied encryption.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+        """
+        content_type = content_type or self._properties.get('contentType')
+        if content_type is None:
+            content_type, _ = mimetypes.guess_type(filename)
+
+        with open(filename, 'rb') as file_obj:
+            self.upload_from_file(file_obj, content_type=content_type,
+                                  encryption_key=encryption_key,
+                                  client=client)
+
+
+    def upload_from_string(self, data, content_type='text/plain',
+                           encryption_key=None, client=None):
+        """Upload contents of this blob from the provided string.
+
+        .. note::
+           The effect of uploading to an existing blob depends on the
+           "versioning" and "lifecycle" policies defined on the blob's
+           bucket.  In the absence of those policies, upload will
+           overwrite any existing contents.
+
+           See the `object versioning
+           <https://cloud.google.com/storage/docs/object-versioning>`_ and
+           `lifecycle <https://cloud.google.com/storage/docs/lifecycle>`_
+           API documents for details.
+
+        :type data: bytes or text
+        :param data: The data to store in this blob.  If the value is
+                     text, it will be encoded as UTF-8.
+
+        :type content_type: string
+        :param content_type: Optional type of content being uploaded.
+                             Defaults to ``'text/plain'``.
+
+        :type encryption_key: str or bytes
+        :param encryption_key: Optional 32 byte encryption key for
+                               customer-supplied encryption.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+        """
+        if isinstance(data, six.text_type):
+            data = data.encode('utf-8')
+        string_buffer = BytesIO()
+        string_buffer.write(data)
+        self.upload_from_file(file_obj=string_buffer, rewind=True,
+                              size=len(data), content_type=content_type,
+                              encryption_key=encryption_key, client=client)
+
+    def make_public(self, client=None):
+        """Make this blob public giving all users read access.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+        """
+        self.acl.all().grant_read()
+        self.acl.save(client=client)
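+
+    # Illustrative sketch (not part of the original source): the simplest
+    # write path, which wraps the data in a BytesIO and re-uses
+    # upload_from_file under the hood.
+    #
+    #     blob = bucket.blob('greeting.txt')
+    #     blob.upload_from_string(u'hello world')  # encoded as UTF-8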
+
+    cache_control = _scalar_property('cacheControl')
+    """HTTP 'Cache-Control' header for this object.
+
+    See: https://tools.ietf.org/html/rfc7234#section-5.2 and
+         https://cloud.google.com/storage/docs/json_api/v1/objects
+
+    If the property is not set locally, returns ``None``.
+
+    :rtype: string or ``NoneType``
+    """
+
+    content_disposition = _scalar_property('contentDisposition')
+    """HTTP 'Content-Disposition' header for this object.
+
+    See: https://tools.ietf.org/html/rfc6266 and
+         https://cloud.google.com/storage/docs/json_api/v1/objects
+
+    If the property is not set locally, returns ``None``.
+
+    :rtype: string or ``NoneType``
+    """
+
+    content_encoding = _scalar_property('contentEncoding')
+    """HTTP 'Content-Encoding' header for this object.
+
+    See: https://tools.ietf.org/html/rfc7231#section-3.1.2.2 and
+         https://cloud.google.com/storage/docs/json_api/v1/objects
+
+    If the property is not set locally, returns ``None``.
+
+    :rtype: string or ``NoneType``
+    """
+
+    content_language = _scalar_property('contentLanguage')
+    """HTTP 'Content-Language' header for this object.
+
+    See: http://tools.ietf.org/html/bcp47 and
+         https://cloud.google.com/storage/docs/json_api/v1/objects
+
+    If the property is not set locally, returns ``None``.
+
+    :rtype: string or ``NoneType``
+    """
+
+    content_type = _scalar_property('contentType')
+    """HTTP 'Content-Type' header for this object.
+
+    See: https://tools.ietf.org/html/rfc2616#section-14.17 and
+         https://cloud.google.com/storage/docs/json_api/v1/objects
+
+    If the property is not set locally, returns ``None``.
+
+    :rtype: string or ``NoneType``
+    """
+
+    crc32c = _scalar_property('crc32c')
+    """CRC32C checksum for this object.
+
+    See: http://tools.ietf.org/html/rfc4960#appendix-B and
+         https://cloud.google.com/storage/docs/json_api/v1/objects
+
+    If the property is not set locally, returns ``None``.
+
+    :rtype: string or ``NoneType``
+    """
+
+    @property
+    def component_count(self):
+        """Number of underlying components that make up this object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: integer or ``NoneType``
+        :returns: The component count (in case of a composed object) or
+                  ``None`` if the property is not set locally.  This property
+                  will not be set on objects not created via ``compose``.
+        """
+        component_count = self._properties.get('componentCount')
+        if component_count is not None:
+            return int(component_count)
+
+    @property
+    def etag(self):
+        """Retrieve the ETag for the object.
+
+        See: http://tools.ietf.org/html/rfc2616#section-3.11 and
+             https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: string or ``NoneType``
+        :returns: The blob etag or ``None`` if the property is not set
+                  locally.
+        """
+        return self._properties.get('etag')
+
+    @property
+    def generation(self):
+        """Retrieve the generation for the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: integer or ``NoneType``
+        :returns: The generation of the blob or ``None`` if the property
+                  is not set locally.
+        """
+        generation = self._properties.get('generation')
+        if generation is not None:
+            return int(generation)
+
+    @property
+    def id(self):
+        """Retrieve the ID for the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: string or ``NoneType``
+        :returns: The ID of the blob or ``None`` if the property is not
+                  set locally.
+        """
+        return self._properties.get('id')
+
+    md5_hash = _scalar_property('md5Hash')
+    """MD5 hash for this object.
+
+    See: https://tools.ietf.org/html/rfc1321 and
+         https://cloud.google.com/storage/docs/json_api/v1/objects
+
+    If the property is not set locally, returns ``None``.
+
+    :rtype: string or ``NoneType``
+    """
+
+    @property
+    def media_link(self):
+        """Retrieve the media download URI for the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: string or ``NoneType``
+        :returns: The media link for the blob or ``None`` if the property is
+                  not set locally.
+        """
+        return self._properties.get('mediaLink')
+
+    @property
+    def metadata(self):
+        """Retrieve arbitrary/application specific metadata for the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: dict or ``NoneType``
+        :returns: The metadata associated with the blob or ``None`` if the
+                  property is not set locally.
+        """
+        return copy.deepcopy(self._properties.get('metadata'))
+
+    @metadata.setter
+    def metadata(self, value):
+        """Update arbitrary/application specific metadata for the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :type value: dict or ``NoneType``
+        :param value: The blob metadata to set.
+        """
+        self._patch_property('metadata', value)
+
+    @property
+    def metageneration(self):
+        """Retrieve the metageneration for the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: integer or ``NoneType``
+        :returns: The metageneration of the blob or ``None`` if the property
+                  is not set locally.
+        """
+        metageneration = self._properties.get('metageneration')
+        if metageneration is not None:
+            return int(metageneration)
+
+    @property
+    def owner(self):
+        """Retrieve info about the owner of the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: dict or ``NoneType``
+        :returns: Mapping of owner's role/ID.  If the property is not set
+                  locally, returns ``None``.
+        """
+        return copy.deepcopy(self._properties.get('owner'))
+
+    @property
+    def self_link(self):
+        """Retrieve the URI for the object.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: string or ``NoneType``
+        :returns: The self link for the blob or ``None`` if the property is
+                  not set locally.
+        """
+        return self._properties.get('selfLink')
+
+    @property
+    def size(self):
+        """Size of the object, in bytes.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: integer or ``NoneType``
+        :returns: The size of the blob or ``None`` if the property
+                  is not set locally.
+        """
+        size = self._properties.get('size')
+        if size is not None:
+            return int(size)
+
+    @property
+    def storage_class(self):
+        """Retrieve the storage class for the object.
+
+        See: https://cloud.google.com/storage/docs/storage-classes
+             https://cloud.google.com/storage/docs/nearline-storage
+             https://cloud.google.com/storage/docs/durable-reduced-availability
+
+        :rtype: string or ``NoneType``
+        :returns: If set, one of "STANDARD", "NEARLINE", or
+                  "DURABLE_REDUCED_AVAILABILITY", else ``None``.
+        """
+        return self._properties.get('storageClass')
+
+    @property
+    def time_deleted(self):
+        """Retrieve the timestamp at which the object was deleted.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: :class:`datetime.datetime` or ``NoneType``
+        :returns: Datetime object parsed from RFC3339 valid timestamp, or
+                  ``None`` if the property is not set locally.  If the blob
+                  has not been deleted, this will never be set.
+        """
+        value = self._properties.get('timeDeleted')
+        if value is not None:
+            return _rfc3339_to_datetime(value)
+
+    @property
+    def updated(self):
+        """Retrieve the timestamp at which the object was updated.
+
+        See: https://cloud.google.com/storage/docs/json_api/v1/objects
+
+        :rtype: :class:`datetime.datetime` or ``NoneType``
+        :returns: Datetime object parsed from RFC3339 valid timestamp, or
+                  ``None`` if the property is not set locally.
+        """
+        value = self._properties.get('updated')
+        if value is not None:
+            return _rfc3339_to_datetime(value)
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud storage buckets."""
+
+import copy
+
+import six
+
+from gcloud._helpers import _rfc3339_to_datetime
+from gcloud.exceptions import NotFound
+from gcloud.iterator import Iterator
+from gcloud.storage._helpers import _PropertyMixin
+from gcloud.storage._helpers import _scalar_property
+from gcloud.storage.acl import BucketACL
+from gcloud.storage.acl import DefaultObjectACL
+from gcloud.storage.blob import Blob
+
+
+class _BlobIterator(Iterator):
+    """An iterator listing blobs in a bucket.
+
+    You shouldn't have to use this directly, but instead should use the
+    :meth:`gcloud.storage.bucket.Bucket.list_blobs` method.
+
+    :type bucket: :class:`gcloud.storage.bucket.Bucket`
+    :param bucket: The bucket from which to list blobs.
+
+    :type extra_params: dict or None
+    :param extra_params: Extra query string parameters for the API call.
+
+    :type client: :class:`gcloud.storage.client.Client`
+    :param client: Optional. The client to use for making connections.
+                   Defaults to the bucket's client.
+    """
+    def __init__(self, bucket, extra_params=None, client=None):
+        if client is None:
+            client = bucket.client
+        self.bucket = bucket
+        self.prefixes = set()
+        self._current_prefixes = None
+        super(_BlobIterator, self).__init__(
+            client=client, path=bucket.path + '/o',
+            extra_params=extra_params)
+
+    def get_items_from_response(self, response):
+        """Yield :class:`.storage.blob.Blob` items from response.
+
+        :type response: dict
+        :param response: The JSON API response for a page of blobs.
+        """
+        self._current_prefixes = tuple(response.get('prefixes', ()))
+        self.prefixes.update(self._current_prefixes)
+        for item in response.get('items', []):
+            name = item.get('name')
+            blob = Blob(name, bucket=self.bucket)
+            blob._set_properties(item)
+            yield blob
+
+
+
+class Bucket(_PropertyMixin):
+    """A class representing a Bucket on Cloud Storage.
+
+    :type client: :class:`gcloud.storage.client.Client`
+    :param client: A client which holds credentials and project configuration
+                   for the bucket (which requires a project).
+
+    :type name: string
+    :param name: The name of the bucket.
+    """
+    _iterator_class = _BlobIterator
+
+    _MAX_OBJECTS_FOR_ITERATION = 256
+    """Maximum number of existing objects allowed in iteration.
+
+    This is used in Bucket.delete() and Bucket.make_public().
+    """
+
+    _STORAGE_CLASSES = ('STANDARD', 'NEARLINE',
+                        'DURABLE_REDUCED_AVAILABILITY')
+
+    def __init__(self, client, name=None):
+        super(Bucket, self).__init__(name=name)
+        self._client = client
+        self._acl = BucketACL(self)
+        self._default_object_acl = DefaultObjectACL(self)
+
+    def __repr__(self):
+        return '<Bucket: %s>' % self.name
+
+    @property
+    def client(self):
+        """The client bound to this bucket."""
+        return self._client
+
+
+    def blob(self, blob_name, chunk_size=None):
+        """Factory constructor for blob object.
+
+        .. note::
+           This will not make an HTTP request; it simply instantiates
+           a blob object owned by this bucket.
+
+        :type blob_name: string
+        :param blob_name: The name of the blob to be instantiated.
+
+        :type chunk_size: integer
+        :param chunk_size: The size of a chunk of data whenever iterating
+                           (1 MB).  This must be a multiple of 256 KB per
+                           the API specification.
+
+        :rtype: :class:`gcloud.storage.blob.Blob`
+        :returns: The blob object created.
+        """
+        return Blob(name=blob_name, bucket=self, chunk_size=chunk_size)
+
+
+    def exists(self, client=None):
+        """Determines whether or not this bucket exists.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the current bucket.
+
+        :rtype: boolean
+        :returns: True if the bucket exists in Cloud Storage.
+        """
+        client = self._require_client(client)
+        try:
+            # We only need the status code (200 or not) so we seek to
+            # minimize the returned payload.
+            query_params = {'fields': 'name'}
+            # We intentionally pass `_target_object=None` since fields=name
+            # would limit the local properties.
+            client.connection.api_request(method='GET', path=self.path,
+                                          query_params=query_params,
+                                          _target_object=None)
+            # NOTE: This will not fail immediately in a batch.  However, when
+            #       Batch.finish() is called, the resulting `NotFound` will be
+            #       raised.
+            return True
+        except NotFound:
+            return False
+
+
+    def create(self, client=None):
+        """Creates current bucket.
+
+        If the bucket already exists, will raise
+        :class:`gcloud.exceptions.Conflict`.
+
+        This implements "storage.buckets.insert".
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the current bucket.
+        """
+        client = self._require_client(client)
+        query_params = {'project': client.project}
+        properties = dict(
+            (key, self._properties[key]) for key in self._changes)
+        properties['name'] = self.name
+        api_response = client.connection.api_request(
+            method='POST', path='/b', query_params=query_params,
+            data=properties, _target_object=self)
+        self._set_properties(api_response)
+
+    @staticmethod
+    def path_helper(bucket_name):
+        """Relative URL path for a bucket.
+
+        :type bucket_name: string
+        :param bucket_name: The bucket name in the path.
+
+        :rtype: string
+        :returns: The relative URL path for ``bucket_name``.
+        """
+        return '/b/' + bucket_name
+
+    @property
+    def path(self):
+        """The URL path to this bucket."""
+        if not self.name:
+            raise ValueError('Cannot determine path without bucket name.')
+
+        return self.path_helper(self.name)
+
+
+    def get_blob(self, blob_name, client=None):
+        """Get a blob object by name.
+
+        This will return None if the blob doesn't exist::
+
+            >>> from gcloud import storage
+            >>> client = storage.Client()
+            >>> bucket = client.get_bucket('my-bucket')
+            >>> print(bucket.get_blob('/path/to/blob.txt'))
+            <Blob: my-bucket, /path/to/blob.txt>
+            >>> print(bucket.get_blob('/does-not-exist.txt'))
+            None
+
+        :type blob_name: string
+        :param blob_name: The name of the blob to retrieve.
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the current bucket.
+
+        :rtype: :class:`gcloud.storage.blob.Blob` or None
+        :returns: The blob object if it exists, otherwise None.
+        """
+        client = self._require_client(client)
+        blob = Blob(bucket=self, name=blob_name)
+        try:
+            response = client.connection.api_request(
+                method='GET', path=blob.path, _target_object=blob)
+            # NOTE: We assume response.get('name') matches `blob_name`.
+            blob._set_properties(response)
+            # NOTE: This will not fail immediately in a batch.  However, when
+            #       Batch.finish() is called, the resulting `NotFound` will be
+            #       raised.
+            return blob
+        except NotFound:
+            return None
+
+
+    def list_blobs(self, max_results=None, page_token=None, prefix=None,
+                   delimiter=None, versions=None,
+                   projection='noAcl', fields=None, client=None):
+        """Return an iterator used to find blobs in the bucket.
+
+        :type max_results: integer or ``NoneType``
+        :param max_results: maximum number of blobs to return.
+
+        :type page_token: string
+        :param page_token: opaque marker for the next "page" of blobs.  If
+                           not passed, will return the first page of blobs.
+
+        :type prefix: string or ``NoneType``
+        :param prefix: optional prefix used to filter blobs.
+
+        :type delimiter: string or ``NoneType``
+        :param delimiter: optional delimiter, used with ``prefix`` to
+                          emulate hierarchy.
+
+        :type versions: boolean or ``NoneType``
+        :param versions: whether object versions should be returned as
+                         separate blobs.
+
+        :type projection: string or ``NoneType``
+        :param projection: If used, must be 'full' or 'noAcl'.  Defaults to
+                           'noAcl'.  Specifies the set of properties to
+                           return.
+
+        :type fields: string or ``NoneType``
+        :param fields: Selector specifying which fields to include in a
+                       partial response.  Must be a list of fields.  For
+                       example, to get a partial response with just the next
+                       page token and the language of each blob returned:
+                       'items/contentLanguage,nextPageToken'
+
+        :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+        :param client: Optional. The client to use.  If not passed, falls back
+                       to the ``client`` stored on the current bucket.
+
+        :rtype: :class:`_BlobIterator`
+        :returns: An iterator of blobs.
+        """
+        extra_params = {}
+
+        if max_results is not None:
+            extra_params['maxResults'] = max_results
+
+        if prefix is not None:
+            extra_params['prefix'] = prefix
+
+        if delimiter is not None:
+            extra_params['delimiter'] = delimiter
+
+        if versions is not None:
+            extra_params['versions'] = versions
+
+        extra_params['projection'] = projection
+
+        if fields is not None:
+            extra_params['fields'] = fields
+
+        result = self._iterator_class(
+            self, extra_params=extra_params, client=client)
+        # Page token must be handled specially since the base `Iterator`
+        # class has it as a reserved property.
+        if page_token is not None:
+            result.next_page_token = page_token
+        return result
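+
+    # Illustrative sketch (not part of the original source): emulating a
+    # directory listing with ``prefix`` and ``delimiter``.  After iteration,
+    # the iterator's ``prefixes`` set holds the "subdirectories" seen.
+    #
+    #     iterator = bucket.list_blobs(prefix='photos/', delimiter='/')
+    #     blobs = list(iterator)        # objects directly under photos/
+    #     subdirs = iterator.prefixes   # e.g. {'photos/2016/'}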
+
+
[docs]defdelete(self,force=False,client=None):
+ """Delete this bucket.
+
+ The bucket **must** be empty in order to submit a delete request. If
+ ``force=True`` is passed, this will first attempt to delete all the
+ objects / blobs in the bucket (i.e. try to empty the bucket).
+
+ If the bucket doesn't exist, this will raise
+ :class:`gcloud.exceptions.NotFound`. If the bucket is not empty
+ (and ``force=False``), will raise :class:`gcloud.exceptions.Conflict`.
+
+ If ``force=True`` and the bucket contains more than 256 objects / blobs
+ this will cowardly refuse to delete the objects (or the bucket). This
+ is to prevent accidental bucket deletion and to prevent extremely long
+ runtime of this method.
+
+ :type force: boolean
+ :param force: If True, empties the bucket's objects then deletes it.
+
+ :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+ :param client: Optional. The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket
+ contains more than 256 objects / blobs.
+ """
+ client = self._require_client(client)
+ if force:
+     blobs = list(self.list_blobs(
+         max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+         client=client))
+     if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+         message = (
+             'Refusing to delete bucket with more than '
+             '%d objects. If you actually want to delete '
+             'this bucket, please delete the objects '
+             'yourself before calling Bucket.delete().'
+         ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+         raise ValueError(message)
+
+     # Ignore 404 errors on delete.
+     self.delete_blobs(blobs, on_error=lambda blob: None,
+                       client=client)
+
+ # We intentionally pass `_target_object=None` since a DELETE
+ # request has no response value (whether in a standard request or
+ # in a batch request).
+ client.connection.api_request(method='DELETE', path=self.path,
+                               _target_object=None)
+
+
+def delete_blob(self, blob_name, client=None):
+ """Deletes a blob from the current bucket.
+
+ If the blob isn't found (backend 404), raises a
+ :class:`gcloud.exceptions.NotFound`.
+
+ For example::
+
+ >>> from gcloud.exceptions import NotFound
+ >>> from gcloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket('my-bucket')
+ >>> print bucket.list_blobs()
+ [<Blob: my-bucket, my-file.txt>]
+ >>> bucket.delete_blob('my-file.txt')
+ >>> try:
+ ... bucket.delete_blob('doesnt-exist')
+ ... except NotFound:
+ ... pass
+
+ :type blob_name: string
+ :param blob_name: A blob name to delete.
+
+ :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+ :param client: Optional. The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :raises: :class:`gcloud.exceptions.NotFound` (to suppress
+ the exception, call ``delete_blobs``, passing a no-op
+ ``on_error`` callback, e.g.::
+
+ >>> bucket.delete_blobs([blob], on_error=lambda blob: None)
+ """
+ client = self._require_client(client)
+ blob_path = Blob.path_helper(self.path, blob_name)
+ # We intentionally pass `_target_object=None` since a DELETE
+ # request has no response value (whether in a standard request or
+ # in a batch request).
+ client.connection.api_request(method='DELETE', path=blob_path,
+                               _target_object=None)
+
+
+def delete_blobs(self, blobs, on_error=None, client=None):
+ """Deletes a list of blobs from the current bucket.
+
+ Uses :func:`Bucket.delete_blob` to delete each individual blob.
+
+ :type blobs: list of string or :class:`gcloud.storage.blob.Blob`
+ :param blobs: A list of blob names or Blob objects to delete.
+
+ :type on_error: a callable taking (blob)
+ :param on_error: If not ``None``, called once for each blob raising
+ :class:`gcloud.exceptions.NotFound`;
+ otherwise, the exception is propagated.
+
+ :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+ :param client: Optional. The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :raises: :class:`gcloud.exceptions.NotFound` (if
+ `on_error` is not passed).
+ """
+ for blob in blobs:
+     try:
+         blob_name = blob
+         if not isinstance(blob_name, six.string_types):
+             blob_name = blob.name
+         self.delete_blob(blob_name, client=client)
+     except NotFound:
+         if on_error is not None:
+             on_error(blob)
+         else:
+             raise
+
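+
+# A minimal sketch of bulk deletion with a no-op ``on_error`` callback so
+# that already-missing blobs are skipped rather than raising NotFound
+# (bucket and blob names are illustrative):
+from gcloud import storage
+client = storage.Client()
+bucket = client.get_bucket('my-bucket')
+bucket.delete_blobs(['stale-1.txt', 'stale-2.txt'],
+                    on_error=lambda blob: None)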
+
+def copy_blob(self, blob, destination_bucket, new_name=None,
+              client=None):
+ """Copy the given blob to the given bucket, optionally with a new name.
+
+ :type blob: :class:`gcloud.storage.blob.Blob`
+ :param blob: The blob to be copied.
+
+ :type destination_bucket: :class:`gcloud.storage.bucket.Bucket`
+ :param destination_bucket: The bucket into which the blob should be
+ copied.
+
+ :type new_name: string
+ :param new_name: (optional) the new name for the copied file.
+
+ :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+ :param client: Optional. The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :rtype: :class:`gcloud.storage.blob.Blob`
+ :returns: The new Blob.
+ """
+ client = self._require_client(client)
+ if new_name is None:
+     new_name = blob.name
+ new_blob = Blob(bucket=destination_bucket, name=new_name)
+ api_path = blob.path + '/copyTo' + new_blob.path
+ copy_result = client.connection.api_request(
+     method='POST', path=api_path, _target_object=new_blob)
+ new_blob._set_properties(copy_result)
+ return new_blob
+
+
+def rename_blob(self, blob, new_name, client=None):
+ """Rename the given blob using copy and delete operations.
+
+ Effectively, copies blob to the same bucket with a new name, then
+ deletes the blob.
+
+ .. warning::
+
+ This method will first duplicate the data and then delete the
+ old blob. For very large objects, this means renaming can be a
+ temporarily costly and slow operation.
+
+ :type blob: :class:`gcloud.storage.blob.Blob`
+ :param blob: The blob to be renamed.
+
+ :type new_name: string
+ :param new_name: The new name for this blob.
+
+ :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+ :param client: Optional. The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+
+ :rtype: :class:`Blob`
+ :returns: The newly-renamed blob.
+ """
+ new_blob = self.copy_blob(blob, self, new_name, client=client)
+ blob.delete(client=client)
+ return new_blob
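+
+# A minimal sketch of a cross-bucket "move", composed from copy_blob and
+# delete in the same way rename_blob works within one bucket (bucket and
+# blob names are assumptions):
+from gcloud import storage
+client = storage.Client()
+source = client.get_bucket('my-bucket')
+archive = client.get_bucket('my-archive')
+blob = source.get_blob('report.csv')
+new_blob = source.copy_blob(blob, archive, new_name='2016/report.csv')
+blob.delete()  # remove the original to complete the "move"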
+
+ @property
+ def cors(self):
+ """Retrieve CORS policies configured for this bucket.
+
+ See: http://www.w3.org/TR/cors/ and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: list of dictionaries
+ :returns: A sequence of mappings describing each CORS policy.
+ """
+ return [copy.deepcopy(policy)
+         for policy in self._properties.get('cors', ())]
+
+ @cors.setter
+ def cors(self, entries):
+ """Set CORS policies configured for this bucket.
+
+ See: http://www.w3.org/TR/cors/ and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :type entries: list of dictionaries
+ :param entries: A sequence of mappings describing each CORS policy.
+ """
+ self._patch_property('cors', entries)
+
+ @property
+ def etag(self):
+ """Retrieve the ETag for the bucket.
+
+ See: http://tools.ietf.org/html/rfc2616#section-3.11 and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: string or ``NoneType``
+ :returns: The bucket etag or ``None`` if the property is not
+ set locally.
+ """
+ return self._properties.get('etag')
+
+ @property
+ def id(self):
+ """Retrieve the ID for the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: string or ``NoneType``
+ :returns: The ID of the bucket or ``None`` if the property is not
+ set locally.
+ """
+ return self._properties.get('id')
+
+ @property
+ def lifecycle_rules(self):
+ """Lifecycle rules configured for this bucket.
+
+ See: https://cloud.google.com/storage/docs/lifecycle and
+ https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: list(dict)
+ :returns: A sequence of mappings describing each lifecycle rule.
+ """
+ info = self._properties.get('lifecycle', {})
+ return [copy.deepcopy(rule) for rule in info.get('rule', ())]
+
+ @lifecycle_rules.setter
+ def lifecycle_rules(self, rules):
+     self._patch_property('lifecycle', {'rule': rules})
+
+ location = _scalar_property('location')
+ """Retrieve location configured for this bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets and
+ https://cloud.google.com/storage/docs/concepts-techniques#specifyinglocations
+
+ If the property is not set locally, returns ``None``.
+
+ :rtype: string or ``NoneType``
+ """
+
+
+def get_logging(self):
+ """Return info about access logging for this bucket.
+
+ See: https://cloud.google.com/storage/docs/accesslogs#status
+
+ :rtype: dict or None
+ :returns: a dict with keys ``logBucket`` and ``logObjectPrefix``
+ (if logging is enabled), or None (if not).
+ """
+ info = self._properties.get('logging')
+ return copy.deepcopy(info)
+
+
+def enable_logging(self, bucket_name, object_prefix=''):
+ """Enable access logging for this bucket.
+
+ See: https://cloud.google.com/storage/docs/accesslogs#delivery
+
+ :type bucket_name: string
+ :param bucket_name: name of bucket in which to store access logs
+
+ :type object_prefix: string
+ :param object_prefix: prefix for access log filenames
+ """
+ info = {'logBucket': bucket_name, 'logObjectPrefix': object_prefix}
+ self._patch_property('logging', info)
+
+
+def disable_logging(self):
+ """Disable access logging for this bucket.
+
+ See: https://cloud.google.com/storage/docs/accesslogs#disabling
+ """
+ self._patch_property('logging', None)
+
+ @property
+ def metageneration(self):
+ """Retrieve the metageneration for the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: integer or ``NoneType``
+ :returns: The metageneration of the bucket or ``None`` if the property
+ is not set locally.
+ """
+ metageneration = self._properties.get('metageneration')
+ if metageneration is not None:
+     return int(metageneration)
+
+ @property
+ def owner(self):
+ """Retrieve info about the owner of the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: dict or ``NoneType``
+ :returns: Mapping of owner's role/ID. If the property is not set
+ locally, returns ``None``.
+ """
+ return copy.deepcopy(self._properties.get('owner'))
+
+ @property
+ def project_number(self):
+ """Retrieve the number of the project to which the bucket is assigned.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: integer or ``NoneType``
+ :returns: The project number that owns the bucket or ``None`` if the
+ property is not set locally.
+ """
+ project_number = self._properties.get('projectNumber')
+ if project_number is not None:
+     return int(project_number)
+
+ @property
+ def self_link(self):
+ """Retrieve the URI for the bucket.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: string or ``NoneType``
+ :returns: The self link for the bucket or ``None`` if the property is
+ not set locally.
+ """
+ return self._properties.get('selfLink')
+
+ @property
+ def storage_class(self):
+ """Retrieve the storage class for the bucket.
+
+ See: https://cloud.google.com/storage/docs/storage-classes
+ https://cloud.google.com/storage/docs/nearline-storage
+ https://cloud.google.com/storage/docs/durable-reduced-availability
+
+ :rtype: string or ``NoneType``
+ :returns: If set, one of "STANDARD", "NEARLINE", or
+ "DURABLE_REDUCED_AVAILABILITY", else ``None``.
+ """
+ return self._properties.get('storageClass')
+
+ @storage_class.setter
+ def storage_class(self, value):
+ """Set the storage class for the bucket.
+
+ See: https://cloud.google.com/storage/docs/storage-classes
+ https://cloud.google.com/storage/docs/nearline-storage
+ https://cloud.google.com/storage/docs/durable-reduced-availability
+
+ :type value: string
+ :param value: one of "STANDARD", "NEARLINE", or
+ "DURABLE_REDUCED_AVAILABILITY"
+ """
+ if value not in self._STORAGE_CLASSES:
+     raise ValueError('Invalid storage class: %s' % (value,))
+ self._patch_property('storageClass', value)
+
+ @property
+ def time_created(self):
+ """Retrieve the timestamp at which the bucket was created.
+
+ See: https://cloud.google.com/storage/docs/json_api/v1/buckets
+
+ :rtype: :class:`datetime.datetime` or ``NoneType``
+ :returns: Datetime object parsed from RFC3339 valid timestamp, or
+ ``None`` if the property is not set locally.
+ """
+ value = self._properties.get('timeCreated')
+ if value is not None:
+     return _rfc3339_to_datetime(value)
+
+ @property
+ def versioning_enabled(self):
+ """Is versioning enabled for this bucket?
+
+ See: https://cloud.google.com/storage/docs/object-versioning for
+ details.
+
+ :rtype: boolean
+ :returns: True if enabled, else False.
+ """
+ versioning = self._properties.get('versioning', {})
+ return versioning.get('enabled', False)
+
+ @versioning_enabled.setter
+ def versioning_enabled(self, value):
+ """Enable versioning for this bucket.
+
+ See: https://cloud.google.com/storage/docs/object-versioning for
+ details.
+
+ :type value: convertible to boolean
+ :param value: should versioning be enabled for the bucket?
+ """
+ self._patch_property('versioning', {'enabled': bool(value)})
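+
+# A minimal sketch of staging and sending property changes. The setters
+# above only record changes locally via _patch_property; ``patch()``
+# (provided by the shared properties mixin, not shown on this page) is
+# assumed here to send the PATCH request:
+from gcloud import storage
+client = storage.Client()
+bucket = client.get_bucket('my-bucket')  # illustrative bucket name
+bucket.versioning_enabled = True
+bucket.lifecycle_rules = [
+    {'action': {'type': 'Delete'}, 'condition': {'age': 365}},
+]
+bucket.patch()  # assumed mixin method; sends the staged changes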
+
+
+def configure_website(self, main_page_suffix=None, not_found_page=None):
+ """Configure website-related properties.
+
+ See: https://developers.google.com/storage/docs/website-configuration
+
+ .. note::
+ This only works if your bucket name is a domain name (which,
+ in turn, requires verifying ownership of that domain with
+ Google).
+
+ If you want this bucket to host a website, just provide the name
+ of an index page and a page to use when a blob isn't found::
+
+ >>> from gcloud import storage
+ >>> client = storage.Client()
+ >>> bucket = client.get_bucket(bucket_name)
+ >>> bucket.configure_website('index.html', '404.html')
+
+ You probably should also make the whole bucket public::
+
+ >>> bucket.make_public(recursive=True, future=True)
+
+ This says: "Make the bucket public, and all the stuff already in
+ the bucket, and anything else I add to the bucket. Just make it
+ all public."
+
+ :type main_page_suffix: string
+ :param main_page_suffix: The page to use as the main page
+ of a directory.
+ Typically something like index.html.
+
+ :type not_found_page: string
+ :param not_found_page: The file to use when a page isn't found.
+ """
+ data = {
+     'mainPageSuffix': main_page_suffix,
+     'notFoundPage': not_found_page,
+ }
+ self._patch_property('website', data)
+
+
+def disable_website(self):
+ """Disable the website configuration for this bucket.
+
+ This is really just a shortcut for setting the website-related
+ attributes to ``None``.
+ """
+ return self.configure_website(None, None)
+
+
+def make_public(self, recursive=False, future=False, client=None):
+ """Make a bucket public.
+
+ If ``recursive=True`` and the bucket contains more than 256
+ objects / blobs this will cowardly refuse to make the objects public.
+ This is to prevent extremely long runtime of this method.
+
+ :type recursive: boolean
+ :param recursive: If True, this will make all blobs inside the bucket
+ public as well.
+
+ :type future: boolean
+ :param future: If True, this will make all objects created in the
+ future public as well.
+
+ :type client: :class:`gcloud.storage.client.Client` or ``NoneType``
+ :param client: Optional. The client to use. If not passed, falls back
+ to the ``client`` stored on the current bucket.
+ """
+ self.acl.all().grant_read()
+ self.acl.save(client=client)
+
+ if future:
+     doa = self.default_object_acl
+     if not doa.loaded:
+         doa.reload(client=client)
+     doa.all().grant_read()
+     doa.save(client=client)
+
+ if recursive:
+     blobs = list(self.list_blobs(
+         projection='full',
+         max_results=self._MAX_OBJECTS_FOR_ITERATION + 1,
+         client=client))
+     if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION:
+         message = (
+             'Refusing to make public recursively with more than '
+             '%d objects. If you actually want to make every object '
+             'in this bucket public, please do it on the objects '
+             'yourself.'
+         ) % (self._MAX_OBJECTS_FOR_ITERATION,)
+         raise ValueError(message)
+
+     for blob in blobs:
+         blob.acl.all().grant_read()
+         blob.acl.save(client=client)
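+
+# A minimal sketch of the website + make_public flow described above.
+# 'www.example.com' is an assumed, domain-verified bucket name, and
+# ``patch()`` is assumed from the shared properties mixin:
+from gcloud import storage
+client = storage.Client()
+bucket = client.get_bucket('www.example.com')
+bucket.configure_website('index.html', '404.html')
+bucket.patch()  # send the staged 'website' property
+bucket.make_public(recursive=True, future=True)
+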
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud Storage API."""
+
+
+from gcloud._helpers import _LocalStack
+from gcloud.client import JSONClient
+from gcloud.exceptions import NotFound
+from gcloud.iterator import Iterator
+from gcloud.storage.batch import Batch
+from gcloud.storage.bucket import Bucket
+from gcloud.storage.connection import Connection
+
+
+
+class Client(JSONClient):
+ """Client to bundle configuration needed for API requests.
+
+ :type project: string
+ :param project: the project which the client acts on behalf of. Will be
+ passed when creating buckets. If not passed,
+ falls back to the default inferred from the environment.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
+ :class:`NoneType`
+ :param credentials: The OAuth2 Credentials to use for the connection
+ owned by this client. If not passed (and if no ``http``
+ object is passed), falls back to the default inferred
+ from the environment.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: An optional HTTP object to make requests. If not passed, an
+ ``http`` object is created that is bound to the
+ ``credentials`` for the current object.
+ """
+
+ _connection_class = Connection
+
+ def __init__(self, project=None, credentials=None, http=None):
+     self._connection = None
+     super(Client, self).__init__(project=project, credentials=credentials,
+                                  http=http)
+     self._batch_stack = _LocalStack()
+
+ @property
+ def connection(self):
+ """Get connection or batch on the client.
+
+ :rtype: :class:`gcloud.storage.connection.Connection`
+ :returns: The connection set on the client, or the batch
+ if one is set.
+ """
+ if self.current_batch is not None:
+     return self.current_batch
+ else:
+     return self._connection
+
+ @connection.setter
+ def connection(self, value):
+ """Set connection on the client.
+
+ Intended to be used by the constructor (since the base class calls
+ ``self.connection = connection``).
+ Will raise if the connection is set more than once.
+
+ :type value: :class:`gcloud.storage.connection.Connection`
+ :param value: The connection set on the client.
+
+ :raises: :class:`ValueError` if connection has already been set.
+ """
+ if self._connection is not None:
+     raise ValueError('Connection already set on client')
+ self._connection = value
+
+ def _push_batch(self, batch):
+ """Push a batch onto our stack.
+
+ "Protected", intended for use by batch context mgrs.
+
+ :type batch: :class:`gcloud.storage.batch.Batch`
+ :param batch: newly-active batch
+ """
+ self._batch_stack.push(batch)
+
+ def _pop_batch(self):
+ """Pop a batch from our stack.
+
+ "Protected", intended for use by batch context mgrs.
+
+ :raises: IndexError if the stack is empty.
+ :rtype: :class:`gcloud.storage.batch.Batch`
+ :returns: the top-most batch/transaction, after removing it.
+ """
+ return self._batch_stack.pop()
+
+ @property
+ def current_batch(self):
+ """Currently-active batch.
+
+ :rtype: :class:`gcloud.storage.batch.Batch` or ``NoneType`` (if
+ no batch is active).
+ :returns: The batch at the top of the batch stack.
+ """
+ return self._batch_stack.top
+
+
+def bucket(self, bucket_name):
+ """Factory constructor for bucket object.
+
+ .. note::
+ This will not make an HTTP request; it simply instantiates
+ a bucket object owned by this client.
+
+ :type bucket_name: string
+ :param bucket_name: The name of the bucket to be instantiated.
+
+ :rtype: :class:`gcloud.storage.bucket.Bucket`
+ :returns: The bucket object created.
+ """
+ return Bucket(client=self, name=bucket_name)
+
+
+def batch(self):
+ """Factory constructor for batch object.
+
+ .. note::
+ This will not make an HTTP request; it simply instantiates
+ a batch object owned by this client.
+
+ :rtype: :class:`gcloud.storage.batch.Batch`
+ :returns: The batch object created.
+ """
+ return Batch(client=self)
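+
+# A minimal sketch of grouping requests, assuming
+# gcloud.storage.batch.Batch supports the context-manager protocol (as
+# the _push_batch/_pop_batch helpers above suggest); bucket and blob
+# names are illustrative:
+from gcloud import storage
+client = storage.Client()
+bucket = client.get_bucket('my-bucket')
+with client.batch():
+    # While the batch is current, ``client.connection`` resolves to it,
+    # so these deletes are buffered and sent together on exit.
+    bucket.delete_blob('tmp-1.txt')
+    bucket.delete_blob('tmp-2.txt')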
+
+
+def get_bucket(self, bucket_name):
+ """Get a bucket by name.
+
+ If the bucket isn't found, this will raise a
+ :class:`gcloud.exceptions.NotFound`.
+
+ For example::
+
+ >>> try:
+ >>> bucket = client.get_bucket('my-bucket')
+ >>> except gcloud.exceptions.NotFound:
+ >>> print 'Sorry, that bucket does not exist!'
+
+ This implements "storage.buckets.get".
+
+ :type bucket_name: string
+ :param bucket_name: The name of the bucket to get.
+
+ :rtype: :class:`gcloud.storage.bucket.Bucket`
+ :returns: The bucket matching the name provided.
+ :raises: :class:`gcloud.exceptions.NotFound`
+ """
+ bucket = Bucket(self, name=bucket_name)
+ bucket.reload(client=self)
+ return bucket
+
+
+def lookup_bucket(self, bucket_name):
+ """Get a bucket by name, returning None if not found.
+
+ You can use this if you would rather check for a None value
+ than catching an exception::
+
+ >>> bucket = client.lookup_bucket('doesnt-exist')
+ >>> print bucket
+ None
+ >>> bucket = client.lookup_bucket('my-bucket')
+ >>> print bucket
+ <Bucket: my-bucket>
+
+ :type bucket_name: string
+ :param bucket_name: The name of the bucket to get.
+
+ :rtype: :class:`gcloud.storage.bucket.Bucket`
+ :returns: The bucket matching the name provided or None if not found.
+ """
+ try:
+     return self.get_bucket(bucket_name)
+ except NotFound:
+     return None
+
+
+def create_bucket(self, bucket_name):
+ """Create a new bucket.
+
+ For example::
+
+ >>> bucket = client.create_bucket('my-bucket')
+ >>> print bucket
+ <Bucket: my-bucket>
+
+ This implements "storage.buckets.insert".
+
+ If the bucket already exists, will raise
+ :class:`gcloud.exceptions.Conflict`.
+
+ :type bucket_name: string
+ :param bucket_name: The bucket name to create.
+
+ :rtype: :class:`gcloud.storage.bucket.Bucket`
+ :returns: The newly created bucket.
+ """
+ bucket = Bucket(self, name=bucket_name)
+ bucket.create(client=self)
+ return bucket
+
+
+def list_buckets(self, max_results=None, page_token=None, prefix=None,
+                 projection='noAcl', fields=None):
+ """Get all buckets in the project associated to the client.
+
+ This will not populate the list of blobs available in each
+ bucket.
+
+ >>> for bucket in client.list_buckets():
+ >>> print bucket
+
+ This implements "storage.buckets.list".
+
+ :type max_results: integer or ``NoneType``
+ :param max_results: Optional. Maximum number of buckets to return.
+
+ :type page_token: string or ``NoneType``
+ :param page_token: Optional. Opaque marker for the next "page" of
+ buckets. If not passed, will return the first page
+ of buckets.
+
+ :type prefix: string or ``NoneType``
+ :param prefix: Optional. Filter results to buckets whose names begin
+ with this prefix.
+
+ :type projection: string or ``NoneType``
+ :param projection: If used, must be 'full' or 'noAcl'. Defaults to
+ 'noAcl'. Specifies the set of properties to return.
+
+ :type fields: string or ``NoneType``
+ :param fields: Selector specifying which fields to include in a
+ partial response. Must be a list of fields. For example
+ to get a partial response with just the next page token
+ and the language of each bucket returned:
+ 'items/id,nextPageToken'
+
+ :rtype: iterable of :class:`gcloud.storage.bucket.Bucket` objects.
+ :returns: All buckets belonging to this project.
+ """
+ extra_params = {'project': self.project}
+
+ if max_results is not None:
+     extra_params['maxResults'] = max_results
+
+ if prefix is not None:
+     extra_params['prefix'] = prefix
+
+ extra_params['projection'] = projection
+
+ if fields is not None:
+     extra_params['fields'] = fields
+
+ result = _BucketIterator(client=self,
+                          extra_params=extra_params)
+ # Page token must be handled specially since the base `Iterator`
+ # class has it as a reserved property.
+ if page_token is not None:
+     result.next_page_token = page_token
+ return result
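+
+# A minimal sketch of a partial-response listing that requests only the
+# bucket names and the next page token (the 'staging-' prefix is an
+# illustrative assumption):
+from gcloud import storage
+client = storage.Client()
+for bucket in client.list_buckets(prefix='staging-',
+                                  fields='items/name,nextPageToken'):
+    print(bucket.name)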
+
+
+class _BucketIterator(Iterator):
+ """An iterator listing all buckets.
+
+ You shouldn't have to use this directly, but instead should use the
+ helper methods on :class:`gcloud.storage.client.Client`
+ objects.
+
+ :type client: :class:`gcloud.storage.client.Client`
+ :param client: The client to use for making connections.
+
+ :type extra_params: dict or ``NoneType``
+ :param extra_params: Extra query string parameters for the API call.
+ """
+
+ def __init__(self, client, extra_params=None):
+     super(_BucketIterator, self).__init__(client=client, path='/b',
+                                           extra_params=extra_params)
+
+ def get_items_from_response(self, response):
+ """Factory method which yields :class:`.Bucket` items from a response.
+
+ :type response: dict
+ :param response: The JSON API response for a page of buckets.
+ """
+ for item in response.get('items', []):
+     name = item.get('name')
+     bucket = Bucket(self.client, name)
+     bucket._set_properties(item)
+     yield bucket
+
+# Copyright 2014 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with gcloud storage connections."""
+
+from gcloud import connection as base_connection
+
+
+
+class Connection(base_connection.JSONConnection):
+ """A connection to Google Cloud Storage via the JSON REST API.
+
+ :type credentials: :class:`oauth2client.client.OAuth2Credentials`
+ :param credentials: (Optional) The OAuth2 Credentials to use for this
+ connection.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: (Optional) HTTP object to make requests.
+ """
+
+ API_BASE_URL = base_connection.API_BASE_URL
+ """The base of the API call URL."""
+
+ API_VERSION = 'v1'
+ """The version of the API, used in building the API call's URL."""
+
+ API_URL_TEMPLATE = '{api_base_url}/storage/{api_version}{path}'
+ """A template for the URL of a particular API call."""
+
+ SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control',
+          'https://www.googleapis.com/auth/devstorage.read_only',
+          'https://www.googleapis.com/auth/devstorage.read_write')
+ """The scopes required for authenticating as a Cloud Storage consumer."""
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Google Cloud Translate API."""
+
+
+import httplib2
+import six
+
+from gcloud._helpers import _to_bytes
+from gcloud.translate.connection import Connection
+
+
+ENGLISH_ISO_639 = 'en'
+"""ISO 639-1 language code for English."""
+
+
+
+class Client(object):
+ """Client to bundle configuration needed for API requests.
+
+ :type api_key: str
+ :param api_key: The key used to send with requests as a query
+ parameter.
+
+ :type http: :class:`httplib2.Http` or class that defines ``request()``.
+ :param http: (Optional) HTTP object to make requests. If not
+ passed, an :class:`httplib2.Http` object is created.
+
+ :type target_language: str
+ :param target_language: (Optional) The target language used for
+ translations and language names. (Defaults to
+ :data:`ENGLISH_ISO_639`.)
+ """
+
+ def __init__(self, api_key, http=None, target_language=ENGLISH_ISO_639):
+     self.api_key = api_key
+     if http is None:
+         http = httplib2.Http()
+     self.connection = Connection(http=http)
+     self.target_language = target_language
+
+
+def get_languages(self, target_language=None):
+ """Get list of supported languages for translation.
+
+ See: https://cloud.google.com/translate/v2/\
+ discovering-supported-languages-with-rest
+
+ :type target_language: str
+ :param target_language: (Optional) The language used to localize
+ returned language names. Defaults to the
+ target language on the current client.
+
+ :rtype: list
+ :returns: List of dictionaries. Each dictionary contains a supported
+ ISO 639-1 language code (using the dictionary key
+ ``language``). If ``target_language`` is passed, each
+ dictionary will also contain the name of each supported
+ language (localized to the target language).
+ """
+ query_params = {'key': self.api_key}
+ if target_language is None:
+     target_language = self.target_language
+ if target_language is not None:
+     query_params['target'] = target_language
+ response = self.connection.api_request(
+     method='GET', path='/languages', query_params=query_params)
+ return response.get('data', {}).get('languages', ())
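+
+# A minimal sketch of listing supported languages with names localized
+# to German; the API key is a placeholder:
+from gcloud.translate.client import Client
+client = Client(api_key='my-api-key')
+for language in client.get_languages(target_language='de'):
+    print(language['language'], language.get('name'))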
+
+
+def detect_language(self, values):
+ """Detect the language of a string or list of strings.
+
+ See: https://cloud.google.com/translate/v2/\
+ detecting-language-with-rest
+
+ :type values: str or list
+ :param values: String or list of strings that will have
+ language detected.
+
+ :rtype: dict or list
+ :returns: A list of dictionaries for each queried value. Each
+ dictionary typically contains three keys
+
+ * ``confidence``: The confidence in language detection, a
+ float between 0 and 1.
+ * ``input``: The corresponding input value.
+ * ``language``: The detected language (as an ISO 639-1
+ language code).
+
+ though the key ``confidence`` may not always be present.
+
+ If only a single value is passed, then only a single
+ dictionary will be returned.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the number of
+ detections is not equal to the number of values.
+ :class:`ValueError <exceptions.ValueError>` if a value
+ produces a list of detections with 0 or multiple results
+ in it.
+ """
+ single_value = False
+ if isinstance(values, six.string_types):
+     single_value = True
+     values = [values]
+
+ query_params = [('key', self.api_key)]
+ query_params.extend(('q', _to_bytes(value, 'utf-8'))
+                     for value in values)
+ response = self.connection.api_request(
+     method='GET', path='/detect', query_params=query_params)
+ detections = response.get('data', {}).get('detections', ())
+
+ if len(values) != len(detections):
+     raise ValueError('Expected same number of values and detections',
+                      values, detections)
+
+ for index, value in enumerate(values):
+     # Empirically, even clearly ambiguous text like "no" only returns
+     # a single detection, so we replace the list of detections with
+     # the single detection contained.
+     if len(detections[index]) == 1:
+         detections[index] = detections[index][0]
+     else:
+         message = ('Expected a single detection per value, API '
+                    'returned %d') % (len(detections[index]),)
+         raise ValueError(message, value, detections[index])
+
+     detections[index]['input'] = value
+     # The ``isReliable`` field is deprecated.
+     detections[index].pop('isReliable', None)
+
+ if single_value:
+     return detections[0]
+ else:
+     return detections
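+
+# A minimal sketch of single-string detection; per the docstring, one
+# dict comes back and 'confidence' may be absent (the API key is a
+# placeholder):
+from gcloud.translate.client import Client
+client = Client(api_key='my-api-key')
+detection = client.detect_language('Bonjour le monde')
+print(detection['language'], detection.get('confidence'))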
+
+
+def translate(self, values, target_language=None, format_=None,
+              source_language=None, customization_ids=()):
+ """Translate a string or list of strings.
+
+ See: https://cloud.google.com/translate/v2/\
+ translating-text-with-rest
+
+ :type values: str or list
+ :param values: String or list of strings to translate.
+
+ :type target_language: str
+ :param target_language: The language to translate results into. This
+ is required by the API and defaults to
+ the target language of the current instance.
+
+ :type format_: str
+ :param format_: (Optional) One of ``text`` or ``html``, to specify
+ if the input text is plain text or HTML.
+
+ :type source_language: str
+ :param source_language: (Optional) The language of the text to
+ be translated.
+
+ :type customization_ids: str or list
+ :param customization_ids: (Optional) ID or list of customization IDs
+ for translation. Sets the ``cid`` parameter
+ in the query.
+
+ :rtype: dict or list
+ :returns: A list of dictionaries for each queried value. Each
+ dictionary typically contains three keys (though not
+ all will be present in all cases)
+
+ * ``detectedSourceLanguage``: The detected language (as an
+ ISO 639-1 language code) of the text.
+ * ``translatedText``: The translation of the text into the
+ target language.
+ * ``input``: The corresponding input value.
+
+ If only a single value is passed, then only a single
+ dictionary will be returned.
+ :raises: :class:`ValueError <exceptions.ValueError>` if the number of
+ values and translations differ.
+ """
+ single_value = False
+ if isinstance(values, six.string_types):
+     single_value = True
+     values = [values]
+
+ if target_language is None:
+     target_language = self.target_language
+ if isinstance(customization_ids, six.string_types):
+     customization_ids = [customization_ids]
+
+ query_params = [('key', self.api_key), ('target', target_language)]
+ query_params.extend(('q', _to_bytes(value, 'utf-8'))
+                     for value in values)
+ query_params.extend(('cid', cid) for cid in customization_ids)
+ if format_ is not None:
+     query_params.append(('format', format_))
+ if source_language is not None:
+     query_params.append(('source', source_language))
+
+ response = self.connection.api_request(
+     method='GET', path='', query_params=query_params)
+
+ translations = response.get('data', {}).get('translations', ())
+ if len(values) != len(translations):
+     raise ValueError('Expected iterations to have same length',
+                      values, translations)
+ for value, translation in six.moves.zip(values, translations):
+     translation['input'] = value
+
+ if single_value:
+     return translations[0]
+ else:
+     return translations
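+
+# A minimal sketch of translating one string into Spanish; with a single
+# input the method returns one mapping rather than a list (the API key is
+# a placeholder):
+from gcloud.translate.client import Client
+client = Client(api_key='my-api-key')
+result = client.translate('Hello, world!', target_language='es')
+print(result['translatedText'])
+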
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create / interact with Google Cloud Translate connections."""
+
+from gcloud import connection as base_connection
+
+
+
+class Connection(base_connection.JSONConnection):
+ """A connection to Google Cloud Translate via the JSON REST API."""
+
+ API_BASE_URL = 'https://www.googleapis.com'
+ """The base of the API call URL."""
+
+ API_VERSION = 'v2'
+ """The version of the API, used in building the API call's URL."""
+
+ API_URL_TEMPLATE = '{api_base_url}/language/translate/{api_version}{path}'
+ """A template for the URL of a particular API call."""