diff --git a/gcloud/storage/_helpers.py b/gcloud/storage/_helpers.py index 44433335368a..8f074ee79bd6 100644 --- a/gcloud/storage/_helpers.py +++ b/gcloud/storage/_helpers.py @@ -45,15 +45,32 @@ def __init__(self, name=None): self._properties = {} self._changes = set() - def reload(self, connection=None): + @staticmethod + def _client_or_connection(client): + """Temporary method to get a connection from a client. + + If the client is null, gets the connection from the environment. + + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. + + :rtype: :class:`gcloud.storage.connection.Connection` + :returns: The connection determined from the ``client`` or environment. + """ + if client is None: + return _require_connection() + else: + return client.connection + + def reload(self, client=None): """Reload properties from Cloud Storage. - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: An explicit connection to use for the API request. - If not passed, use the connection assigned to - the object in its constructor. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) # Pass only '?projection=noAcl' here because 'acl' and related # are handled via custom endpoints. query_params = {'projection': 'noAcl'} @@ -90,17 +107,16 @@ def _set_properties(self, value): # If the values are reset, the changes must as well. self._changes = set() - def patch(self, connection=None): + def patch(self, client=None): """Sends all changed properties in a PATCH request. Updates the ``_properties`` with the response from the backend. - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: An explicit connection to use for the API request. - If not passed, use the connection assigned to - the object in its constructor. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) # Pass '?projection=full' here because 'PATCH' documented not # to work properly w/ 'noAcl'. update_properties = dict((key, self._properties[key]) diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py index 673aa05d744d..688e6194c635 100644 --- a/gcloud/storage/acl.py +++ b/gcloud/storage/acl.py @@ -351,15 +351,33 @@ def get_entities(self): self._ensure_loaded() return list(self.entities.values()) - def reload(self, connection=None): + @staticmethod + def _client_or_connection(client): + """Temporary method to get a connection from a client. + + If the client is null, gets the connection from the environment. + + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. + + :rtype: :class:`gcloud.storage.connection.Connection` + :returns: The connection determined from the ``client`` or environment. + """ + if client is None: + return _require_connection() + else: + return client.connection + + def reload(self, client=None): """Reload the ACL data from Cloud Storage. 
- :type connection: :class:`gcloud.storage.connection.Connection` or None - :param connection: explicit connection to use for API request; - defaults to instance property. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ path = self.reload_path - connection = _require_connection(connection) + connection = self._client_or_connection(client) self.entities.clear() @@ -368,16 +386,16 @@ def reload(self, connection=None): for entry in found.get('items', ()): self.add_entity(self.entity_from_dict(entry)) - def save(self, acl=None, connection=None): + def save(self, acl=None, client=None): """Save this ACL for the current bucket. :type acl: :class:`gcloud.storage.acl.ACL`, or a compatible list. :param acl: The ACL object to save. If left blank, this will save current entries. - :type connection: :class:`gcloud.storage.connection.Connection` or None - :param connection: explicit connection to use for API request; - defaults to instance property. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ if acl is None: acl = self @@ -387,7 +405,7 @@ def save(self, acl=None, connection=None): if save_to_backend: path = self.save_path - connection = _require_connection(connection) + connection = self._client_or_connection(client) result = connection.api_request( method='PATCH', path=path, @@ -398,7 +416,7 @@ def save(self, acl=None, connection=None): self.add_entity(self.entity_from_dict(entry)) self.loaded = True - def clear(self, connection=None): + def clear(self, client=None): """Remove all ACL entries. Note that this won't actually remove *ALL* the rules, but it @@ -406,11 +424,11 @@ def clear(self, connection=None): have access to a bucket that you created even after you clear ACL rules with this method. - :type connection: :class:`gcloud.storage.connection.Connection` or None - :param connection: explicit connection to use for API request; - defaults to instance property. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ - self.save([], connection) + self.save([], client=client) class BucketACL(ACL): diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py index 48dedf409310..bc817ffcb6ef 100644 --- a/gcloud/storage/blob.py +++ b/gcloud/storage/blob.py @@ -154,7 +154,7 @@ def public_url(self): quoted_name=quote(self.name, safe='')) def generate_signed_url(self, expiration, method='GET', - connection=None, credentials=None): + client=None, credentials=None): """Generates a signed URL for this blob. If you have a blob that you want to allow access to for a set @@ -171,10 +171,9 @@ def generate_signed_url(self, expiration, method='GET', :type method: string :param method: The HTTP verb that will be used when requesting the URL. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to the ``connection`` stored on the blob's bucket. 
:type credentials: :class:`oauth2client.client.OAuth2Credentials` or :class:`NoneType` @@ -183,32 +182,53 @@ def generate_signed_url(self, expiration, method='GET', :rtype: string :returns: A signed URL you can use to access the resource until expiration. + :raises: :class:`ValueError` if no credentials could be determined + from the arguments. """ resource = '/{bucket_name}/{quoted_name}'.format( bucket_name=self.bucket.name, quoted_name=quote(self.name, safe='')) if credentials is None: - connection = _require_connection(connection) - credentials = connection.credentials + if client is not None: + credentials = client.connection.credentials + else: + raise ValueError('Credentials could not be determined.') return generate_signed_url( credentials, resource=resource, api_access_endpoint=_API_ACCESS_ENDPOINT, expiration=expiration, method=method) - def exists(self, connection=None): + @staticmethod + def _client_or_connection(client): + """Temporary method to get a connection from a client. + + If the client is null, gets the connection from the environment. + + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. + + :rtype: :class:`gcloud.storage.connection.Connection` + :returns: The connection determined from the ``client`` or environment. + """ + if client is None: + return _require_connection() + else: + return client.connection + + def exists(self, client=None): """Determines whether or not this blob exists. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: boolean :returns: True if the blob exists in Cloud Storage. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) try: # We only need the status code (200 or not) so we seek to # minimize the returned payload. @@ -225,7 +245,7 @@ def exists(self, connection=None): except NotFound: return False - def rename(self, new_name, connection=None): + def rename(self, new_name, client=None): """Renames this blob using copy and delete operations. Effectively, copies blob to the same bucket with a new name, then @@ -239,27 +259,24 @@ def rename(self, new_name, connection=None): :type new_name: string :param new_name: The new name for this blob. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: :class:`Blob` :returns: The newly-copied blob. """ - connection = _require_connection(connection) new_blob = self.bucket.copy_blob(self, self.bucket, new_name, - connection=connection) - self.delete(connection=connection) + client=client) + self.delete(client=client) return new_blob - def delete(self, connection=None): + def delete(self, client=None): """Deletes a blob from Cloud Storage. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. 
If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: :class:`Blob` :returns: The blob that was just deleted. @@ -267,23 +284,21 @@ def delete(self, connection=None): (propagated from :meth:`gcloud.storage.bucket.Bucket.delete_blob`). """ - connection = _require_connection(connection) - return self.bucket.delete_blob(self.name, connection=connection) + return self.bucket.delete_blob(self.name, client=client) - def download_to_file(self, file_obj, connection=None): + def download_to_file(self, file_obj, client=None): """Download the contents of this blob into a file-like object. :type file_obj: file :param file_obj: A file handle to which to write the blob's data. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :raises: :class:`gcloud.exceptions.NotFound` """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) download_url = self.media_link # Use apitools 'Download' facility. @@ -302,43 +317,41 @@ def download_to_file(self, file_obj, connection=None): download.StreamInChunks(callback=lambda *args: None, finish_callback=lambda *args: None) - def download_to_filename(self, filename, connection=None): + def download_to_filename(self, filename, client=None): """Download the contents of this blob into a named file. :type filename: string :param filename: A filename to be passed to ``open``. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :raises: :class:`gcloud.exceptions.NotFound` """ with open(filename, 'wb') as file_obj: - self.download_to_file(file_obj, connection=connection) + self.download_to_file(file_obj, client=client) mtime = time.mktime(self.updated.timetuple()) os.utime(file_obj.name, (mtime, mtime)) - def download_as_string(self, connection=None): + def download_as_string(self, client=None): """Download the contents of this blob as a string. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: bytes :returns: The data stored in this blob. :raises: :class:`gcloud.exceptions.NotFound` """ string_buffer = BytesIO() - self.download_to_file(string_buffer, connection=connection) + self.download_to_file(string_buffer, client=client) return string_buffer.getvalue() def upload_from_file(self, file_obj, rewind=False, size=None, - content_type=None, num_retries=6, connection=None): + content_type=None, num_retries=6, client=None): """Upload the contents of this blob from a file-like object. 
The content type of the upload will either be @@ -376,15 +389,14 @@ def upload_from_file(self, file_obj, rewind=False, size=None, :type num_retries: integer :param num_retries: Number of upload retries. Defaults to 6. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :raises: :class:`ValueError` if size is not passed in and can not be determined """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) content_type = (content_type or self._properties.get('contentType') or 'application/octet-stream') @@ -447,7 +459,7 @@ def upload_from_file(self, file_obj, rewind=False, size=None, self._set_properties(json.loads(response_content)) def upload_from_filename(self, filename, content_type=None, - connection=None): + client=None): """Upload this blob's contents from the content of a named file. The content type of the upload will either be @@ -472,10 +484,9 @@ def upload_from_filename(self, filename, content_type=None, :type content_type: string or ``NoneType`` :param content_type: Optional type of content being uploaded. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ content_type = content_type or self._properties.get('contentType') if content_type is None: @@ -483,10 +494,10 @@ def upload_from_filename(self, filename, content_type=None, with open(filename, 'rb') as file_obj: self.upload_from_file(file_obj, content_type=content_type, - connection=connection) + client=client) def upload_from_string(self, data, content_type='text/plain', - connection=None): + client=None): """Upload contents of this blob from the provided string. .. note:: @@ -508,10 +519,9 @@ def upload_from_string(self, data, content_type='text/plain', :param content_type: Optional type of content being uploaded. Defaults to ``'text/plain'``. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ if isinstance(data, six.text_type): data = data.encode('utf-8') @@ -519,18 +529,17 @@ def upload_from_string(self, data, content_type='text/plain', string_buffer.write(data) self.upload_from_file(file_obj=string_buffer, rewind=True, size=len(data), content_type=content_type, - connection=connection) + client=client) - def make_public(self, connection=None): + def make_public(self, client=None): """Make this blob public giving all users read access. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. 
The client to use. If not passed, falls back + to the ``connection`` stored on the blob's bucket. """ self.acl.all().grant_read() - self.acl.save(connection=connection) + self.acl.save(client=client) cache_control = _scalar_property('cacheControl') """HTTP 'Cache-Control' header for this object. diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index a20cbe408d5c..480bb46960ba 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -95,18 +95,35 @@ def __init__(self, name=None): def __repr__(self): return '<Bucket: %s>' % self.name - def exists(self, connection=None): + @staticmethod + def _client_or_connection(client): + """Temporary method to get a connection from a client. + + If the client is null, gets the connection from the environment. + + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. + + :rtype: :class:`gcloud.storage.connection.Connection` + :returns: The connection determined from the ``client`` or environment. + """ + if client is None: + return _require_connection() + else: + return client.connection + + def exists(self, client=None): """Determines whether or not this bucket exists. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: boolean :returns: True if the bucket exists in Cloud Storage. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) try: # We only need the status code (200 or not) so we seek to # minimize the returned payload. @@ -188,7 +205,7 @@ def path(self): return self.path_helper(self.name) - def get_blob(self, blob_name, connection=None): + def get_blob(self, blob_name, client=None): """Get a blob object by name. This will return None if the blob doesn't exist:: @@ -204,15 +221,14 @@ def get_blob(self, blob_name, connection=None): :type blob_name: string :param blob_name: The name of the blob to retrieve. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: :class:`gcloud.storage.blob.Blob` or None :returns: The blob object if it exists, otherwise None. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) blob = Blob(bucket=self, name=blob_name) try: response = connection.api_request( @@ -295,7 +311,7 @@ def list_blobs(self, max_results=None, page_token=None, prefix=None, result.next_page_token = page_token return result - def delete(self, force=False, connection=None): + def delete(self, force=False, client=None): """Delete this bucket. The bucket **must** be empty in order to submit a delete request. If @@ -314,15 +330,14 @@ def delete(self, force=False, connection=None): :type force: boolean :param force: If True, empties the bucket's objects then deletes it. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. 
The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket contains more than 256 objects / blobs. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) if force: blobs = list(self.list_blobs( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, @@ -338,7 +353,7 @@ def delete(self, force=False, connection=None): # Ignore 404 errors on delete. self.delete_blobs(blobs, on_error=lambda blob: None, - connection=connection) + client=client) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or @@ -346,7 +361,7 @@ def delete(self, force=False, connection=None): connection.api_request(method='DELETE', path=self.path, _target_object=None) - def delete_blob(self, blob_name, connection=None): + def delete_blob(self, blob_name, client=None): """Deletes a blob from the current bucket. If the blob isn't found (backend 404), raises a @@ -369,10 +384,9 @@ def delete_blob(self, blob_name, connection=None): :type blob_name: string :param blob_name: A blob name to delete. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :raises: :class:`gcloud.exceptions.NotFound` (to suppress the exception, call ``delete_blobs``, passing a no-op @@ -380,7 +394,7 @@ def delete_blob(self, blob_name, connection=None): >>> bucket.delete_blobs([blob], on_error=lambda blob: None) """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) blob_path = Blob.path_helper(self.path, blob_name) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or @@ -388,7 +402,7 @@ def delete_blob(self, blob_name, connection=None): connection.api_request(method='DELETE', path=blob_path, _target_object=None) - def delete_blobs(self, blobs, on_error=None, connection=None): + def delete_blobs(self, blobs, on_error=None, client=None): """Deletes a list of blobs from the current bucket. Uses :func:`Bucket.delete_blob` to delete each individual blob. @@ -401,30 +415,27 @@ def delete_blobs(self, blobs, on_error=None, connection=None): :class:`gcloud.exceptions.NotFound`; otherwise, the exception is propagated. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :raises: :class:`gcloud.exceptions.NotFound` (if `on_error` is not passed). 
""" - connection = _require_connection(connection) for blob in blobs: try: blob_name = blob if not isinstance(blob_name, six.string_types): blob_name = blob.name - self.delete_blob(blob_name, connection=connection) + self.delete_blob(blob_name, client=client) except NotFound: if on_error is not None: on_error(blob) else: raise - @staticmethod - def copy_blob(blob, destination_bucket, new_name=None, - connection=None): + def copy_blob(self, blob, destination_bucket, new_name=None, + client=None): """Copy the given blob to the given bucket, optionally with a new name. :type blob: string or :class:`gcloud.storage.blob.Blob` @@ -437,15 +448,14 @@ def copy_blob(blob, destination_bucket, new_name=None, :type new_name: string :param new_name: (optional) the new name for the copied file. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: :class:`gcloud.storage.blob.Blob` :returns: The new Blob. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) if new_name is None: new_name = blob.name new_blob = Blob(bucket=destination_bucket, name=new_name) @@ -455,7 +465,7 @@ def copy_blob(blob, destination_bucket, new_name=None, new_blob._set_properties(copy_result) return new_blob - def upload_file(self, filename, blob_name=None, connection=None): + def upload_file(self, filename, blob_name=None, client=None): """Shortcut method to upload a file into this bucket. Use this method to quickly put a local file in Cloud Storage. @@ -488,10 +498,9 @@ def upload_file(self, filename, blob_name=None, connection=None): of the bucket with the same name as on your local file system. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: :class:`Blob` :returns: The updated Blob object. @@ -499,10 +508,10 @@ def upload_file(self, filename, blob_name=None, connection=None): if blob_name is None: blob_name = os.path.basename(filename) blob = Blob(bucket=self, name=blob_name) - blob.upload_from_filename(filename, connection=connection) + blob.upload_from_filename(filename, client=client) return blob - def upload_file_object(self, file_obj, blob_name=None, connection=None): + def upload_file_object(self, file_obj, blob_name=None, client=None): """Shortcut method to upload a file object into this bucket. Use this method to quickly put a local file in Cloud Storage. @@ -535,10 +544,9 @@ def upload_file_object(self, file_obj, blob_name=None, connection=None): of the bucket with the same name as on your local file system. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. :rtype: :class:`Blob` :returns: The updated Blob object. 
@@ -546,7 +554,7 @@ def upload_file_object(self, file_obj, blob_name=None, connection=None): if blob_name is None: blob_name = os.path.basename(file_obj.name) blob = Blob(bucket=self, name=blob_name) - blob.upload_from_file(file_obj, connection=connection) + blob.upload_from_file(file_obj, client=client) return blob @property @@ -822,7 +830,7 @@ def disable_website(self): """ return self.configure_website(None, None) - def make_public(self, recursive=False, future=False, connection=None): + def make_public(self, recursive=False, future=False, client=None): """Make a bucket public. If ``recursive=True`` and the bucket contains more than 256 @@ -837,22 +845,21 @@ def make_public(self, recursive=False, future=False, connection=None): :param future: If True, this will make all objects created in the future public as well. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to default connection. """ - connection = _require_connection(connection) + connection = self._client_or_connection(client) self.acl.all().grant_read() - self.acl.save(connection=connection) + self.acl.save(client=client) if future: doa = self.default_object_acl if not doa.loaded: - doa.reload(connection=connection) + doa.reload(client=client) doa.all().grant_read() - doa.save(connection=connection) + doa.save(client=client) if recursive: blobs = list(self.list_blobs( @@ -870,4 +877,4 @@ def make_public(self, recursive=False, future=False, connection=None): for blob in blobs: blob.acl.all().grant_read() - blob.acl.save(connection=connection) + blob.acl.save(client=client) diff --git a/gcloud/storage/client.py b/gcloud/storage/client.py index ac5ffb4bd227..fd0402d232b6 100644 --- a/gcloud/storage/client.py +++ b/gcloud/storage/client.py @@ -68,7 +68,7 @@ def get_bucket(self, bucket_name): :raises: :class:`gcloud.exceptions.NotFound` """ bucket = Bucket(bucket_name) - bucket.reload(connection=self.connection) + bucket.reload(client=self) return bucket def lookup_bucket(self, bucket_name): diff --git a/gcloud/storage/test__helpers.py b/gcloud/storage/test__helpers.py index 104ecb38bd05..c2e72cc54213 100644 --- a/gcloud/storage/test__helpers.py +++ b/gcloud/storage/test__helpers.py @@ -60,10 +60,11 @@ def test_reload_w_implicit_connection(self): def test_reload_w_explicit_connection(self): connection = _Connection({'foo': 'Foo'}) + client = _Client(connection) derived = self._derivedClass('/path')() # Make sure changes is not a set, so we can observe a change. derived._changes = object() - derived.reload(connection) + derived.reload(client=client) self.assertEqual(derived._properties, {'foo': 'Foo'}) kw = connection._requested self.assertEqual(len(kw), 1) @@ -108,13 +109,14 @@ def test_patch_w_implicit_connection(self): def test_patch_w_explicit_connection(self): connection = _Connection({'foo': 'Foo'}) + client = _Client(connection) derived = self._derivedClass('/path')() # Make sure changes is non-empty, so we can observe a change. BAR = object() BAZ = object() derived._properties = {'bar': BAR, 'baz': BAZ} derived._changes = set(['bar']) # Ignore baz. 
- derived.patch(connection) + derived.patch(client=client) self.assertEqual(derived._properties, {'foo': 'Foo'}) kw = connection._requested self.assertEqual(len(kw), 1) @@ -301,3 +303,9 @@ def __enter__(self): def __exit__(self, *args): from gcloud.storage.batch import _BATCHES _BATCHES.pop() + + +class _Client(object): + + def __init__(self, connection): + self.connection = connection diff --git a/gcloud/storage/test_acl.py b/gcloud/storage/test_acl.py index 7c0b9fb197e1..d4d1607f2920 100644 --- a/gcloud/storage/test_acl.py +++ b/gcloud/storage/test_acl.py @@ -526,11 +526,12 @@ def test_reload_missing_w_explicit_connection(self): # https://github.com/GoogleCloudPlatform/gcloud-python/issues/652 ROLE = 'role' connection = _Connection({}) + client = _Client(connection) acl = self._makeOne() acl.reload_path = '/testing/acl' acl.loaded = True acl.entity('allUsers', ROLE) - acl.reload(connection=connection) + acl.reload(client=client) self.assertEqual(list(acl), []) kw = connection._requested self.assertEqual(len(kw), 1) @@ -557,11 +558,12 @@ def test_reload_empty_result_clears_local_w_implicit_connection(self): def test_reload_empty_result_clears_local_w_explicit_connection(self): ROLE = 'role' connection = _Connection({'items': []}) + client = _Client(connection) acl = self._makeOne() acl.reload_path = '/testing/acl' acl.loaded = True acl.entity('allUsers', ROLE) - acl.reload(connection=connection) + acl.reload(client=client) self.assertTrue(acl.loaded) self.assertEqual(list(acl), []) kw = connection._requested @@ -590,10 +592,11 @@ def test_reload_nonempty_result_w_explicit_connection(self): ROLE = 'role' connection = _Connection( {'items': [{'entity': 'allUsers', 'role': ROLE}]}) + client = _Client(connection) acl = self._makeOne() acl.reload_path = '/testing/acl' acl.loaded = True - acl.reload(connection=connection) + acl.reload(client=client) self.assertTrue(acl.loaded) self.assertEqual(list(acl), [{'entity': 'allUsers', 'role': ROLE}]) kw = connection._requested @@ -614,9 +617,10 @@ def test_save_none_set_none_passed_w_implicit_connection(self): def test_save_none_set_none_passed_w_explicit_connection(self): connection = _Connection() + client = _Client(connection) acl = self._makeOne() acl.save_path = '/testing' - acl.save(connection=connection) + acl.save(client=client) kw = connection._requested self.assertEqual(len(kw), 0) @@ -638,10 +642,11 @@ def test_save_existing_missing_none_passed_w_implicit_connection(self): def test_save_existing_missing_none_passed_w_explicit_connection(self): connection = _Connection({}) + client = _Client(connection) acl = self._makeOne() acl.save_path = '/testing' acl.loaded = True - acl.save(connection=connection) + acl.save(client=client) self.assertEqual(list(acl), []) kw = connection._requested self.assertEqual(len(kw), 1) @@ -673,11 +678,12 @@ def test_save_no_arg_w_explicit_connection(self): ROLE = 'role' AFTER = [{'entity': 'allUsers', 'role': ROLE}] connection = _Connection({'acl': AFTER}) + client = _Client(connection) acl = self._makeOne() acl.save_path = '/testing' acl.loaded = True acl.entity('allUsers').grant(ROLE) - acl.save(connection=connection) + acl.save(client=client) self.assertEqual(list(acl), AFTER) kw = connection._requested self.assertEqual(len(kw), 1) @@ -715,10 +721,11 @@ def test_save_w_arg_w_explicit_connection(self): STICKY = {'entity': 'allUsers', 'role': ROLE2} new_acl = [{'entity': 'allUsers', 'role': ROLE1}] connection = _Connection({'acl': [STICKY] + new_acl}) + client = _Client(connection) acl = 
self._makeOne() acl.save_path = '/testing' acl.loaded = True - acl.save(new_acl, connection) + acl.save(new_acl, client=client) entries = list(acl) self.assertEqual(len(entries), 2) self.assertTrue(STICKY in entries) @@ -755,11 +762,12 @@ def test_clear_w_explicit_connection(self): ROLE2 = 'role2' STICKY = {'entity': 'allUsers', 'role': ROLE2} connection = _Connection({'acl': [STICKY]}) + client = _Client(connection) acl = self._makeOne() acl.save_path = '/testing' acl.loaded = True acl.entity('allUsers', ROLE1) - acl.clear(connection=connection) + acl.clear(client=client) self.assertEqual(list(acl), [STICKY]) kw = connection._requested self.assertEqual(len(kw), 1) @@ -870,3 +878,9 @@ def api_request(self, **kw): raise NotFound('miss') else: return response + + +class _Client(object): + + def __init__(self, connection): + self.connection = connection diff --git a/gcloud/storage/test_blob.py b/gcloud/storage/test_blob.py index 6203fb4be4b4..96200fd47734 100644 --- a/gcloud/storage/test_blob.py +++ b/gcloud/storage/test_blob.py @@ -146,37 +146,31 @@ def test_public_url_w_slash_in_name(self): blob.public_url, 'https://storage.googleapis.com/name/parent%2Fchild') - def _basic_generate_signed_url_helper(self, credentials=None): + def _basic_generate_signed_url_helper(self, credentials=None, + use_client=True): from gcloud._testing import _Monkey from gcloud.storage import blob as MUT BLOB_NAME = 'blob-name' EXPIRATION = '2014-10-16T20:34:37.000Z' connection = _Connection() + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' '&Expiration=2014-10-16T20:34:37.000Z') - _called_require = [] - - def mock_require(connection): - _called_require.append(connection) - return connection - SIGNER = _Signer() - with _Monkey(MUT, generate_signed_url=SIGNER, - _require_connection=mock_require): - signed_uri = blob.generate_signed_url(EXPIRATION, - connection=connection, - credentials=credentials) + with _Monkey(MUT, generate_signed_url=SIGNER): + if use_client: + signed_uri = blob.generate_signed_url(EXPIRATION, + client=client, + credentials=credentials) + else: + signed_uri = blob.generate_signed_url(EXPIRATION, + credentials=credentials) self.assertEqual(signed_uri, URI) - if credentials is None: - self.assertEqual(_called_require, [connection]) - else: - self.assertEqual(_called_require, []) - PATH = '/name/%s' % (BLOB_NAME,) if credentials is None: EXPECTED_ARGS = (_Connection.credentials,) @@ -190,6 +184,10 @@ def mock_require(connection): } self.assertEqual(SIGNER._signed, [(EXPECTED_ARGS, EXPECTED_KWARGS)]) + def test_generate_signed_url_w_no_creds(self): + with self.assertRaises(ValueError): + self._basic_generate_signed_url_helper(use_client=False) + def test_generate_signed_url_w_default_method(self): self._basic_generate_signed_url_helper() @@ -204,6 +202,7 @@ def test_generate_signed_url_w_slash_in_name(self): BLOB_NAME = 'parent/child' EXPIRATION = '2014-10-16T20:34:37.000Z' connection = _Connection() + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' @@ -212,7 +211,7 @@ def test_generate_signed_url_w_slash_in_name(self): SIGNER = _Signer() with _Monkey(MUT, generate_signed_url=SIGNER): signed_url = blob.generate_signed_url(EXPIRATION, - connection=connection) + client=client) self.assertEqual(signed_url, URI) EXPECTED_ARGS = (_Connection.credentials,) @@ -231,6 +230,7 @@ def 
test_generate_signed_url_w_explicit_method(self): BLOB_NAME = 'blob-name' EXPIRATION = '2014-10-16T20:34:37.000Z' connection = _Connection() + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) URI = ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' @@ -239,7 +239,7 @@ def test_generate_signed_url_w_explicit_method(self): SIGNER = _Signer() with _Monkey(MUT, generate_signed_url=SIGNER): signed_uri = blob.generate_signed_url(EXPIRATION, method='POST', - connection=connection) + client=client) self.assertEqual(signed_uri, URI) PATH = '/name/%s' % (BLOB_NAME,) @@ -257,48 +257,46 @@ def test_exists_miss(self): NONESUCH = 'nonesuch' not_found_response = {'status': NOT_FOUND} connection = _Connection(not_found_response) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(NONESUCH, bucket=bucket) - self.assertFalse(blob.exists(connection=connection)) + self.assertFalse(blob.exists(client=client)) + + def test_exists_implicit(self): + from gcloud.storage._testing import _monkey_defaults + from six.moves.http_client import NOT_FOUND + NONESUCH = 'nonesuch' + not_found_response = {'status': NOT_FOUND} + connection = _Connection(not_found_response) + bucket = _Bucket() + blob = self._makeOne(NONESUCH, bucket=bucket) + with _monkey_defaults(connection=connection): + self.assertFalse(blob.exists()) def test_exists_hit(self): from six.moves.http_client import OK BLOB_NAME = 'blob-name' found_response = {'status': OK} connection = _Connection(found_response) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 - self.assertTrue(blob.exists(connection=connection)) + self.assertTrue(blob.exists(client=client)) - def test_rename_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - BLOB_NAME = 'blob-name' - NEW_NAME = 'new-name' - connection = _Connection() - bucket = _Bucket() - blob = self._makeOne(BLOB_NAME, bucket=bucket) - bucket._blobs[BLOB_NAME] = 1 - with _monkey_defaults(connection=connection): - new_blob = blob.rename(NEW_NAME) - self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(new_blob.name, NEW_NAME) - self.assertFalse(BLOB_NAME in bucket._blobs) - self.assertEqual(bucket._deleted, [(BLOB_NAME, connection)]) - self.assertTrue(NEW_NAME in bucket._blobs) - - def test_rename_w_explicit_connection(self): + def test_rename(self): BLOB_NAME = 'blob-name' NEW_NAME = 'new-name' connection = _Connection() + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 - new_blob = blob.rename(NEW_NAME, connection=connection) + new_blob = blob.rename(NEW_NAME, client=client) self.assertEqual(blob.name, BLOB_NAME) self.assertEqual(new_blob.name, NEW_NAME) self.assertFalse(BLOB_NAME in bucket._blobs) - self.assertEqual(bucket._deleted, [(BLOB_NAME, connection)]) + self.assertEqual(bucket._deleted, [(BLOB_NAME, client)]) self.assertTrue(NEW_NAME in bucket._blobs) def test_delete_w_implicit_connection(self): @@ -307,25 +305,27 @@ def test_delete_w_implicit_connection(self): BLOB_NAME = 'blob-name' not_found_response = {'status': NOT_FOUND} connection = _Connection(not_found_response) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 with _monkey_defaults(connection=connection): blob.delete() - self.assertFalse(blob.exists(connection=connection)) - self.assertEqual(bucket._deleted, 
[(BLOB_NAME, connection)]) + self.assertFalse(blob.exists(client=client)) + self.assertEqual(bucket._deleted, [(BLOB_NAME, None)]) def test_delete_w_explicit_connection(self): from six.moves.http_client import NOT_FOUND BLOB_NAME = 'blob-name' not_found_response = {'status': NOT_FOUND} connection = _Connection(not_found_response) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 - blob.delete(connection=connection) - self.assertFalse(blob.exists(connection=connection)) - self.assertEqual(bucket._deleted, [(BLOB_NAME, connection)]) + blob.delete(client=client) + self.assertFalse(blob.exists(client=client)) + self.assertEqual(bucket._deleted, [(BLOB_NAME, client)]) def _download_to_file_helper(self, chunk_size=None): from six.moves.http_client import OK @@ -340,6 +340,7 @@ def _download_to_file_helper(self, chunk_size=None): (chunk1_response, b'abc'), (chunk2_response, b'def'), ) + client = _Client(connection) bucket = _Bucket() MEDIA_LINK = 'http://example.com/media/' properties = {'mediaLink': MEDIA_LINK} @@ -348,7 +349,7 @@ def _download_to_file_helper(self, chunk_size=None): blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = chunk_size fh = BytesIO() - blob.download_to_file(fh, connection=connection) + blob.download_to_file(fh, client=client) self.assertEqual(fh.getvalue(), b'abcdef') def test_download_to_file_default(self): @@ -372,6 +373,7 @@ def test_download_to_filename(self): (chunk1_response, b'abc'), (chunk2_response, b'def'), ) + client = _Client(connection) bucket = _Bucket() MEDIA_LINK = 'http://example.com/media/' properties = {'mediaLink': MEDIA_LINK, @@ -380,7 +382,7 @@ def test_download_to_filename(self): blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 with NamedTemporaryFile() as f: - blob.download_to_filename(f.name, connection=connection) + blob.download_to_filename(f.name, client=client) f.flush() with open(f.name, 'rb') as g: wrote = g.read() @@ -401,13 +403,14 @@ def test_download_as_string(self): (chunk1_response, b'abc'), (chunk2_response, b'def'), ) + client = _Client(connection) bucket = _Bucket() MEDIA_LINK = 'http://example.com/media/' properties = {'mediaLink': MEDIA_LINK} blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 - fetched = blob.download_as_string(connection=connection) + fetched = blob.download_as_string(client=client) self.assertEqual(fetched, b'abcdef') def test_upload_from_file_size_failure(self): @@ -416,9 +419,9 @@ def test_upload_from_file_size_failure(self): blob = self._makeOne(BLOB_NAME, bucket=bucket) file_obj = object() connection = _Connection() + client = _Client(connection) with self.assertRaises(ValueError): - blob.upload_from_file(file_obj, size=None, - connection=connection) + blob.upload_from_file(file_obj, size=None, client=client) def _upload_from_file_simple_test_helper(self, properties=None, content_type_arg=None, @@ -433,6 +436,7 @@ def _upload_from_file_simple_test_helper(self, properties=None, connection = _Connection( (response, b'{}'), ) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties) blob._CHUNK_SIZE_MULTIPLE = 1 @@ -442,7 +446,7 @@ def _upload_from_file_simple_test_helper(self, properties=None, fh.flush() blob.upload_from_file(fh, rewind=True, content_type=content_type_arg, - connection=connection) + client=client) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 
'POST') @@ -503,6 +507,7 @@ def test_upload_from_file_resumable(self): (chunk1_response, b''), (chunk2_response, b'{}'), ) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 @@ -512,7 +517,7 @@ def test_upload_from_file_resumable(self): with NamedTemporaryFile() as fh: fh.write(DATA) fh.flush() - blob.upload_from_file(fh, rewind=True, connection=connection) + blob.upload_from_file(fh, rewind=True, client=client) rq = connection.http._requested self.assertEqual(len(rq), 3) self.assertEqual(rq[0]['method'], 'POST') @@ -561,6 +566,7 @@ def test_upload_from_file_w_slash_in_name(self): (chunk1_response, ''), (chunk2_response, ''), ) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 @@ -568,7 +574,7 @@ def test_upload_from_file_w_slash_in_name(self): with NamedTemporaryFile() as fh: fh.write(DATA) fh.flush() - blob.upload_from_file(fh, rewind=True, connection=connection) + blob.upload_from_file(fh, rewind=True, client=client) self.assertEqual(fh.tell(), len(DATA)) rq = connection.http._requested self.assertEqual(len(rq), 1) @@ -608,6 +614,7 @@ def _upload_from_filename_test_helper(self, properties=None, (chunk1_response, ''), (chunk2_response, ''), ) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket, properties=properties) @@ -617,7 +624,7 @@ def _upload_from_filename_test_helper(self, properties=None, fh.write(DATA) fh.flush() blob.upload_from_filename(fh.name, content_type=content_type_arg, - connection=connection) + client=client) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 'POST') @@ -674,11 +681,12 @@ def test_upload_from_string_w_bytes(self): (chunk1_response, ''), (chunk2_response, ''), ) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 5 - blob.upload_from_string(DATA, connection=connection) + blob.upload_from_string(DATA, client=client) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 'POST') @@ -713,11 +721,12 @@ def test_upload_from_string_w_text(self): (chunk1_response, ''), (chunk2_response, ''), ) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 5 - blob.upload_from_string(DATA, connection=connection) + blob.upload_from_string(DATA, client=client) rq = connection.http._requested self.assertEqual(len(rq), 1) self.assertEqual(rq[0]['method'], 'POST') @@ -760,10 +769,11 @@ def test_make_public_w_explicit_connection(self): permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] after = {'acl': permissive} connection = _Connection(after) + client = _Client(connection) bucket = _Bucket() blob = self._makeOne(BLOB_NAME, bucket=bucket) blob.acl.loaded = True - blob.make_public(connection=connection) + blob.make_public(client=client) self.assertEqual(list(blob.acl), permissive) kw = connection._requested self.assertEqual(len(kw), 1) @@ -1133,14 +1143,14 @@ def __init__(self): self._copied = [] self._deleted = [] - def copy_blob(self, blob, destination_bucket, new_name, connection=None): - self._copied.append((blob, destination_bucket, new_name, connection)) + def copy_blob(self, blob, destination_bucket, new_name, client=None): + self._copied.append((blob, destination_bucket, new_name, client)) 
destination_bucket._blobs[new_name] = self._blobs[blob.name] return blob.__class__(new_name, bucket=destination_bucket) - def delete_blob(self, blob_name, connection=None): + def delete_blob(self, blob_name, client=None): del self._blobs[blob_name] - self._deleted.append((blob_name, connection)) + self._deleted.append((blob_name, client)) class _Signer(object): @@ -1152,3 +1162,9 @@ def __call__(self, *args, **kwargs): self._signed.append((args, kwargs)) return ('http://example.com/abucket/a-blob-name?Signature=DEADBEEF' '&Expiration=%s' % kwargs.get('expiration')) + + +class _Client(object): + + def __init__(self, connection): + self.connection = connection diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index 5bfd7e57d7bb..a19d817b391c 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -125,6 +125,23 @@ def test_ctor_explicit(self): self.assertFalse(bucket._default_object_acl.loaded) self.assertTrue(bucket._default_object_acl.bucket is bucket) + def test__client_or_connection_implicit(self): + from gcloud._testing import _Monkey + from gcloud.storage import bucket as MUT + bucket = self._makeOne() + num_mock_require_calls = [0] + cnxn = object() + + def mock_require(): + num_mock_require_calls[0] += 1 + return cnxn + + with _Monkey(MUT, _require_connection=mock_require): + result = bucket._client_or_connection(None) + + self.assertTrue(result is cnxn) + self.assertEqual(num_mock_require_calls, [1]) + def test_exists_miss(self): from gcloud.exceptions import NotFound @@ -139,7 +156,8 @@ def api_request(cls, *args, **kwargs): BUCKET_NAME = 'bucket-name' bucket = self._makeOne(BUCKET_NAME) - self.assertFalse(bucket.exists(connection=_FakeConnection)) + client = _Client(_FakeConnection) + self.assertFalse(bucket.exists(client=client)) expected_called_kwargs = { 'method': 'GET', 'path': bucket.path, @@ -164,7 +182,8 @@ def api_request(cls, *args, **kwargs): BUCKET_NAME = 'bucket-name' bucket = self._makeOne(BUCKET_NAME) - self.assertTrue(bucket.exists(connection=_FakeConnection)) + client = _Client(_FakeConnection) + self.assertTrue(bucket.exists(client=client)) expected_called_kwargs = { 'method': 'GET', 'path': bucket.path, @@ -242,8 +261,9 @@ def test_get_blob_miss(self): NAME = 'name' NONESUCH = 'nonesuch' connection = _Connection() + client = _Client(connection) bucket = self._makeOne(NAME) - result = bucket.get_blob(NONESUCH, connection=connection) + result = bucket.get_blob(NONESUCH, client=client) self.assertTrue(result is None) kw, = connection._requested self.assertEqual(kw['method'], 'GET') @@ -253,8 +273,9 @@ def test_get_blob_hit(self): NAME = 'name' BLOB_NAME = 'blob-name' connection = _Connection({'name': BLOB_NAME}) + client = _Client(connection) bucket = self._makeOne(NAME) - blob = bucket.get_blob(BLOB_NAME, connection=connection) + blob = bucket.get_blob(BLOB_NAME, client=client) self.assertTrue(blob.bucket is bucket) self.assertEqual(blob.name, BLOB_NAME) kw, = connection._requested @@ -326,8 +347,9 @@ def test_delete_default_miss(self): from gcloud.exceptions import NotFound NAME = 'name' connection = _Connection() + client = _Client(connection) bucket = self._makeOne(NAME) - self.assertRaises(NotFound, bucket.delete, connection=connection) + self.assertRaises(NotFound, bucket.delete, client=client) expected_cw = [{ 'method': 'DELETE', 'path': bucket.path, @@ -340,8 +362,9 @@ def test_delete_explicit_hit(self): GET_BLOBS_RESP = {'items': []} connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True 
+ client = _Client(connection) bucket = self._makeOne(NAME) - result = bucket.delete(force=True, connection=connection) + result = bucket.delete(force=True, client=client) self.assertTrue(result is None) expected_cw = [{ 'method': 'DELETE', @@ -364,8 +387,9 @@ def test_delete_explicit_force_delete_blobs(self): connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP, DELETE_BLOB2_RESP) connection._delete_bucket = True + client = _Client(connection) bucket = self._makeOne(NAME) - result = bucket.delete(force=True, connection=connection) + result = bucket.delete(force=True, client=client) self.assertTrue(result is None) expected_cw = [{ 'method': 'DELETE', @@ -381,8 +405,9 @@ def test_delete_explicit_force_miss_blobs(self): # Note the connection does not have a response for the blob. connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True + client = _Client(connection) bucket = self._makeOne(NAME) - result = bucket.delete(force=True, connection=connection) + result = bucket.delete(force=True, client=client) self.assertTrue(result is None) expected_cw = [{ 'method': 'DELETE', @@ -403,12 +428,13 @@ def test_delete_explicit_too_many(self): } connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True + client = _Client(connection) bucket = self._makeOne(NAME) # Make the Bucket refuse to delete with 2 objects. bucket._MAX_OBJECTS_FOR_ITERATION = 1 self.assertRaises(ValueError, bucket.delete, force=True, - connection=connection) + client=client) self.assertEqual(connection._deleted_buckets, []) def test_delete_blob_miss(self): @@ -416,9 +442,10 @@ def test_delete_blob_miss(self): NAME = 'name' NONESUCH = 'nonesuch' connection = _Connection() + client = _Client(connection) bucket = self._makeOne(NAME) self.assertRaises(NotFound, bucket.delete_blob, NONESUCH, - connection=connection) + client=client) kw, = connection._requested self.assertEqual(kw['method'], 'DELETE') self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH)) @@ -427,8 +454,9 @@ def test_delete_blob_hit(self): NAME = 'name' BLOB_NAME = 'blob-name' connection = _Connection({}) + client = _Client(connection) bucket = self._makeOne(NAME) - result = bucket.delete_blob(BLOB_NAME, connection=connection) + result = bucket.delete_blob(BLOB_NAME, client=client) self.assertTrue(result is None) kw, = connection._requested self.assertEqual(kw['method'], 'DELETE') @@ -437,16 +465,18 @@ def test_delete_blob_hit(self): def test_delete_blobs_empty(self): NAME = 'name' connection = _Connection() + client = _Client(connection) bucket = self._makeOne(NAME) - bucket.delete_blobs([], connection=connection) + bucket.delete_blobs([], client=client) self.assertEqual(connection._requested, []) def test_delete_blobs_hit(self): NAME = 'name' BLOB_NAME = 'blob-name' connection = _Connection({}) + client = _Client(connection) bucket = self._makeOne(NAME) - bucket.delete_blobs([BLOB_NAME], connection=connection) + bucket.delete_blobs([BLOB_NAME], client=client) kw = connection._requested self.assertEqual(len(kw), 1) self.assertEqual(kw[0]['method'], 'DELETE') @@ -458,9 +488,10 @@ def test_delete_blobs_miss_no_on_error(self): BLOB_NAME = 'blob-name' NONESUCH = 'nonesuch' connection = _Connection({}) + client = _Client(connection) bucket = self._makeOne(NAME) self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH], - connection=connection) + client=client) kw = connection._requested self.assertEqual(len(kw), 2) self.assertEqual(kw[0]['method'], 'DELETE') @@ -473,10 +504,11 @@ def 
test_delete_blobs_miss_w_on_error(self): BLOB_NAME = 'blob-name' NONESUCH = 'nonesuch' connection = _Connection({}) + client = _Client(connection) bucket = self._makeOne(NAME) errors = [] bucket.delete_blobs([BLOB_NAME, NONESUCH], errors.append, - connection=connection) + client=client) self.assertEqual(errors, [NONESUCH]) kw = connection._requested self.assertEqual(len(kw), 2) @@ -495,10 +527,11 @@ class _Blob(object): path = '/b/%s/o/%s' % (SOURCE, BLOB_NAME) connection = _Connection({}) + client = _Client(connection) source = self._makeOne(SOURCE) dest = self._makeOne(DEST) blob = _Blob() - new_blob = source.copy_blob(blob, dest, connection=connection) + new_blob = source.copy_blob(blob, dest, client=client) self.assertTrue(new_blob.bucket is dest) self.assertEqual(new_blob.name, BLOB_NAME) kw, = connection._requested @@ -518,11 +551,12 @@ class _Blob(object): path = '/b/%s/o/%s' % (SOURCE, BLOB_NAME) connection = _Connection({}) + client = _Client(connection) source = self._makeOne(SOURCE) dest = self._makeOne(DEST) blob = _Blob() new_blob = source.copy_blob(blob, dest, NEW_NAME, - connection=connection) + client=client) self.assertTrue(new_blob.bucket is dest) self.assertEqual(new_blob.name, NEW_NAME) kw, = connection._requested @@ -544,9 +578,9 @@ def __init__(self, bucket, name): self._bucket = bucket self._name = name - def upload_from_filename(self, filename, connection=None): + def upload_from_filename(self, filename, client=None): _uploaded.append((self._bucket, self._name, filename, - connection)) + client)) bucket = self._makeOne() with _Monkey(MUT, Blob=_Blob): @@ -566,9 +600,9 @@ def __init__(self, bucket, name): self._bucket = bucket self._name = name - def upload_from_filename(self, filename, connection=None): + def upload_from_filename(self, filename, client=None): _uploaded.append((self._bucket, self._name, filename, - connection)) + client)) bucket = self._makeOne() with _Monkey(MUT, Blob=_Blob): @@ -588,8 +622,8 @@ def __init__(self, bucket, name): self._bucket = bucket self._name = name - def upload_from_file(self, fh, connection=None): - _uploaded.append((self._bucket, self._name, fh, connection)) + def upload_from_file(self, fh, client=None): + _uploaded.append((self._bucket, self._name, fh, client)) bucket = self._makeOne() with _Monkey(MUT, Blob=_Blob): @@ -613,8 +647,8 @@ def __init__(self, bucket, name): self._bucket = bucket self._name = name - def upload_from_file(self, fh, connection=None): - _uploaded.append((self._bucket, self._name, fh, connection)) + def upload_from_file(self, fh, client=None): + _uploaded.append((self._bucket, self._name, fh, client)) bucket = self._makeOne() with _Monkey(MUT, Blob=_Blob): @@ -941,9 +975,9 @@ def all(self): def grant_read(self): self._granted = True - def save(self, connection=None): + def save(self, client=None): _saved.append( - (self._bucket, self._name, self._granted, connection)) + (self._bucket, self._name, self._granted, client)) class _Iterator(_BlobIterator): def get_items_from_response(self, response): @@ -963,7 +997,7 @@ def get_items_from_response(self, response): bucket.make_public(recursive=True) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, BLOB_NAME, True, connection)]) + self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None)]) kw = connection._requested self.assertEqual(len(kw), 2) self.assertEqual(kw[0]['method'], 'PATCH') @@ -992,6 +1026,7 @@ def test_make_public_recursive_too_many(self): ], } connection = 
_Connection(AFTER, GET_BLOBS_RESP) + client = _Client(connection) bucket = self._makeOne(NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = True @@ -999,7 +1034,7 @@ def test_make_public_recursive_too_many(self): # Make the Bucket refuse to make_public with 2 objects. bucket._MAX_OBJECTS_FOR_ITERATION = 1 self.assertRaises(ValueError, bucket.make_public, recursive=True, - connection=connection) + client=client) class _Connection(object): @@ -1049,3 +1084,9 @@ class MockFile(io.StringIO): def __init__(self, name, buffer_=None): super(MockFile, self).__init__(buffer_) self.name = name + + +class _Client(object): + + def __init__(self, connection): + self.connection = connection diff --git a/system_tests/storage.py b/system_tests/storage.py index dcfcbd444a3b..b0551b3a87fe 100644 --- a/system_tests/storage.py +++ b/system_tests/storage.py @@ -321,7 +321,8 @@ def tearDown(self): def test_create_signed_read_url(self): blob = storage.Blob(bucket=self.bucket, name='LogoToSign.jpg') expiration = int(time.time() + 5) - signed_url = blob.generate_signed_url(expiration, method='GET') + signed_url = blob.generate_signed_url(expiration, method='GET', + client=CLIENT) response, content = HTTP.request(signed_url, method='GET') self.assertEqual(response.status, 200) @@ -331,7 +332,8 @@ def test_create_signed_delete_url(self): blob = storage.Blob(bucket=self.bucket, name='LogoToSign.jpg') expiration = int(time.time() + 283473274) signed_delete_url = blob.generate_signed_url(expiration, - method='DELETE') + method='DELETE', + client=CLIENT) response, content = HTTP.request(signed_delete_url, method='DELETE') self.assertEqual(response.status, 204)
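
A minimal usage sketch (not part of the patch) may help show the calling pattern after the ``connection=`` to ``client=`` conversion above. It only uses methods whose new signatures appear in this diff; the ``Client(project=...)`` constructor arguments and the project, bucket, and blob names are assumptions for illustration, not values taken from the change.

```python
# Hypothetical end-to-end use of the new ``client=`` keyword.
import time

from gcloud.storage.client import Client

client = Client(project='my-project')  # constructor args assumed; not in this diff

# Client.get_bucket() now reloads the bucket via ``client=self`` internally.
bucket = client.get_bucket('my-bucket')

# Every per-request override that used to be ``connection=`` is now ``client=``.
blob = bucket.get_blob('logo.jpg', client=client)
if blob is not None and blob.exists(client=client):
    data = blob.download_as_string(client=client)

# Omitting ``client`` falls back to the implicit default connection via
# _require_connection() (one must have been configured), matching the old
# ``connection=None`` behavior.
bucket.delete_blob('stale.txt')

# generate_signed_url() now needs either a client or explicit credentials;
# passing neither raises ValueError.
expiration = int(time.time() + 3600)
signed_url = blob.generate_signed_url(expiration, method='GET', client=client)
```

Because each ``_client_or_connection()`` helper falls back to ``_require_connection()`` when ``client`` is ``None``, existing callers that relied on the implicit default connection keep working while the ``connection=`` keyword is phased out.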