diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py index 8fa382865df2..170e1be1f0fa 100644 --- a/gcloud/storage/__init__.py +++ b/gcloud/storage/__init__.py @@ -78,7 +78,7 @@ def set_default_bucket(bucket=None): bucket_name = os.getenv(_BUCKET_ENV_VAR_NAME) if bucket_name is not None: - bucket = Bucket(bucket_name) + bucket = Bucket(None, name=bucket_name) if bucket is not None: _implicit_environ._DEFAULTS.bucket = bucket diff --git a/gcloud/storage/batch.py b/gcloud/storage/batch.py index 3d6806fb603e..8404bef1c587 100644 --- a/gcloud/storage/batch.py +++ b/gcloud/storage/batch.py @@ -28,7 +28,6 @@ from gcloud._helpers import _LocalStack from gcloud.exceptions import make_exception -from gcloud.storage import _implicit_environ from gcloud.storage.connection import Connection @@ -130,17 +129,14 @@ def __setitem__(self, key, value): class Batch(Connection): """Proxy an underlying connection, batching up change operations. - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: the connection for which the batch proxies. + :type client: :class:`gcloud.storage.client.Client` + :param client: The client to use for making connections. 
""" _MAX_BATCH_SIZE = 1000 - def __init__(self, connection=None): - if connection is None: - connection = _implicit_environ.get_default_connection() - + def __init__(self, client): super(Batch, self).__init__() - self._connection = connection + self._client = client self._requests = [] self._target_objects = [] @@ -248,7 +244,7 @@ def finish(self): url = '%s/batch' % self.API_BASE_URL - response, content = self._connection._make_request( + response, content = self._client.connection._make_request( 'POST', url, data=body, headers=headers) responses = list(_unpack_batch_response(response, content)) self._finish_futures(responses) diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py index 480bb46960ba..7bb0a9ab1303 100644 --- a/gcloud/storage/bucket.py +++ b/gcloud/storage/bucket.py @@ -25,7 +25,6 @@ from gcloud.exceptions import NotFound from gcloud.iterator import Iterator from gcloud.storage._helpers import _PropertyMixin -from gcloud.storage._helpers import _require_connection from gcloud.storage._helpers import _scalar_property from gcloud.storage.acl import BucketACL from gcloud.storage.acl import DefaultObjectACL @@ -45,17 +44,18 @@ class _BlobIterator(Iterator): :type extra_params: dict or None :param extra_params: Extra query string parameters for the API call. - :type connection: :class:`gcloud.storage.connection.Connection` - :param connection: The connection to use when sending requests. Defaults - to the bucket's connection + :type client: :class:`gcloud.storage.client.Client` + :param client: Optional. The client to use for making connections. + Defaults to the bucket's client. 
""" - def __init__(self, bucket, extra_params=None, connection=None): - connection = _require_connection(connection) + def __init__(self, bucket, extra_params=None, client=None): + if client is None: + client = bucket.client self.bucket = bucket self.prefixes = set() self._current_prefixes = None super(_BlobIterator, self).__init__( - connection=connection, path=bucket.path + '/o', + connection=client.connection, path=bucket.path + '/o', extra_params=extra_params) def get_items_from_response(self, response): @@ -76,6 +76,10 @@ def get_items_from_response(self, response): class Bucket(_PropertyMixin): """A class representing a Bucket on Cloud Storage. + :type client: :class:`gcloud.storage.client.Client` + :param client: A client which holds credentials and project configuration + for the bucket (which requires a project). + :type name: string :param name: The name of the bucket. """ @@ -87,52 +91,49 @@ class Bucket(_PropertyMixin): This is used in Bucket.delete() and Bucket.make_public(). """ - def __init__(self, name=None): + def __init__(self, client, name=None): super(Bucket, self).__init__(name=name) + self._client = client self._acl = BucketACL(self) self._default_object_acl = DefaultObjectACL(self) def __repr__(self): return '' % self.name - @staticmethod - def _client_or_connection(client): - """Temporary method to get a connection from a client. - - If the client is null, gets the connection from the environment. + def _require_client(self, client): + """Check client or verify over-ride. :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to default connection. + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current object. - :rtype: :class:`gcloud.storage.connection.Connection` - :returns: The connection determined from the ``client`` or environment. 
+ :rtype: :class:`gcloud.storage.client.Client` + :returns: The client passed in or the currently bound client. """ if client is None: - return _require_connection() - else: - return client.connection + client = self.client + return client def exists(self, client=None): """Determines whether or not this bucket exists. :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :rtype: boolean :returns: True if the bucket exists in Cloud Storage. """ - connection = self._client_or_connection(client) + client = self._require_client(client) try: # We only need the status code (200 or not) so we seek to # minimize the returned payload. query_params = {'fields': 'name'} # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. - connection.api_request(method='GET', path=self.path, - query_params=query_params, - _target_object=None) + client.connection.api_request(method='GET', path=self.path, + query_params=query_params, + _target_object=None) # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. @@ -140,7 +141,7 @@ def exists(self, client=None): except NotFound: return False - def create(self, project=None, connection=None): + def create(self, project=None, client=None): """Creates current bucket. If the bucket already exists, will raise @@ -152,17 +153,16 @@ def create(self, project=None, connection=None): :param project: Optional. The project to use when creating bucket. If not provided, falls back to default. - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. 
+ :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to the ``client`` stored on the current bucket. :rtype: :class:`gcloud.storage.bucket.Bucket` :returns: The newly created bucket. :raises: :class:`EnvironmentError` if the project is not given and can't be inferred. """ - connection = _require_connection(connection) + client = self._require_client(client) if project is None: project = get_default_project() if project is None: @@ -170,7 +170,7 @@ def create(self, project=None, connection=None): 'from environment.') query_params = {'project': project} - api_response = connection.api_request( + api_response = client.connection.api_request( method='POST', path='/b', query_params=query_params, data={'name': self.name}, _target_object=self) self._set_properties(api_response) @@ -205,6 +205,11 @@ def path(self): return self.path_helper(self.name) + @property + def client(self): + """The client bound to this bucket.""" + return self._client + def get_blob(self, blob_name, client=None): """Get a blob object by name. @@ -223,15 +228,15 @@ def get_blob(self, blob_name, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :rtype: :class:`gcloud.storage.blob.Blob` or None :returns: The blob object if it exists, otherwise None. """ - connection = self._client_or_connection(client) + client = self._require_client(client) blob = Blob(bucket=self, name=blob_name) try: - response = connection.api_request( + response = client.connection.api_request( method='GET', path=blob.path, _target_object=blob) # NOTE: We assume response.get('name') matches `blob_name`. 
blob._set_properties(response) @@ -244,7 +249,7 @@ def get_blob(self, blob_name, client=None): def list_blobs(self, max_results=None, page_token=None, prefix=None, delimiter=None, versions=None, - projection='noAcl', fields=None, connection=None): + projection='noAcl', fields=None, client=None): """Return an iterator used to find blobs in the bucket. :type max_results: integer or ``NoneType`` @@ -276,10 +281,9 @@ def list_blobs(self, max_results=None, page_token=None, prefix=None, and the language of each blob returned: 'items/contentLanguage,nextPageToken' - :type connection: :class:`gcloud.storage.connection.Connection` or - ``NoneType`` - :param connection: Optional. The connection to use when sending - requests. If not provided, falls back to default. + :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` + :param client: Optional. The client to use. If not passed, falls back + to the ``client`` stored on the current bucket. :rtype: :class:`_BlobIterator`. :returns: An iterator of blobs. @@ -304,7 +308,7 @@ def list_blobs(self, max_results=None, page_token=None, prefix=None, extra_params['fields'] = fields result = self._iterator_class( - self, extra_params=extra_params, connection=connection) + self, extra_params=extra_params, client=client) # Page token must be handled specially since the base `Iterator` # class has it as a reserved property. if page_token is not None: @@ -332,16 +336,16 @@ def delete(self, force=False, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket contains more than 256 objects / blobs. 
""" - connection = self._client_or_connection(client) + client = self._require_client(client) if force: blobs = list(self.list_blobs( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, - connection=connection)) + client=client)) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: message = ( 'Refusing to delete bucket with more than ' @@ -358,8 +362,8 @@ def delete(self, force=False, client=None): # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). - connection.api_request(method='DELETE', path=self.path, - _target_object=None) + client.connection.api_request(method='DELETE', path=self.path, + _target_object=None) def delete_blob(self, blob_name, client=None): """Deletes a blob from the current bucket. @@ -386,7 +390,7 @@ def delete_blob(self, blob_name, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :raises: :class:`gcloud.exceptions.NotFound` (to suppress the exception, call ``delete_blobs``, passing a no-op @@ -394,13 +398,13 @@ def delete_blob(self, blob_name, client=None): >>> bucket.delete_blobs([blob], on_error=lambda blob: None) """ - connection = self._client_or_connection(client) + client = self._require_client(client) blob_path = Blob.path_helper(self.path, blob_name) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). - connection.api_request(method='DELETE', path=blob_path, - _target_object=None) + client.connection.api_request(method='DELETE', path=blob_path, + _target_object=None) def delete_blobs(self, blobs, on_error=None, client=None): """Deletes a list of blobs from the current bucket. 
@@ -417,7 +421,7 @@ def delete_blobs(self, blobs, on_error=None, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :raises: :class:`gcloud.exceptions.NotFound` (if `on_error` is not passed). @@ -450,18 +454,18 @@ def copy_blob(self, blob, destination_bucket, new_name=None, :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :rtype: :class:`gcloud.storage.blob.Blob` :returns: The new Blob. """ - connection = self._client_or_connection(client) + client = self._require_client(client) if new_name is None: new_name = blob.name new_blob = Blob(bucket=destination_bucket, name=new_name) api_path = blob.path + '/copyTo' + new_blob.path - copy_result = connection.api_request(method='POST', path=api_path, - _target_object=new_blob) + copy_result = client.connection.api_request( + method='POST', path=api_path, _target_object=new_blob) new_blob._set_properties(copy_result) return new_blob @@ -500,7 +504,7 @@ def upload_file(self, filename, blob_name=None, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :rtype: :class:`Blob` :returns: The updated Blob object. @@ -546,7 +550,7 @@ def upload_file_object(self, file_obj, blob_name=None, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. :rtype: :class:`Blob` :returns: The updated Blob object. 
@@ -847,10 +851,8 @@ def make_public(self, recursive=False, future=False, client=None): :type client: :class:`gcloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back - to default connection. + to the ``client`` stored on the current bucket. """ - connection = self._client_or_connection(client) - self.acl.all().grant_read() self.acl.save(client=client) @@ -865,7 +867,7 @@ def make_public(self, recursive=False, future=False, client=None): blobs = list(self.list_blobs( projection='full', max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, - connection=connection)) + client=client)) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: message = ( 'Refusing to make public recursively with more than ' diff --git a/gcloud/storage/client.py b/gcloud/storage/client.py index fd0402d232b6..df18da35e119 100644 --- a/gcloud/storage/client.py +++ b/gcloud/storage/client.py @@ -67,7 +67,7 @@ def get_bucket(self, bucket_name): :returns: The bucket matching the name provided. :raises: :class:`gcloud.exceptions.NotFound` """ - bucket = Bucket(bucket_name) + bucket = Bucket(self, name=bucket_name) bucket.reload(client=self) return bucket @@ -115,8 +115,8 @@ def create_bucket(self, bucket_name): :rtype: :class:`gcloud.storage.bucket.Bucket` :returns: The newly created bucket. 
""" - bucket = Bucket(bucket_name) - bucket.create(self.project, connection=self.connection) + bucket = Bucket(self, name=bucket_name) + bucket.create(self.project, client=self) return bucket def list_buckets(self, max_results=None, page_token=None, prefix=None, @@ -205,6 +205,6 @@ def get_items_from_response(self, response): """ for item in response.get('items', []): name = item.get('name') - bucket = Bucket(name) + bucket = Bucket(None, name) bucket._set_properties(item) yield bucket diff --git a/gcloud/storage/test_batch.py b/gcloud/storage/test_batch.py index 94695671090e..d9b332dbf776 100644 --- a/gcloud/storage/test_batch.py +++ b/gcloud/storage/test_batch.py @@ -86,20 +86,9 @@ def _makeOne(self, *args, **kw): def test_ctor_w_explicit_connection(self): http = _HTTP() connection = _Connection(http=http) - batch = self._makeOne(connection) - self.assertTrue(batch._connection is connection) - self.assertEqual(len(batch._requests), 0) - self.assertEqual(len(batch._target_objects), 0) - - def test_ctor_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - - http = _HTTP() - connection = _Connection(http=http) - with _monkey_defaults(connection=connection): - batch = self._makeOne() - - self.assertTrue(batch._connection is connection) + client = _Client(connection) + batch = self._makeOne(client) + self.assertTrue(batch._client is client) self.assertEqual(len(batch._requests), 0) self.assertEqual(len(batch._target_objects), 0) @@ -262,7 +251,8 @@ def test_finish_nonempty(self): expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="' http = _HTTP((expected, _THREE_PART_MIME_RESPONSE)) connection = _Connection(http=http) - batch = self._makeOne(connection) + client = _Client(connection) + batch = self._makeOne(client) batch.API_BASE_URL = 'http://api.example.com' batch._do_request('POST', URL, {}, {'foo': 1, 'bar': 2}, None) batch._do_request('PATCH', URL, {}, {'bar': 3}, None) @@ -311,7 +301,8 @@ def 
test_finish_responses_mismatch(self): expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="' http = _HTTP((expected, _TWO_PART_MIME_RESPONSE_WITH_FAIL)) connection = _Connection(http=http) - batch = self._makeOne(connection) + client = _Client(connection) + batch = self._makeOne(client) batch.API_BASE_URL = 'http://api.example.com' batch._requests.append(('GET', URL, {}, None)) self.assertRaises(ValueError, batch.finish) @@ -323,7 +314,8 @@ def test_finish_nonempty_with_status_failure(self): expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="' http = _HTTP((expected, _TWO_PART_MIME_RESPONSE_WITH_FAIL)) connection = _Connection(http=http) - batch = self._makeOne(connection) + client = _Client(connection) + batch = self._makeOne(client) batch.API_BASE_URL = 'http://api.example.com' target1 = _MockObject() target2 = _MockObject() @@ -363,7 +355,8 @@ def test_finish_nonempty_non_multipart_response(self): expected['content-type'] = 'text/plain' http = _HTTP((expected, 'NOT A MIME_RESPONSE')) connection = _Connection(http=http) - batch = self._makeOne(connection) + client = _Client(connection) + batch = self._makeOne(client) batch._requests.append(('POST', URL, {}, {'foo': 1, 'bar': 2})) batch._requests.append(('PATCH', URL, {}, {'bar': 3})) batch._requests.append(('DELETE', URL, {}, None)) @@ -376,13 +369,14 @@ def test_as_context_mgr_wo_error(self): expected['content-type'] = 'multipart/mixed; boundary="DEADBEEF="' http = _HTTP((expected, _THREE_PART_MIME_RESPONSE)) connection = _Connection(http=http) + client = _Client(connection) self.assertEqual(list(_BATCHES), []) target1 = _MockObject() target2 = _MockObject() target3 = _MockObject() - with self._makeOne(connection) as batch: + with self._makeOne(client) as batch: self.assertEqual(list(_BATCHES), [batch]) batch._make_request('POST', URL, {'foo': 1, 'bar': 2}, target_object=target1) @@ -596,3 +590,9 @@ def request(self, method, uri, headers, body): class _MockObject(object): pass + + 
+class _Client(object): + + def __init__(self, connection): + self.connection = connection diff --git a/gcloud/storage/test_bucket.py b/gcloud/storage/test_bucket.py index a19d817b391c..fcfacf9bde13 100644 --- a/gcloud/storage/test_bucket.py +++ b/gcloud/storage/test_bucket.py @@ -26,23 +26,11 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def test_ctor_w_implicit_connection(self): - from gcloud.storage._testing import _monkey_defaults - connection = _Connection() - bucket = _Bucket() - with _monkey_defaults(connection=connection): - iterator = self._makeOne(bucket) - self.assertTrue(iterator.bucket is bucket) - self.assertTrue(iterator.connection is connection) - self.assertEqual(iterator.path, '%s/o' % bucket.path) - self.assertEqual(iterator.page_number, 0) - self.assertEqual(iterator.next_page_token, None) - self.assertEqual(iterator.prefixes, set()) - def test_ctor_w_explicit_connection(self): connection = _Connection() + client = _Client(connection) bucket = _Bucket() - iterator = self._makeOne(bucket, connection=connection) + iterator = self._makeOne(bucket, client=client) self.assertTrue(iterator.bucket is bucket) self.assertTrue(iterator.connection is connection) self.assertEqual(iterator.path, '%s/o' % bucket.path) @@ -52,8 +40,9 @@ def test_ctor_w_explicit_connection(self): def test_get_items_from_response_empty(self): connection = _Connection() + client = _Client(connection) bucket = _Bucket() - iterator = self._makeOne(bucket, connection=connection) + iterator = self._makeOne(bucket, client=client) blobs = list(iterator.get_items_from_response({})) self.assertEqual(blobs, []) self.assertEqual(iterator.prefixes, set()) @@ -63,8 +52,9 @@ def test_get_items_from_response_non_empty(self): BLOB_NAME = 'blob-name' response = {'items': [{'name': BLOB_NAME}], 'prefixes': ['foo']} connection = _Connection() + client = _Client(connection) bucket = _Bucket() - iterator = self._makeOne(bucket, 
connection=connection) + iterator = self._makeOne(bucket, client=client) blobs = list(iterator.get_items_from_response(response)) self.assertEqual(len(blobs), 1) blob = blobs[0] @@ -81,8 +71,9 @@ def test_get_items_from_response_cumulative_prefixes(self): 'prefixes': ['foo', 'bar'], } connection = _Connection() + client = _Client(connection) bucket = _Bucket() - iterator = self._makeOne(bucket, connection=connection) + iterator = self._makeOne(bucket, client=client) # Parse first response. blobs = list(iterator.get_items_from_response(response1)) self.assertEqual(len(blobs), 1) @@ -98,10 +89,12 @@ def test_get_items_from_response_cumulative_prefixes(self): class Test_Bucket(unittest2.TestCase): - def _makeOne(self, *args, **kw): + def _makeOne(self, client=None, name=None, properties=None): from gcloud.storage.bucket import Bucket - properties = kw.pop('properties', None) - bucket = Bucket(*args, **kw) + if client is None: + connection = _Connection() + client = _Client(connection) + bucket = Bucket(client, name=name) bucket._properties = properties or {} return bucket @@ -117,7 +110,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): NAME = 'name' properties = {'key': 'value'} - bucket = self._makeOne(NAME, properties=properties) + bucket = self._makeOne(name=NAME, properties=properties) self.assertEqual(bucket.name, NAME) self.assertEqual(bucket._properties, properties) self.assertFalse(bucket._acl.loaded) @@ -125,23 +118,6 @@ def test_ctor_explicit(self): self.assertFalse(bucket._default_object_acl.loaded) self.assertTrue(bucket._default_object_acl.bucket is bucket) - def test__client_or_connection_implicit(self): - from gcloud._testing import _Monkey - from gcloud.storage import bucket as MUT - bucket = self._makeOne() - num_mock_require_calls = [0] - cnxn = object() - - def mock_require(): - num_mock_require_calls[0] += 1 - return cnxn - - with _Monkey(MUT, _require_connection=mock_require): - result = bucket._client_or_connection(None) - - 
self.assertTrue(result is cnxn) - self.assertEqual(num_mock_require_calls, [1]) - def test_exists_miss(self): from gcloud.exceptions import NotFound @@ -155,7 +131,7 @@ def api_request(cls, *args, **kwargs): raise NotFound(args) BUCKET_NAME = 'bucket-name' - bucket = self._makeOne(BUCKET_NAME) + bucket = self._makeOne(name=BUCKET_NAME) client = _Client(_FakeConnection) self.assertFalse(bucket.exists(client=client)) expected_called_kwargs = { @@ -181,7 +157,7 @@ def api_request(cls, *args, **kwargs): return object() BUCKET_NAME = 'bucket-name' - bucket = self._makeOne(BUCKET_NAME) + bucket = self._makeOne(name=BUCKET_NAME) client = _Client(_FakeConnection) self.assertTrue(bucket.exists(client=client)) expected_called_kwargs = { @@ -198,19 +174,21 @@ def api_request(cls, *args, **kwargs): def test_create_no_project(self): from gcloud._testing import _monkey_defaults BUCKET_NAME = 'bucket-name' - bucket = self._makeOne(BUCKET_NAME) - CONNECTION = object() + bucket = self._makeOne(name=BUCKET_NAME) + connection = _Connection() + client = _Client(connection) with _monkey_defaults(project=None): self.assertRaises(EnvironmentError, bucket.create, - connection=CONNECTION) + client=client) def test_create_hit_explicit_project(self): BUCKET_NAME = 'bucket-name' DATA = {'name': BUCKET_NAME} connection = _Connection(DATA) + client = _Client(connection) PROJECT = 'PROJECT' - bucket = self._makeOne(BUCKET_NAME) - bucket.create(PROJECT, connection=connection) + bucket = self._makeOne(name=BUCKET_NAME) + bucket.create(PROJECT, client=client) kw, = connection._requested self.assertEqual(kw['method'], 'POST') @@ -223,10 +201,11 @@ def test_create_hit_implicit_project(self): BUCKET_NAME = 'bucket-name' DATA = {'name': BUCKET_NAME} connection = _Connection(DATA) + client = _Client(connection) PROJECT = 'PROJECT' - bucket = self._makeOne(BUCKET_NAME) + bucket = self._makeOne(name=BUCKET_NAME) with _monkey_defaults(project=PROJECT): - bucket.create(connection=connection) + 
bucket.create(client=client) kw, = connection._requested self.assertEqual(kw['method'], 'POST') @@ -254,7 +233,7 @@ def test_path_no_name(self): def test_path_w_name(self): NAME = 'name' - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) self.assertEqual(bucket.path, '/b/%s' % NAME) def test_get_blob_miss(self): @@ -262,7 +241,7 @@ def test_get_blob_miss(self): NONESUCH = 'nonesuch' connection = _Connection() client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) result = bucket.get_blob(NONESUCH, client=client) self.assertTrue(result is None) kw, = connection._requested @@ -274,7 +253,7 @@ def test_get_blob_hit(self): BLOB_NAME = 'blob-name' connection = _Connection({'name': BLOB_NAME}) client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) blob = bucket.get_blob(BLOB_NAME, client=client) self.assertTrue(blob.bucket is bucket) self.assertEqual(blob.name, BLOB_NAME) @@ -285,8 +264,9 @@ def test_get_blob_hit(self): def test_list_blobs_defaults(self): NAME = 'name' connection = _Connection({'items': []}) - bucket = self._makeOne(NAME) - iterator = bucket.list_blobs(connection=connection) + client = _Client(connection) + bucket = self._makeOne(client=client, name=NAME) + iterator = bucket.list_blobs(client=client) blobs = list(iterator) self.assertEqual(blobs, []) kw, = connection._requested @@ -313,7 +293,8 @@ def test_list_blobs_explicit(self): 'fields': FIELDS, } connection = _Connection({'items': []}) - bucket = self._makeOne(NAME) + client = _Client(connection) + bucket = self._makeOne(name=NAME) iterator = bucket.list_blobs( max_results=MAX_RESULTS, page_token=PAGE_TOKEN, @@ -322,7 +303,7 @@ def test_list_blobs_explicit(self): versions=VERSIONS, projection=PROJECTION, fields=FIELDS, - connection=connection, + client=client, ) blobs = list(iterator) self.assertEqual(blobs, []) @@ -334,8 +315,9 @@ def test_list_blobs_explicit(self): def 
test_list_blobs_w_explicit_connection(self): NAME = 'name' connection = _Connection({'items': []}) - bucket = self._makeOne(NAME) - iterator = bucket.list_blobs(connection=connection) + client = _Client(connection) + bucket = self._makeOne(client=client, name=NAME) + iterator = bucket.list_blobs() blobs = list(iterator) self.assertEqual(blobs, []) kw, = connection._requested @@ -348,8 +330,8 @@ def test_delete_default_miss(self): NAME = 'name' connection = _Connection() client = _Client(connection) - bucket = self._makeOne(NAME) - self.assertRaises(NotFound, bucket.delete, client=client) + bucket = self._makeOne(client=client, name=NAME) + self.assertRaises(NotFound, bucket.delete) expected_cw = [{ 'method': 'DELETE', 'path': bucket.path, @@ -363,7 +345,7 @@ def test_delete_explicit_hit(self): connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) result = bucket.delete(force=True, client=client) self.assertTrue(result is None) expected_cw = [{ @@ -388,7 +370,7 @@ def test_delete_explicit_force_delete_blobs(self): DELETE_BLOB2_RESP) connection._delete_bucket = True client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) result = bucket.delete(force=True, client=client) self.assertTrue(result is None) expected_cw = [{ @@ -406,7 +388,7 @@ def test_delete_explicit_force_miss_blobs(self): connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) result = bucket.delete(force=True, client=client) self.assertTrue(result is None) expected_cw = [{ @@ -429,7 +411,7 @@ def test_delete_explicit_too_many(self): connection = _Connection(GET_BLOBS_RESP) connection._delete_bucket = True client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) # Make the Bucket refuse to delete 
with 2 objects. bucket._MAX_OBJECTS_FOR_ITERATION = 1 @@ -443,7 +425,7 @@ def test_delete_blob_miss(self): NONESUCH = 'nonesuch' connection = _Connection() client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) self.assertRaises(NotFound, bucket.delete_blob, NONESUCH, client=client) kw, = connection._requested @@ -455,7 +437,7 @@ def test_delete_blob_hit(self): BLOB_NAME = 'blob-name' connection = _Connection({}) client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) result = bucket.delete_blob(BLOB_NAME, client=client) self.assertTrue(result is None) kw, = connection._requested @@ -466,7 +448,7 @@ def test_delete_blobs_empty(self): NAME = 'name' connection = _Connection() client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) bucket.delete_blobs([], client=client) self.assertEqual(connection._requested, []) @@ -475,7 +457,7 @@ def test_delete_blobs_hit(self): BLOB_NAME = 'blob-name' connection = _Connection({}) client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) bucket.delete_blobs([BLOB_NAME], client=client) kw = connection._requested self.assertEqual(len(kw), 1) @@ -489,7 +471,7 @@ def test_delete_blobs_miss_no_on_error(self): NONESUCH = 'nonesuch' connection = _Connection({}) client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH], client=client) kw = connection._requested @@ -505,7 +487,7 @@ def test_delete_blobs_miss_w_on_error(self): NONESUCH = 'nonesuch' connection = _Connection({}) client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) errors = [] bucket.delete_blobs([BLOB_NAME, NONESUCH], errors.append, client=client) @@ -528,8 +510,8 @@ class _Blob(object): connection = _Connection({}) client = _Client(connection) - source = 
self._makeOne(SOURCE) - dest = self._makeOne(DEST) + source = self._makeOne(name=SOURCE) + dest = self._makeOne(name=DEST) blob = _Blob() new_blob = source.copy_blob(blob, dest, client=client) self.assertTrue(new_blob.bucket is dest) @@ -552,8 +534,8 @@ class _Blob(object): connection = _Connection({}) client = _Client(connection) - source = self._makeOne(SOURCE) - dest = self._makeOne(DEST) + source = self._makeOne(name=SOURCE) + dest = self._makeOne(name=DEST) blob = _Blob() new_blob = source.copy_blob(blob, dest, NEW_NAME, client=client) @@ -673,12 +655,12 @@ def test_id(self): def test_location_getter(self): NAME = 'name' before = {'location': 'AS'} - bucket = self._makeOne(NAME, properties=before) + bucket = self._makeOne(name=NAME, properties=before) self.assertEqual(bucket.location, 'AS') def test_location_setter(self): NAME = 'name' - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) self.assertEqual(bucket.location, None) bucket.location = 'AS' self.assertEqual(bucket.location, 'AS') @@ -688,7 +670,7 @@ def test_lifecycle_rules_getter(self): LC_RULE = {'action': {'type': 'Delete'}, 'condition': {'age': 42}} rules = [LC_RULE] properties = {'lifecycle': {'rule': rules}} - bucket = self._makeOne(NAME, properties=properties) + bucket = self._makeOne(name=NAME, properties=properties) self.assertEqual(bucket.lifecycle_rules, rules) # Make sure it's a copy self.assertFalse(bucket.lifecycle_rules is rules) @@ -697,7 +679,7 @@ def test_lifecycle_rules_setter(self): NAME = 'name' LC_RULE = {'action': {'type': 'Delete'}, 'condition': {'age': 42}} rules = [LC_RULE] - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) self.assertEqual(bucket.lifecycle_rules, []) bucket.lifecycle_rules = rules self.assertEqual(bucket.lifecycle_rules, rules) @@ -711,7 +693,7 @@ def test_cors_getter(self): 'responseHeader': ['Content-Type'], } properties = {'cors': [CORS_ENTRY, {}]} - bucket = self._makeOne(NAME, properties=properties) + bucket = 
self._makeOne(name=NAME, properties=properties) entries = bucket.cors self.assertEqual(len(entries), 2) self.assertEqual(entries[0], CORS_ENTRY) @@ -727,7 +709,7 @@ def test_cors_setter(self): 'origin': ['127.0.0.1'], 'responseHeader': ['Content-Type'], } - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) self.assertEqual(bucket.cors, []) bucket.cors = [CORS_ENTRY] @@ -743,7 +725,7 @@ def test_get_logging_w_prefix(self): 'logObjectPrefix': LOG_PREFIX, }, } - bucket = self._makeOne(NAME, properties=before) + bucket = self._makeOne(name=NAME, properties=before) info = bucket.get_logging() self.assertEqual(info['logBucket'], LOG_BUCKET) self.assertEqual(info['logObjectPrefix'], LOG_PREFIX) @@ -752,7 +734,7 @@ def test_enable_logging_defaults(self): NAME = 'name' LOG_BUCKET = 'logs' before = {'logging': None} - bucket = self._makeOne(NAME, properties=before) + bucket = self._makeOne(name=NAME, properties=before) self.assertTrue(bucket.get_logging() is None) bucket.enable_logging(LOG_BUCKET) info = bucket.get_logging() @@ -764,7 +746,7 @@ def test_enable_logging_explicit(self): LOG_BUCKET = 'logs' LOG_PFX = 'pfx' before = {'logging': None} - bucket = self._makeOne(NAME, properties=before) + bucket = self._makeOne(name=NAME, properties=before) self.assertTrue(bucket.get_logging() is None) bucket.enable_logging(LOG_BUCKET, LOG_PFX) info = bucket.get_logging() @@ -774,7 +756,7 @@ def test_enable_logging_explicit(self): def test_disable_logging(self): NAME = 'name' before = {'logging': {'logBucket': 'logs', 'logObjectPrefix': 'pfx'}} - bucket = self._makeOne(NAME, properties=before) + bucket = self._makeOne(name=NAME, properties=before) self.assertTrue(bucket.get_logging() is not None) bucket.disable_logging() self.assertTrue(bucket.get_logging() is None) @@ -847,18 +829,18 @@ def test_time_created_unset(self): def test_versioning_enabled_getter_missing(self): NAME = 'name' - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) 
self.assertEqual(bucket.versioning_enabled, False) def test_versioning_enabled_getter(self): NAME = 'name' before = {'versioning': {'enabled': True}} - bucket = self._makeOne(NAME, properties=before) + bucket = self._makeOne(name=NAME, properties=before) self.assertEqual(bucket.versioning_enabled, True) def test_versioning_enabled_setter(self): NAME = 'name' - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) self.assertFalse(bucket.versioning_enabled) bucket.versioning_enabled = True self.assertTrue(bucket.versioning_enabled) @@ -867,7 +849,7 @@ def test_configure_website_defaults(self): NAME = 'name' UNSET = {'website': {'mainPageSuffix': None, 'notFoundPage': None}} - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) bucket.configure_website() self.assertEqual(bucket._properties, UNSET) @@ -875,7 +857,7 @@ def test_configure_website_explicit(self): NAME = 'name' WEBSITE_VAL = {'website': {'mainPageSuffix': 'html', 'notFoundPage': '404.html'}} - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) bucket.configure_website('html', '404.html') self.assertEqual(bucket._properties, WEBSITE_VAL) @@ -883,22 +865,21 @@ def test_disable_website(self): NAME = 'name' UNSET = {'website': {'mainPageSuffix': None, 'notFoundPage': None}} - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) bucket.disable_website() self.assertEqual(bucket._properties, UNSET) def test_make_public_defaults(self): from gcloud.storage.acl import _ACLEntity - from gcloud.storage._testing import _monkey_defaults NAME = 'name' permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] after = {'acl': permissive, 'defaultObjectAcl': []} connection = _Connection(after) - bucket = self._makeOne(NAME) + client = _Client(connection) + bucket = self._makeOne(client=client, name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = True - with _monkey_defaults(connection=connection): - bucket.make_public() + 
bucket.make_public(client=client) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) kw = connection._requested @@ -910,7 +891,6 @@ def test_make_public_defaults(self): def _make_public_w_future_helper(self, default_object_acl_loaded=True): from gcloud.storage.acl import _ACLEntity - from gcloud.storage._testing import _monkey_defaults NAME = 'name' permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] after1 = {'acl': permissive, 'defaultObjectAcl': []} @@ -923,11 +903,11 @@ def _make_public_w_future_helper(self, default_object_acl_loaded=True): # We return the same value for default_object_acl.reload() # to consume. connection = _Connection(after1, after1, after2) - bucket = self._makeOne(NAME) + client = _Client(connection) + bucket = self._makeOne(client=client, name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = default_object_acl_loaded - with _monkey_defaults(connection=connection): - bucket.make_public(future=True) + bucket.make_public(future=True, client=client) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), permissive) kw = connection._requested @@ -954,7 +934,6 @@ def test_make_public_w_future_reload_default(self): def test_make_public_recursive(self): from gcloud.storage.acl import _ACLEntity from gcloud.storage.bucket import _BlobIterator - from gcloud.storage._testing import _monkey_defaults _saved = [] class _Blob(object): @@ -989,15 +968,15 @@ def get_items_from_response(self, response): permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}] after = {'acl': permissive, 'defaultObjectAcl': []} connection = _Connection(after, {'items': [{'name': BLOB_NAME}]}) - bucket = self._makeOne(NAME) + client = _Client(connection) + bucket = self._makeOne(client=client, name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = True bucket._iterator_class = _Iterator - with 
_monkey_defaults(connection=connection): - bucket.make_public(recursive=True) + bucket.make_public(recursive=True, client=client) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None)]) + self.assertEqual(_saved, [(bucket, BLOB_NAME, True, client)]) kw = connection._requested self.assertEqual(len(kw), 2) self.assertEqual(kw[0]['method'], 'PATCH') @@ -1027,7 +1006,7 @@ def test_make_public_recursive_too_many(self): } connection = _Connection(AFTER, GET_BLOBS_RESP) client = _Client(connection) - bucket = self._makeOne(NAME) + bucket = self._makeOne(name=NAME) bucket.acl.loaded = True bucket.default_object_acl.loaded = True @@ -1077,6 +1056,10 @@ class _Bucket(object): path = '/b/name' name = 'name' + def __init__(self): + connection = _Connection() + self.client = _Client(connection) + class MockFile(io.StringIO): name = None diff --git a/system_tests/storage.py b/system_tests/storage.py index b0551b3a87fe..67fb2fc59a09 100644 --- a/system_tests/storage.py +++ b/system_tests/storage.py @@ -51,9 +51,15 @@ def setUp(self): self.case_buckets_to_delete = [] def tearDown(self): - with storage.Batch(): + with storage.Batch(CLIENT) as batch: + # Stop-gap measure to support batches during transition + # to clients from implicit behavior. + batch_client = storage.Client( + project=CLIENT.project, + credentials=CLIENT.connection.credentials) + batch_client.connection = batch for bucket_name in self.case_buckets_to_delete: - storage.Bucket(bucket_name).delete() + storage.Bucket(batch_client, name=bucket_name).delete() def test_create_bucket(self): new_bucket_name = 'a-new-bucket'