diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py
index ea2345719..674dced79 100644
--- a/google/cloud/storage/_helpers.py
+++ b/google/cloud/storage/_helpers.py
@@ -339,7 +339,7 @@ def patch(
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=_DEFAULT_TIMEOUT,
-        retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
         override_unlocked_retention=False,
     ):
         """Sends all changed properties in a PATCH request.
diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py
index d20ca135b..1384a5075 100644
--- a/google/cloud/storage/acl.py
+++ b/google/cloud/storage/acl.py
@@ -752,3 +752,185 @@ def save_path(self):
     def user_project(self):
         """Compute the user project charged for API requests for this ACL."""
         return self.blob.user_project
+
+    def save(
+        self,
+        acl=None,
+        client=None,
+        if_generation_match=None,
+        if_generation_not_match=None,
+        if_metageneration_match=None,
+        if_metageneration_not_match=None,
+        timeout=_DEFAULT_TIMEOUT,
+        retry=DEFAULT_RETRY,
+    ):
+        """Save this ACL for the current object.
+
+        If :attr:`user_project` is set, bills the API request to that project.
+
+        :type acl: :class:`google.cloud.storage.acl.ACL`, or a compatible list.
+        :param acl: The ACL object to save. If left blank, this will save
+                    current entries.
+
+        :type client: :class:`~google.cloud.storage.client.Client` or
+                      ``NoneType``
+        :param client: (Optional) The client to use. If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+
+        :type if_generation_match: long
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
+
+        :type if_generation_not_match: long
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
+
+        :type if_metageneration_not_match: long
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
+
+        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
+        :param retry:
+            (Optional) How to retry the RPC. See: :ref:`configuring_retries`
+        """
+        super().save(
+            acl=acl,
+            client=client,
+            if_generation_match=if_generation_match,
+            if_generation_not_match=if_generation_not_match,
+            if_metageneration_match=if_metageneration_match,
+            if_metageneration_not_match=if_metageneration_not_match,
+            timeout=timeout,
+            retry=retry,
+        )
+
+    def save_predefined(
+        self,
+        predefined,
+        client=None,
+        if_generation_match=None,
+        if_generation_not_match=None,
+        if_metageneration_match=None,
+        if_metageneration_not_match=None,
+        timeout=_DEFAULT_TIMEOUT,
+        retry=DEFAULT_RETRY,
+    ):
+        """Save this ACL for the current object using a predefined ACL.
+
+        If :attr:`user_project` is set, bills the API request to that project.
+
+        :type predefined: str
+        :param predefined: An identifier for a predefined ACL. Must be one
+                           of the keys in :attr:`PREDEFINED_JSON_ACLS`
+                           or :attr:`PREDEFINED_XML_ACLS` (which will be
+                           aliased to the corresponding JSON name).
+                           If passed, `acl` must be None.
+
+        :type client: :class:`~google.cloud.storage.client.Client` or
+                      ``NoneType``
+        :param client: (Optional) The client to use. If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+
+        :type if_generation_match: long
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
+
+        :type if_generation_not_match: long
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
+
+        :type if_metageneration_not_match: long
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
+
+        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
+        :param retry:
+            (Optional) How to retry the RPC. See: :ref:`configuring_retries`
+        """
+        super().save_predefined(
+            predefined=predefined,
+            client=client,
+            if_generation_match=if_generation_match,
+            if_generation_not_match=if_generation_not_match,
+            if_metageneration_match=if_metageneration_match,
+            if_metageneration_not_match=if_metageneration_not_match,
+            timeout=timeout,
+            retry=retry,
+        )
+
+    def clear(
+        self,
+        client=None,
+        if_generation_match=None,
+        if_generation_not_match=None,
+        if_metageneration_match=None,
+        if_metageneration_not_match=None,
+        timeout=_DEFAULT_TIMEOUT,
+        retry=DEFAULT_RETRY,
+    ):
+        """Remove all ACL entries.
+
+        If :attr:`user_project` is set, bills the API request to that project.
+
+        Note that this won't actually remove *ALL* the rules, but it
+        will remove all the non-default rules. In short, you'll still
+        have access to a bucket that you created even after you clear
+        ACL rules with this method.
+
+        :type client: :class:`~google.cloud.storage.client.Client` or
+                      ``NoneType``
+        :param client: (Optional) The client to use. If not passed, falls back
+                       to the ``client`` stored on the ACL's parent.
+
+        :type if_generation_match: long
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
+
+        :type if_generation_not_match: long
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
+
+        :type if_metageneration_match: long
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
+
+        :type if_metageneration_not_match: long
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
+
+        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
+        :param retry:
+            (Optional) How to retry the RPC. See: :ref:`configuring_retries`
+        """
+        super().clear(
+            client=client,
+            if_generation_match=if_generation_match,
+            if_generation_not_match=if_generation_not_match,
+            if_metageneration_match=if_metageneration_match,
+            if_metageneration_not_match=if_metageneration_not_match,
+            timeout=timeout,
+            retry=retry,
+        )
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index ef58e8b0e..3cda582ca 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -76,7 +76,6 @@
 from google.cloud.storage.retry import DEFAULT_RETRY
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
-from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED
 from google.cloud.storage.fileio import BlobReader
 from google.cloud.storage.fileio import BlobWriter
 
@@ -791,7 +790,7 @@ def delete(
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=_DEFAULT_TIMEOUT,
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Deletes a blob from Cloud Storage.
 
@@ -825,14 +824,21 @@ def delete(
             for the server response. See: :ref:`configuring_timeouts`
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry:
-            (Optional) How to retry the RPC.
-            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry
-            policy which will only enable retries if ``if_generation_match`` or ``generation``
-            is set, in order to ensure requests are idempotent before retrying them.
-            Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
-            to enable retries regardless of generation precondition setting.
-            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+        :param retry: (Optional) How to retry the RPC. A None value will disable
+            retries. A google.api_core.retry.Retry value will enable retries,
+            and the object will define retriable response codes and errors and
+            configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+            Retry object and activates it only if certain conditions are met.
+            This class exists to provide safe defaults for RPC calls that are
+            not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a
+            condition such as if_generation_match is set.
+
+            See the retry.py source code and docstrings in this package
+            (google.cloud.storage.retry) for information on retry types and how
+            to configure them.
 
         :raises: :class:`google.cloud.exceptions.NotFound`
             (propagated from
@@ -2507,7 +2513,7 @@ def _prep_and_do_upload(
         if_metageneration_not_match=None,
         timeout=_DEFAULT_TIMEOUT,
         checksum="auto",
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
         command=None,
     ):
         """Upload the contents of this blob from a file-like object.
@@ -2664,7 +2670,7 @@ def upload_from_file(
         if_metageneration_not_match=None,
         timeout=_DEFAULT_TIMEOUT,
         checksum="auto",
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Upload the contents of this blob from a file-like object.
 
@@ -2757,13 +2763,21 @@ def upload_from_file(
             back to md5 otherwise.
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC.
-            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry
-            policy which will only enable retries if ``if_generation_match`` or ``generation``
-            is set, in order to ensure requests are idempotent before retrying them.
-            Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
-            to enable retries regardless of generation precondition setting.
-            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+        :param retry: (Optional) How to retry the RPC. A None value will disable
+            retries. A google.api_core.retry.Retry value will enable retries,
+            and the object will define retriable response codes and errors and
+            configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+            Retry object and activates it only if certain conditions are met.
+            This class exists to provide safe defaults for RPC calls that are
+            not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a
+            condition such as if_generation_match is set.
+
+            See the retry.py source code and docstrings in this package
+            (google.cloud.storage.retry) for information on retry types and how
+            to configure them.
 
         :raises: :class:`~google.cloud.exceptions.GoogleCloudError`
             if the upload response returns an error status.
@@ -2821,7 +2835,7 @@ def upload_from_filename(
         if_metageneration_not_match=None,
         timeout=_DEFAULT_TIMEOUT,
         checksum="auto",
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Upload this blob's contents from the content of a named file.
 
@@ -2901,13 +2915,21 @@ def upload_from_filename(
             back to md5 otherwise.
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC.
-            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry
-            policy which will only enable retries if ``if_generation_match`` or ``generation``
-            is set, in order to ensure requests are idempotent before retrying them.
-            Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
-            to enable retries regardless of generation precondition setting.
-            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+        :param retry: (Optional) How to retry the RPC. A None value will disable
+            retries. A google.api_core.retry.Retry value will enable retries,
+            and the object will define retriable response codes and errors and
+            configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+            Retry object and activates it only if certain conditions are met.
+            This class exists to provide safe defaults for RPC calls that are
+            not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a
+            condition such as if_generation_match is set.
+
+            See the retry.py source code and docstrings in this package
+            (google.cloud.storage.retry) for information on retry types and how
+            to configure them.
         """
 
         self._handle_filename_and_upload(
@@ -2937,7 +2959,7 @@ def upload_from_string(
         if_metageneration_not_match=None,
         timeout=_DEFAULT_TIMEOUT,
         checksum="auto",
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Upload contents of this blob from the provided string.
 
@@ -3009,13 +3031,21 @@ def upload_from_string(
             back to md5 otherwise.
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC.
-            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry
-            policy which will only enable retries if ``if_generation_match`` or ``generation``
-            is set, in order to ensure requests are idempotent before retrying them.
-            Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
-            to enable retries regardless of generation precondition setting.
-            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+        :param retry: (Optional) How to retry the RPC. A None value will disable
+            retries. A google.api_core.retry.Retry value will enable retries,
+            and the object will define retriable response codes and errors and
+            configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+            Retry object and activates it only if certain conditions are met.
+            This class exists to provide safe defaults for RPC calls that are
+            not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a
+            condition such as if_generation_match is set.
+
+            See the retry.py source code and docstrings in this package
+            (google.cloud.storage.retry) for information on retry types and how
+            to configure them.
         """
         data = _to_bytes(data, encoding="utf-8")
         string_buffer = BytesIO(data)
@@ -3048,7 +3078,7 @@ def create_resumable_upload_session(
         if_generation_not_match=None,
         if_metageneration_match=None,
         if_metageneration_not_match=None,
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Create a resumable upload session.
 
@@ -3144,13 +3174,21 @@ def create_resumable_upload_session(
             (Optional) See :ref:`using-if-metageneration-not-match`
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC.
-            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry
-            policy which will only enable retries if ``if_generation_match`` or ``generation``
-            is set, in order to ensure requests are idempotent before retrying them.
-            Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
-            to enable retries regardless of generation precondition setting.
-            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+        :param retry: (Optional) How to retry the RPC. A None value will disable
+            retries. A google.api_core.retry.Retry value will enable retries,
+            and the object will define retriable response codes and errors and
+            configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+            Retry object and activates it only if certain conditions are met.
+            This class exists to provide safe defaults for RPC calls that are
+            not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a
+            condition such as if_generation_match is set.
+
+            See the retry.py source code and docstrings in this package
+            (google.cloud.storage.retry) for information on retry types and how
+            to configure them.
 
         :rtype: str
         :returns: The resumable upload session URL. The upload can be
@@ -3400,7 +3438,7 @@ def make_public(
         if_generation_not_match=None,
         if_metageneration_match=None,
         if_metageneration_not_match=None,
-        retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Update blob's ACL, granting read access to anonymous users.
 
@@ -3454,7 +3492,7 @@ def make_private(
         if_generation_not_match=None,
         if_metageneration_match=None,
         if_metageneration_not_match=None,
-        retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Update blob's ACL, revoking read access for anonymous users.
diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index e6de1ac42..10156c795 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -1725,7 +1725,7 @@ def delete_blob(
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=_DEFAULT_TIMEOUT,
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Deletes a blob from the current bucket.
 
@@ -1765,14 +1765,21 @@ def delete_blob(
             for the server response. See: :ref:`configuring_timeouts`
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry:
-            (Optional) How to retry the RPC.
-            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry
-            policy which will only enable retries if ``if_generation_match`` or ``generation``
-            is set, in order to ensure requests are idempotent before retrying them.
-            Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
-            to enable retries regardless of generation precondition setting.
-            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+        :param retry: (Optional) How to retry the RPC. A None value will disable
+            retries. A google.api_core.retry.Retry value will enable retries,
+            and the object will define retriable response codes and errors and
+            configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+            Retry object and activates it only if certain conditions are met.
+            This class exists to provide safe defaults for RPC calls that are
+            not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a
+            condition such as if_generation_match is set.
+
+            See the retry.py source code and docstrings in this package
+            (google.cloud.storage.retry) for information on retry types and how
+            to configure them.
 
         :raises: :class:`google.cloud.exceptions.NotFound` Raises a NotFound
             if the blob isn't found. To suppress
@@ -1813,7 +1820,7 @@ def delete_blobs(
         if_generation_not_match=None,
         if_metageneration_match=None,
         if_metageneration_not_match=None,
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     ):
         """Deletes a list of blobs from the current bucket.
 
@@ -1873,14 +1880,21 @@ def delete_blobs(
             for the server response. See: :ref:`configuring_timeouts`
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry:
-            (Optional) How to retry the RPC.
-            The default value is ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED``, a conditional retry
-            policy which will only enable retries if ``if_generation_match`` or ``generation``
-            is set, in order to ensure requests are idempotent before retrying them.
-            Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
-            to enable retries regardless of generation precondition setting.
-            See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).
+        :param retry: (Optional) How to retry the RPC. A None value will disable
+            retries. A google.api_core.retry.Retry value will enable retries,
+            and the object will define retriable response codes and errors and
+            configure backoff and timeout options.
+
+            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a
+            Retry object and activates it only if certain conditions are met.
+            This class exists to provide safe defaults for RPC calls that are
+            not technically safe to retry normally (due to potential data
+            duplication or other side-effects) but become safe to retry if a
+            condition such as if_generation_match is set.
+
+            See the retry.py source code and docstrings in this package
+            (google.cloud.storage.retry) for information on retry types and how
+            to configure them.
 
         :raises: :class:`~google.cloud.exceptions.NotFound` (if
             `on_error` is not passed).
diff --git a/google/cloud/storage/fileio.py b/google/cloud/storage/fileio.py
index 7ffd24d45..2b4754648 100644
--- a/google/cloud/storage/fileio.py
+++ b/google/cloud/storage/fileio.py
@@ -18,7 +18,6 @@
 from google.api_core.exceptions import RequestRangeNotSatisfiable
 
 from google.cloud.storage.retry import DEFAULT_RETRY
-from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
 from google.cloud.storage.retry import ConditionalRetryPolicy
 
 
@@ -297,7 +296,7 @@ def __init__(
         blob,
         chunk_size=None,
         ignore_flush=False,
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
         **upload_kwargs,
     ):
         for kwarg in upload_kwargs:
diff --git a/tests/conformance/retry_strategy_test_data.json b/tests/conformance/retry_strategy_test_data.json
index d718f09b1..e50018081 100644
--- a/tests/conformance/retry_strategy_test_data.json
+++ b/tests/conformance/retry_strategy_test_data.json
@@ -36,6 +36,9 @@
       {"name": "storage.object_acl.list", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.objects.get", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.objects.list", "resources": ["BUCKET", "OBJECT"]},
+      {"name": "storage.objects.delete", "resources": ["BUCKET", "OBJECT"]},
+      {"name": "storage.objects.insert", "resources": ["BUCKET"]},
+      {"name": "storage.objects.patch", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.serviceaccount.get", "resources": []}
     ],
     "preconditionProvided": false,
@@ -62,7 +65,6 @@
       {"name": "storage.hmacKey.update", "resources": ["HMAC_KEY"]},
       {"name": "storage.objects.compose", "resources": ["BUCKET", "OBJECT"]},
      {"name": "storage.objects.copy", "resources": ["BUCKET", "OBJECT"]},
-      {"name": "storage.objects.delete", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.objects.insert", "resources": ["BUCKET"]},
       {"name": "storage.objects.patch", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.objects.rewrite", "resources": ["BUCKET", "OBJECT"]},
@@ -89,9 +91,6 @@
       {"name": "storage.hmacKey.update", "resources": ["HMAC_KEY"]},
       {"name": "storage.objects.compose", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.objects.copy", "resources": ["BUCKET", "OBJECT"]},
-      {"name": "storage.objects.delete", "resources": ["BUCKET", "OBJECT"]},
-      {"name": "storage.objects.insert", "resources": ["BUCKET"]},
-      {"name": "storage.objects.patch", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.objects.rewrite", "resources": ["BUCKET", "OBJECT"]},
       {"name": "storage.objects.update", "resources": ["BUCKET", "OBJECT"]}
     ],
diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py
index 45c0cb51e..819218d24 100644
--- a/tests/conformance/test_conformance.py
+++ b/tests/conformance/test_conformance.py
@@ -774,6 +774,11 @@ def object_acl_clear(client, _preconditions, **resources):
         blobreader_read,
     ],
     "storage.objects.list": [client_list_blobs, bucket_list_blobs, bucket_delete],
+    "storage.objects.delete": [
+        bucket_delete_blob,
+        bucket_delete_blobs,
+        blob_delete,
+    ],
     "storage.serviceaccount.get": [client_get_service_account_email],  # S1 end
     "storage.buckets.patch": [
         bucket_patch,
@@ -791,12 +796,6 @@
     "storage.hmacKey.update": [hmac_key_update],
     "storage.objects.compose": [blob_compose],
     "storage.objects.copy": [bucket_copy_blob, bucket_rename_blob],
-    "storage.objects.delete": [
-        bucket_delete_blob,
-        bucket_delete_blobs,
-        blob_delete,
-        bucket_rename_blob,
-    ],
     "storage.objects.insert": [
         blob_upload_from_string_multipart,
         blobwriter_write_multipart,
diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py
index 224f4841b..d628bfddb 100644
--- a/tests/unit/test__helpers.py
+++ b/tests/unit/test__helpers.py
@@ -359,7 +359,7 @@ def test_patch_w_defaults(self):
             expected_data,
             query_params=expected_query_params,
             timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
             _target_object=derived,
         )
 
@@ -437,7 +437,7 @@ def test_patch_w_user_project_w_explicit_client(self):
             expected_data,
             query_params=expected_query_params,
             timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
             _target_object=derived,
         )
diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py
index 8d2fa39f5..bce716c74 100644
--- a/tests/unit/test_acl.py
+++ b/tests/unit/test_acl.py
@@ -1070,6 +1070,59 @@ def test_user_project(self):
         blob.user_project = USER_PROJECT
         self.assertEqual(acl.user_project, USER_PROJECT)
 
+    def test_passthrough_methods(self):
+        NAME = "name"
+        BLOB_NAME = "blob-name"
+        bucket = _Bucket(NAME)
+        blob = _Blob(bucket, BLOB_NAME)
+        acl = self._make_one(blob)
+
+        client = mock.Mock()
+
+        with mock.patch("google.cloud.storage.acl.ACL.clear") as m:
+            kwargs = {
+                "client": client,
+                "if_generation_match": 1,
+                "if_generation_not_match": 2,
+                "if_metageneration_match": 3,
+                "if_metageneration_not_match": 4,
+                "timeout": 60,
+                "retry": None,
+            }
+
+            acl.clear(**kwargs)
+            m.assert_called_once_with(**kwargs)
+
+        with mock.patch("google.cloud.storage.acl.ACL.save") as m:
+            kwargs = {
+                "acl": [],
+                "client": client,
+                "if_generation_match": 1,
+                "if_generation_not_match": 2,
+                "if_metageneration_match": 3,
+                "if_metageneration_not_match": 4,
+                "timeout": 60,
+                "retry": None,
+            }
+
+            acl.save(**kwargs)
+            m.assert_called_once_with(**kwargs)
+
+        with mock.patch("google.cloud.storage.acl.ACL.save_predefined") as m:
+            kwargs = {
+                "predefined": "predef",
+                "client": client,
+                "if_generation_match": 1,
+                "if_generation_not_match": 2,
+                "if_metageneration_match": 3,
+                "if_metageneration_not_match": 4,
+                "timeout": 60,
+                "retry": None,
+            }
+
+            acl.save_predefined(**kwargs)
+            m.assert_called_once_with(**kwargs)
+
 
 class _Blob(object):
     user_project = None
diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index 707ff4a70..06ba62220 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -36,10 +36,7 @@
 from google.cloud.storage._helpers import _UTC
 from google.cloud.storage.exceptions import DataCorruption
 from google.cloud.storage.exceptions import InvalidResponse
-from google.cloud.storage.retry import (
-    DEFAULT_RETRY,
-    DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
-)
+from google.cloud.storage.retry import DEFAULT_RETRY
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
 from tests.unit.test__helpers import GCCL_INVOCATION_TEST_CONST
@@ -901,7 +898,7 @@ def test_delete_wo_generation(self):
                 None,
                 None,
                 None,
-                DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+                DEFAULT_RETRY,
             )
         ],
     )
@@ -928,7 +925,7 @@ def test_delete_w_generation(self):
                 None,
                 None,
                 None,
-                DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+                DEFAULT_RETRY,
             )
         ],
    )
@@ -955,7 +952,7 @@ def test_delete_w_generation_match(self):
                 None,
                 None,
                 None,
-                DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+                DEFAULT_RETRY,
             )
         ],
     )
@@ -3394,8 +3391,7 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs):
         if_generation_not_match = kwargs.get("if_generation_not_match", None)
         if_metageneration_match = kwargs.get("if_metageneration_match", None)
         if_metageneration_not_match = kwargs.get("if_metageneration_not_match", None)
-        default_retry = DEFAULT_RETRY_IF_GENERATION_SPECIFIED
-        retry = kwargs.get("retry", default_retry)
+        retry = kwargs.get("retry", DEFAULT_RETRY)
         ret_val = blob.upload_from_file(
             stream,
             size=len(data),
@@ -3485,7 +3481,7 @@ def _do_upload_mock_call_helper(
         expected_timeout = self._get_default_timeout() if timeout is None else timeout
 
         if not retry:
-            retry = DEFAULT_RETRY_IF_GENERATION_SPECIFIED
+            retry = DEFAULT_RETRY
         self.assertEqual(
             kwargs,
             {
@@ -3617,6 +3613,8 @@ def _upload_from_string_helper(self, data, **kwargs):
         extra_kwargs = {}
         if "retry" in kwargs:
             extra_kwargs["retry"] = kwargs["retry"]
+        else:
+            extra_kwargs["retry"] = DEFAULT_RETRY
         # Check the mock.
         payload = _to_bytes(data, encoding="utf-8")
         stream = self._do_upload_mock_call_helper(
@@ -4145,7 +4143,7 @@ def test_make_public_w_defaults(self):
            expected_patch_data,
            query_params=expected_query_params,
            timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
         )
 
     def test_make_public_w_timeout(self):
@@ -4172,7 +4170,7 @@ def test_make_public_w_timeout(self):
            expected_patch_data,
            query_params=expected_query_params,
            timeout=timeout,
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
         )
 
     def test_make_public_w_preconditions(self):
@@ -4202,7 +4200,7 @@ def test_make_public_w_preconditions(self):
            expected_patch_data,
            query_params=expected_query_params,
            timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
         )
 
     def test_make_private_w_defaults(self):
@@ -4226,7 +4224,7 @@ def test_make_private_w_defaults(self):
            expected_patch_data,
            query_params=expected_query_params,
            timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
         )
 
     def test_make_private_w_timeout(self):
@@ -4251,7 +4249,7 @@ def test_make_private_w_timeout(self):
            expected_patch_data,
            query_params=expected_query_params,
            timeout=timeout,
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
         )
 
     def test_make_private_w_preconditions(self):
@@ -4279,7 +4277,7 @@ def test_make_private_w_preconditions(self):
            expected_patch_data,
            query_params=expected_query_params,
            timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            retry=DEFAULT_RETRY,
         )
 
     def test_compose_wo_content_type_set(self):
diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py
index 9818a9045..7129232a0 100644
--- a/tests/unit/test_bucket.py
+++ b/tests/unit/test_bucket.py
@@ -1641,7 +1641,7 @@ def test_delete_blob_miss_w_defaults(self):
         expected_path,
         query_params=expected_query_params,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
         _target_object=None,
     )
 
@@ -1664,7 +1664,7 @@ def test_delete_blob_hit_w_user_project_w_timeout(self):
         expected_path,
         query_params=expected_query_params,
         timeout=timeout,
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
         _target_object=None,
     )
 
@@ -1717,7 +1717,7 @@ def test_delete_blob_hit_w_generation_match(self):
         expected_path,
         query_params=expected_query_params,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
         _target_object=None,
     )
 
@@ -1749,7 +1749,7 @@ def test_delete_blobs_hit_w_explicit_client_w_timeout(self):
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=timeout,
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
 
     def test_delete_blobs_w_generation_match_wrong_len(self):
@@ -1833,7 +1833,7 @@ def test_delete_blobs_w_generation_match_none(self):
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
     call_2 = mock.call(
         blob_name2,
@@ -1844,7 +1844,7 @@ def test_delete_blobs_w_generation_match_none(self):
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
 
     bucket.delete_blob.assert_has_calls([call_1, call_2])
@@ -1917,7 +1917,7 @@ def test_delete_blobs_miss_wo_on_error(self):
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
     call_2 = mock.call(
         blob_name2,
@@ -1928,7 +1928,7 @@ def test_delete_blobs_miss_wo_on_error(self):
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
 
     bucket.delete_blob.assert_has_calls([call_1, call_2])
@@ -1957,7 +1957,7 @@ def test_delete_blobs_miss_w_on_error(self):
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
     call_2 = mock.call(
         blob_name2,
@@ -1968,7 +1968,7 @@ def test_delete_blobs_miss_w_on_error(self):
         if_metageneration_match=None,
         if_metageneration_not_match=None,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
 
     bucket.delete_blob.assert_has_calls([call_1, call_2])
@@ -2252,7 +2252,7 @@ def test_copy_blob_w_preserve_acl_false_w_explicit_client(self):
         expected_patch_data,
         query_params=expected_patch_query_params,
         timeout=self._get_default_timeout(),
-        retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+        retry=DEFAULT_RETRY,
     )
 
     def test_copy_blob_w_name_and_user_project(self):
diff --git a/tests/unit/test_fileio.py b/tests/unit/test_fileio.py
index 57581b98a..8da25d9e3 100644
--- a/tests/unit/test_fileio.py
+++ b/tests/unit/test_fileio.py
@@ -23,6 +23,7 @@
 from google.api_core.exceptions import RequestRangeNotSatisfiable
 from google.cloud.storage.fileio import CHUNK_SIZE_MULTIPLE
 from google.cloud.storage.retry import DEFAULT_RETRY
+from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED
 
 TEST_TEXT_DATA = string.ascii_lowercase + "\n" + string.ascii_uppercase + "\n"
 TEST_BINARY_DATA = TEST_TEXT_DATA.encode("utf-8")
@@ -378,7 +379,7 @@ def test_write(self, mock_warn):
                 PLAIN_CONTENT_TYPE,
                 None,
                 chunk_size=chunk_size,
-                retry=None,
+                retry=DEFAULT_RETRY,
                 **upload_kwargs
             )
             upload.transmit_next_chunk.assert_called_with(transport, timeout=timeout)
@@ -469,7 +470,7 @@ def test_seek_fails(self):
         with self.assertRaises(io.UnsupportedOperation):
             writer.seek(0)
 
-    def test_conditional_retry_failure(self):
+    def test_retry_enabled(self):
         blob = mock.Mock()
 
         upload = mock.Mock()
@@ -486,6 +487,7 @@ def test_retry_enabled(self):
                 blob,
                 chunk_size=chunk_size,
                 content_type=PLAIN_CONTENT_TYPE,
+                if_generation_match=123456,
             )
 
             # The transmit_next_chunk method must actually consume bytes from the
@@ -501,7 +503,7 @@ def test_retry_enabled(self):
 
             # Write over chunk_size. This should result in upload initialization
             # and multiple chunks uploaded.
-            # Due to the condition not being fulfilled, retry should be None.
+            # Retry should be DEFAULT_RETRY.
             writer.write(TEST_BINARY_DATA[4:32])
             blob._initiate_resumable_upload.assert_called_once_with(
                 blob.bucket.client,
@@ -509,7 +511,8 @@ def test_retry_enabled(self):
                 PLAIN_CONTENT_TYPE,
                 None,  # size
                 chunk_size=chunk_size,
-                retry=None,
+                retry=DEFAULT_RETRY,
+                if_generation_match=123456,
             )
             upload.transmit_next_chunk.assert_called_with(transport)
             self.assertEqual(upload.transmit_next_chunk.call_count, 4)
@@ -519,7 +522,7 @@ def test_retry_enabled(self):
             writer.close()
             self.assertEqual(upload.transmit_next_chunk.call_count, 5)
 
-    def test_conditional_retry_pass(self):
+    def test_forced_default_retry(self):
         blob = mock.Mock()
 
         upload = mock.Mock()
@@ -536,7 +539,7 @@ def test_forced_default_retry(self):
                 blob,
                 chunk_size=chunk_size,
                 content_type=PLAIN_CONTENT_TYPE,
-                if_generation_match=123456,
+                retry=DEFAULT_RETRY,
             )
 
             # The transmit_next_chunk method must actually consume bytes from the
@@ -552,7 +555,6 @@ def test_forced_default_retry(self):
 
             # Write over chunk_size. This should result in upload initialization
             # and multiple chunks uploaded.
-            # Due to the condition being fulfilled, retry should be DEFAULT_RETRY.
             writer.write(TEST_BINARY_DATA[4:32])
             blob._initiate_resumable_upload.assert_called_once_with(
                 blob.bucket.client,
@@ -561,7 +563,6 @@ def test_forced_default_retry(self):
                 None,  # size
                 chunk_size=chunk_size,
                 retry=DEFAULT_RETRY,
-                if_generation_match=123456,
             )
             upload.transmit_next_chunk.assert_called_with(transport)
             self.assertEqual(upload.transmit_next_chunk.call_count, 4)
@@ -571,7 +572,13 @@ def test_forced_default_retry(self):
             writer.close()
             self.assertEqual(upload.transmit_next_chunk.call_count, 5)
 
-    def test_forced_default_retry(self):
+    def test_rejects_invalid_kwargs(self):
+        blob = mock.Mock()
+        with self.assertRaises(ValueError):
+            self._make_blob_writer(blob, invalid_kwarg=1)
+
+    def test_conditional_retry_w_condition(self):
+        # Not the default, but still supported in the signature for compatibility.
         blob = mock.Mock()
 
         upload = mock.Mock()
@@ -588,7 +595,8 @@ def test_conditional_retry_w_condition(self):
                 blob,
                 chunk_size=chunk_size,
                 content_type=PLAIN_CONTENT_TYPE,
-                retry=DEFAULT_RETRY,
+                retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+                if_generation_match=100,
             )
 
             # The transmit_next_chunk method must actually consume bytes from the
@@ -612,19 +620,52 @@ def test_conditional_retry_w_condition(self):
                 None,  # size
                 chunk_size=chunk_size,
                 retry=DEFAULT_RETRY,
+                if_generation_match=100,
             )
-            upload.transmit_next_chunk.assert_called_with(transport)
-            self.assertEqual(upload.transmit_next_chunk.call_count, 4)
 
-            # Write another byte, finalize and close.
-            writer.write(TEST_BINARY_DATA[32:33])
-            writer.close()
-            self.assertEqual(upload.transmit_next_chunk.call_count, 5)
-
-    def test_rejects_invalid_kwargs(self):
+    def test_conditional_retry_wo_condition(self):
+        # Not the default, but still supported in the signature for compatibility.
         blob = mock.Mock()
-        with self.assertRaises(ValueError):
-            self._make_blob_writer(blob, invalid_kwarg=1)
+
+        upload = mock.Mock()
+        transport = mock.Mock()
+
+        blob._initiate_resumable_upload.return_value = (upload, transport)
+
+        with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1):
+            # Create a writer.
+            # It would be normal to use a context manager here, but not doing so
+            # gives us more control over close() for test purposes.
+            chunk_size = 8  # Note: Real upload requires a multiple of 256KiB.
+            writer = self._make_blob_writer(
+                blob,
+                chunk_size=chunk_size,
+                content_type=PLAIN_CONTENT_TYPE,
+                retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+            )
+
+            # The transmit_next_chunk method must actually consume bytes from the
+            # sliding buffer for the flush() feature to work properly.
+            upload.transmit_next_chunk.side_effect = lambda _: writer._buffer.read(
+                chunk_size
+            )
+
+            # Write under chunk_size. This should be buffered and the upload not
+            # initiated.
+            writer.write(TEST_BINARY_DATA[0:4])
+            blob._initiate_resumable_upload.assert_not_called()
+
+            # Write over chunk_size. This should result in upload initialization
+            # and multiple chunks uploaded.
+            writer.write(TEST_BINARY_DATA[4:32])
+            blob._initiate_resumable_upload.assert_called_once_with(
+                blob.bucket.client,
+                writer._buffer,
+                PLAIN_CONTENT_TYPE,
+                None,  # size
+                chunk_size=chunk_size,
+                retry=None,
+            )
 
 
 class Test_SlidingBuffer(unittest.TestCase):
@@ -896,6 +937,6 @@ def test_write(self, mock_warn):
                 PLAIN_CONTENT_TYPE,
                 None,
                 chunk_size=chunk_size,
-                retry=None,
+                retry=DEFAULT_RETRY,
             )
             upload.transmit_next_chunk.assert_called_with(transport)
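Note for reviewers: the net effect of this patch is that mutating calls such as blob.delete(), bucket.delete_blob(), and the upload_from_* family now retry transient errors by default (DEFAULT_RETRY) instead of retrying only when a generation precondition made the request provably idempotent. A minimal sketch of the caller-visible options under the new defaults; the bucket and object names below are hypothetical placeholders, not part of this patch:

    from google.cloud import storage
    from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED

    client = storage.Client()
    blob = client.bucket("my-bucket").blob("my-object")  # hypothetical names

    # New default: retried on transient errors even with no precondition set.
    blob.upload_from_string(b"payload")

    # Opt out of retries entirely for a call you treat as non-idempotent.
    blob.upload_from_string(b"payload", retry=None)

    # Restore the old conditional behavior: retry only because
    # if_generation_match makes the request idempotent.
    blob.upload_from_string(
        b"payload",
        if_generation_match=0,
        retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
    )

If the precondition is omitted, DEFAULT_RETRY_IF_GENERATION_SPECIFIED resolves to no retry at all, which matches the pre-change default for these methods.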