diff --git a/CHANGELOG.md b/CHANGELOG.md index 491baccaf..f236724be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +### [1.35.1](https://www.github.com/googleapis/python-storage/compare/v1.35.0...v1.35.1) (2021-01-28) + + +### Bug Fixes + +* address incorrect usage of request preconditions ([#366](https://www.github.com/googleapis/python-storage/issues/366)) ([321658c](https://www.github.com/googleapis/python-storage/commit/321658c3b9ccaf22d08dd881c93206590f8275b7)) +* Amend default retry behavior for bucket operations on client ([#358](https://www.github.com/googleapis/python-storage/issues/358)) ([b91e57d](https://www.github.com/googleapis/python-storage/commit/b91e57d6ca314ac4feaec30bf355fcf7ac4468c0)) + ## [1.35.0](https://www.github.com/googleapis/python-storage/compare/v1.34.0...v1.35.0) (2020-12-14) diff --git a/docs/snippets.py b/docs/snippets.py index 8835ae9ed..89f92a20b 100644 --- a/docs/snippets.py +++ b/docs/snippets.py @@ -95,7 +95,7 @@ def download_to_file(to_delete): blob = Blob("secure-data", bucket, encryption_key=encryption_key) blob.upload_from_string("my secret message.") with open("/tmp/my-secure-file", "wb") as file_obj: - blob.download_to_file(file_obj) + client.download_to_file(blob, file_obj) # [END download_to_file] to_delete.append(blob) @@ -140,7 +140,7 @@ def delete_blob(to_delete): client = storage.Client() bucket = client.get_bucket("my-bucket") - blobs = list(bucket.list_blobs()) + blobs = list(client.list_blobs(bucket)) assert len(blobs) > 0 # [] bucket.delete_blob("my-file.txt") diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index f7bf720c8..8564f8e0d 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -597,7 +597,7 @@ 
def exists( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, - retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + retry=DEFAULT_RETRY, ): """Determines whether or not this blob exists. @@ -1178,11 +1178,12 @@ def download_to_filename( :raises: :class:`google.cloud.exceptions.NotFound` """ + client = self._require_client(client) try: with open(filename, "wb") as file_obj: - self.download_to_file( + client.download_blob_to_file( + self, file_obj, - client=client, start=start, end=end, raw_download=raw_download, @@ -1285,10 +1286,11 @@ def download_as_bytes( :raises: :class:`google.cloud.exceptions.NotFound` """ + client = self._require_client(client) string_buffer = BytesIO() - self.download_to_file( + client.download_blob_to_file( + self, string_buffer, - client=client, start=start, end=end, raw_download=raw_download, diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 3b51d9f82..31a188134 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -723,7 +723,7 @@ def exists( timeout=_DEFAULT_TIMEOUT, if_metageneration_match=None, if_metageneration_not_match=None, - retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + retry=DEFAULT_RETRY, ): """Determines whether or not this bucket exists. @@ -1108,7 +1108,7 @@ def get_blob( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, - retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + retry=DEFAULT_RETRY, **kwargs ): """Get a blob object by name. @@ -2022,6 +2022,9 @@ def rename_blob( This method will first duplicate the data and then delete the old blob. This means that with very large objects renaming could be a very (temporarily) costly or a very slow operation. + If you need more control over the copy and deletion, instead + use `google.cloud.storage.blob.Blob.copy_to` and + `google.cloud.storage.blob.Blob.delete` directly. 
:type blob: :class:`google.cloud.storage.blob.Blob` :param blob: The blob to be renamed. @@ -2079,25 +2082,29 @@ :param if_source_generation_match: (Optional) Makes the operation conditional on whether the source object's generation matches the - given value. + given value. Also used in the + delete request. :type if_source_generation_not_match: long :param if_source_generation_not_match: (Optional) Makes the operation conditional on whether the source object's generation does not match - the given value. + the given value. Also used in the + delete request. :type if_source_metageneration_match: long :param if_source_metageneration_match: (Optional) Makes the operation conditional on whether the source object's current metageneration - matches the given value. + matches the given value. Also used in the + delete request. :type if_source_metageneration_not_match: long :param if_source_metageneration_not_match: (Optional) Makes the operation conditional on whether the source object's current metageneration does not match the given value. + Also used in the delete request. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
@@ -2139,10 +2146,10 @@ def rename_blob( blob.delete( client=client, timeout=timeout, - if_generation_match=if_generation_match, - if_generation_not_match=if_generation_not_match, - if_metageneration_match=if_metageneration_match, - if_metageneration_not_match=if_metageneration_not_match, + if_generation_match=if_source_generation_match, + if_generation_not_match=if_source_generation_not_match, + if_metageneration_match=if_source_metageneration_match, + if_metageneration_not_match=if_source_metageneration_not_match, retry=retry, ) return new_blob diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 42358ef68..8812dc32e 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -52,7 +52,6 @@ from google.cloud.storage.acl import DefaultObjectACL from google.cloud.storage.constants import _DEFAULT_TIMEOUT from google.cloud.storage.retry import DEFAULT_RETRY -from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED _marker = object() @@ -320,7 +319,7 @@ def get_bucket( timeout=_DEFAULT_TIMEOUT, if_metageneration_match=None, if_metageneration_not_match=None, - retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + retry=DEFAULT_RETRY, ): """API call: retrieve a bucket via a GET request. @@ -407,7 +406,7 @@ def lookup_bucket( timeout=_DEFAULT_TIMEOUT, if_metageneration_match=None, if_metageneration_not_match=None, - retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + retry=DEFAULT_RETRY, ): """Get a bucket by name, returning None if not found. 
@@ -865,7 +864,7 @@ def list_blobs( path = bucket.path + "/o" api_request = functools.partial( - self._connection.api_request, timeout=timeout, retry=DEFAULT_RETRY + self._connection.api_request, timeout=timeout, retry=retry ) iterator = page_iterator.HTTPIterator( client=self, diff --git a/google/cloud/storage/version.py b/google/cloud/storage/version.py index d9a170205..1aeb3949b 100644 --- a/google/cloud/storage/version.py +++ b/google/cloud/storage/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.35.0" +__version__ = "1.35.1" diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 63e881bf2..1ff17a61f 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -63,9 +63,9 @@ def _bad_copy(bad_request): retry_bad_copy = RetryErrors(exceptions.BadRequest, error_predicate=_bad_copy) -def _empty_bucket(bucket): +def _empty_bucket(client, bucket): """Empty a bucket of all existing blobs (including multiple versions).""" - for blob in list(bucket.list_blobs(versions=True)): + for blob in list(client.list_blobs(bucket, versions=True)): try: blob.delete() except exceptions.NotFound: @@ -96,7 +96,7 @@ def setUpModule(): def tearDownModule(): errors = (exceptions.Conflict, exceptions.TooManyRequests) retry = RetryErrors(errors, max_tries=15) - retry(_empty_bucket)(Config.TEST_BUCKET) + retry(_empty_bucket)(Config.CLIENT, Config.TEST_BUCKET) retry(Config.TEST_BUCKET.delete)(force=True) @@ -622,7 +622,7 @@ def test_large_encrypted_file_write_from_stream(self): with tempfile.NamedTemporaryFile() as temp_f: with open(temp_f.name, "wb") as file_obj: - blob.download_to_file(file_obj) + Config.CLIENT.download_blob_to_file(blob, file_obj) with open(temp_f.name, "rb") as file_obj: md5_temp_hash = _base64_md5hash(file_obj) @@ -718,11 +718,15 @@ def test_crud_blob_w_user_project(self): self.assertIsNone(blob1.metadata) finally: # Exercise 
'objects.delete' (metadata) w/ userProject. - blobs = with_user_project.list_blobs(prefix=blob.name, versions=True) + blobs = Config.CLIENT.list_blobs( + with_user_project, prefix=blob.name, versions=True + ) self.assertEqual([each.generation for each in blobs], [gen0, gen1]) blob0.delete() - blobs = with_user_project.list_blobs(prefix=blob.name, versions=True) + blobs = Config.CLIENT.list_blobs( + with_user_project, prefix=blob.name, versions=True + ) self.assertEqual([each.generation for each in blobs], [gen1]) blob1.delete() @@ -859,7 +863,7 @@ def test_direct_write_and_read_into_file(self): with tempfile.NamedTemporaryFile() as temp_f: with open(temp_f.name, "wb") as file_obj: - same_blob.download_to_file(file_obj) + Config.CLIENT.download_blob_to_file(same_blob, file_obj) with open(temp_f.name, "rb") as file_obj: stored_contents = file_obj.read() @@ -881,11 +885,12 @@ def test_download_w_generation_match(self): with open(temp_f.name, "wb") as file_obj: with self.assertRaises(google.api_core.exceptions.PreconditionFailed): - same_blob.download_to_file( - file_obj, if_generation_match=WRONG_GENERATION_NUMBER + Config.CLIENT.download_blob_to_file( + same_blob, file_obj, if_generation_match=WRONG_GENERATION_NUMBER ) - same_blob.download_to_file( + Config.CLIENT.download_blob_to_file( + same_blob, file_obj, if_generation_match=blob.generation, if_metageneration_match=blob.metageneration, @@ -1068,7 +1073,7 @@ class TestStorageListFiles(TestStorageFiles): def setUpClass(cls): super(TestStorageListFiles, cls).setUpClass() # Make sure bucket empty before beginning. 
- _empty_bucket(cls.bucket) + _empty_bucket(Config.CLIENT, cls.bucket) logo_path = cls.FILES["logo"]["path"] blob = storage.Blob(cls.FILENAMES[0], bucket=cls.bucket) @@ -1089,7 +1094,7 @@ def tearDownClass(cls): @RetryErrors(unittest.TestCase.failureException) def test_list_files(self): - all_blobs = list(self.bucket.list_blobs()) + all_blobs = list(Config.CLIENT.list_blobs(self.bucket)) self.assertEqual( sorted(blob.name for blob in all_blobs), sorted(self.FILENAMES) ) @@ -1100,7 +1105,7 @@ def test_list_files_with_user_project(self): with_user_project = Config.CLIENT.bucket( self.bucket.name, user_project=USER_PROJECT ) - all_blobs = list(with_user_project.list_blobs()) + all_blobs = list(Config.CLIENT.list_blobs(with_user_project)) self.assertEqual( sorted(blob.name for blob in all_blobs), sorted(self.FILENAMES) ) @@ -1109,7 +1114,7 @@ def test_list_files_with_user_project(self): def test_paginate_files(self): truncation_size = 1 count = len(self.FILENAMES) - truncation_size - iterator = self.bucket.list_blobs(max_results=count) + iterator = Config.CLIENT.list_blobs(self.bucket, max_results=count) page_iter = iterator.pages page1 = six.next(page_iter) @@ -1133,7 +1138,8 @@ def test_paginate_files_with_offset(self): exclusive_end_offset = self.FILENAMES[-1] desired_files = self.FILENAMES[1:-1] count = len(desired_files) - truncation_size - iterator = self.bucket.list_blobs( + iterator = Config.CLIENT.list_blobs( + self.bucket, max_results=count, start_offset=inclusive_start_offset, end_offset=exclusive_end_offset, @@ -1173,7 +1179,7 @@ class TestStoragePseudoHierarchy(TestStorageFiles): def setUpClass(cls): super(TestStoragePseudoHierarchy, cls).setUpClass() # Make sure bucket empty before beginning. 
- _empty_bucket(cls.bucket) + _empty_bucket(Config.CLIENT, cls.bucket) cls.suite_blobs_to_delete = [] simple_path = cls.FILES["simple"]["path"] @@ -1197,7 +1203,7 @@ def test_blob_get_w_delimiter(self): @RetryErrors(unittest.TestCase.failureException) def test_root_level_w_delimiter(self): - iterator = self.bucket.list_blobs(delimiter="/") + iterator = Config.CLIENT.list_blobs(self.bucket, delimiter="/") page = six.next(iterator.pages) blobs = list(page) self.assertEqual([blob.name for blob in blobs], ["file01.txt"]) @@ -1206,7 +1212,9 @@ def test_root_level_w_delimiter(self): @RetryErrors(unittest.TestCase.failureException) def test_first_level(self): - iterator = self.bucket.list_blobs(delimiter="/", prefix="parent/") + iterator = Config.CLIENT.list_blobs( + self.bucket, delimiter="/", prefix="parent/" + ) page = six.next(iterator.pages) blobs = list(page) self.assertEqual( @@ -1219,7 +1227,9 @@ def test_first_level(self): def test_second_level(self): expected_names = ["parent/child/file21.txt", "parent/child/file22.txt"] - iterator = self.bucket.list_blobs(delimiter="/", prefix="parent/child/") + iterator = Config.CLIENT.list_blobs( + self.bucket, delimiter="/", prefix="parent/child/" + ) page = six.next(iterator.pages) blobs = list(page) self.assertEqual([blob.name for blob in blobs], expected_names) @@ -1234,7 +1244,9 @@ def test_third_level(self): # of 1024 characters in the UTF-8 encoded name: # https://cloud.google.com/storage/docs/bucketnaming#objectnames # Exercise a layer deeper to illustrate this. 
- iterator = self.bucket.list_blobs(delimiter="/", prefix="parent/child/grand/") + iterator = Config.CLIENT.list_blobs( + self.bucket, delimiter="/", prefix="parent/child/grand/" + ) page = six.next(iterator.pages) blobs = list(page) self.assertEqual( @@ -1245,8 +1257,8 @@ def test_third_level(self): @RetryErrors(unittest.TestCase.failureException) def test_include_trailing_delimiter(self): - iterator = self.bucket.list_blobs( - delimiter="/", include_trailing_delimiter=True + iterator = Config.CLIENT.list_blobs( + self.bucket, delimiter="/", include_trailing_delimiter=True ) page = six.next(iterator.pages) blobs = list(page) @@ -1273,7 +1285,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - _empty_bucket(cls.bucket) + _empty_bucket(Config.CLIENT, cls.bucket) errors = (exceptions.Conflict, exceptions.TooManyRequests) retry = RetryErrors(errors, max_tries=6) retry(cls.bucket.delete)(force=True) @@ -1961,7 +1973,7 @@ class TestAnonymousClient(unittest.TestCase): def test_access_to_public_bucket(self): anonymous = storage.Client.create_anonymous_client() bucket = anonymous.bucket(self.PUBLIC_BUCKET) - (blob,) = retry_429_503(bucket.list_blobs)(max_results=1) + (blob,) = retry_429_503(anonymous.list_blobs)(bucket, max_results=1) with tempfile.TemporaryFile() as stream: retry_429_503(blob.download_to_file)(stream) @@ -1988,7 +2000,7 @@ def _kms_key_name(self, key_name=None): @classmethod def setUpClass(cls): super(TestKMSIntegration, cls).setUpClass() - _empty_bucket(cls.bucket) + _empty_bucket(Config.CLIENT, cls.bucket) def setUp(self): super(TestKMSIntegration, self).setUp() @@ -2048,7 +2060,7 @@ def test_blob_w_explicit_kms_key_name(self): # We don't know the current version of the key. 
self.assertTrue(blob.kms_key_name.startswith(kms_key_name)) - (listed,) = list(self.bucket.list_blobs()) + (listed,) = list(Config.CLIENT.list_blobs(self.bucket)) self.assertTrue(listed.kms_key_name.startswith(kms_key_name)) @RetryErrors(unittest.TestCase.failureException) diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index e7caa90a2..cd6ecafa0 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -669,7 +669,7 @@ def test_exists_miss(self): "query_params": {"fields": "name"}, "_target_object": None, "timeout": 42, - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + "retry": DEFAULT_RETRY, }, ) @@ -692,7 +692,7 @@ def test_exists_hit_w_user_project(self): "query_params": {"fields": "name", "userProject": USER_PROJECT}, "_target_object": None, "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + "retry": DEFAULT_RETRY, }, ) @@ -715,7 +715,7 @@ def test_exists_hit_w_generation(self): "query_params": {"fields": "name", "generation": GENERATION}, "_target_object": None, "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + "retry": DEFAULT_RETRY, }, ) @@ -749,7 +749,7 @@ def test_exists_w_generation_match(self): }, "_target_object": None, "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + "retry": DEFAULT_RETRY, }, ) @@ -1627,18 +1627,18 @@ def test_download_as_bytes_w_generation_match(self): GENERATION_NUMBER = 6 MEDIA_LINK = "http://example.com/media/" - client = mock.Mock(spec=["_http"]) + client = self._make_client() blob = self._make_one( "blob-name", bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} ) - blob.download_to_file = mock.Mock() + client.download_blob_to_file = mock.Mock() fetched = blob.download_as_bytes(if_generation_match=GENERATION_NUMBER) self.assertEqual(fetched, b"") - blob.download_to_file.assert_called_once_with( + 
client.download_blob_to_file.assert_called_once_with( + blob, mock.ANY, - client=None, start=None, end=None, raw_download=False, @@ -1810,18 +1810,18 @@ def test_download_as_text_w_non_ascii_wo_explicit_encoding_w_charset(self): def test_download_as_string(self, mock_warn): MEDIA_LINK = "http://example.com/media/" - client = mock.Mock(spec=["_http"]) + client = self._make_client() blob = self._make_one( "blob-name", bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} ) - blob.download_to_file = mock.Mock() + client.download_blob_to_file = mock.Mock() fetched = blob.download_as_string() self.assertEqual(fetched, b"") - blob.download_to_file.assert_called_once_with( + client.download_blob_to_file.assert_called_once_with( + blob, mock.ANY, - client=None, start=None, end=None, raw_download=False, diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index f3f2b4cd0..fe675701b 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -672,7 +672,7 @@ def api_request(cls, *args, **kwargs): "query_params": {"fields": "name"}, "_target_object": None, "timeout": 42, - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + "retry": DEFAULT_RETRY, } expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) @@ -707,7 +707,7 @@ def api_request(cls, *args, **kwargs): }, "_target_object": None, "timeout": 42, - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + "retry": DEFAULT_RETRY, } expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) @@ -735,7 +735,7 @@ def api_request(cls, *args, **kwargs): "query_params": {"fields": "name", "userProject": USER_PROJECT}, "_target_object": None, "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + "retry": DEFAULT_RETRY, } expected_cw = [((), expected_called_kwargs)] self.assertEqual(_FakeConnection._called_with, expected_cw) @@ -1639,7 
+1639,8 @@ def test_rename_blob_with_generation_match(self): NEW_BLOB_NAME = "new-blob-name" DATA = {"name": NEW_BLOB_NAME} GENERATION_NUMBER = 6 - METAGENERATION_NUMBER = 9 + SOURCE_GENERATION_NUMBER = 7 + SOURCE_METAGENERATION_NUMBER = 9 connection = _Connection(DATA) client = _Client(connection) @@ -1652,7 +1653,8 @@ def test_rename_blob_with_generation_match(self): client=client, timeout=42, if_generation_match=GENERATION_NUMBER, - if_source_metageneration_not_match=METAGENERATION_NUMBER, + if_source_generation_match=SOURCE_GENERATION_NUMBER, + if_source_metageneration_not_match=SOURCE_METAGENERATION_NUMBER, ) self.assertIs(renamed_blob.bucket, bucket) @@ -1668,7 +1670,8 @@ def test_rename_blob_with_generation_match(self): kw["query_params"], { "ifGenerationMatch": GENERATION_NUMBER, - "ifSourceMetagenerationNotMatch": METAGENERATION_NUMBER, + "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER, + "ifSourceMetagenerationNotMatch": SOURCE_METAGENERATION_NUMBER, }, ) self.assertEqual(kw["timeout"], 42) @@ -1677,10 +1680,10 @@ def test_rename_blob_with_generation_match(self): blob.delete.assert_called_once_with( client=client, timeout=42, - if_generation_match=GENERATION_NUMBER, + if_generation_match=SOURCE_GENERATION_NUMBER, if_generation_not_match=None, if_metageneration_match=None, - if_metageneration_not_match=None, + if_metageneration_not_match=SOURCE_METAGENERATION_NUMBER, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index ee0e387dd..a4c23d7cc 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -562,6 +562,54 @@ def test_get_bucket_with_object_hit(self): parms = dict(urlparse.parse_qsl(qs)) self.assertEqual(parms["projection"], "noAcl") + def test_get_bucket_default_retry(self): + from google.cloud.storage.bucket import Bucket + from google.cloud.storage._http import Connection + + PROJECT = "PROJECT" + CREDENTIALS = _make_credentials() + client = 
self._make_one(project=PROJECT, credentials=CREDENTIALS) + + bucket_name = "bucket-name" + bucket_obj = Bucket(client, bucket_name) + + with mock.patch.object(Connection, "api_request") as req: + client.get_bucket(bucket_obj) + + req.assert_called_once_with( + method="GET", + path=mock.ANY, + query_params=mock.ANY, + headers=mock.ANY, + _target_object=bucket_obj, + timeout=mock.ANY, + retry=DEFAULT_RETRY, + ) + + def test_get_bucket_respects_retry_override(self): + from google.cloud.storage.bucket import Bucket + from google.cloud.storage._http import Connection + + PROJECT = "PROJECT" + CREDENTIALS = _make_credentials() + client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + + bucket_name = "bucket-name" + bucket_obj = Bucket(client, bucket_name) + + with mock.patch.object(Connection, "api_request") as req: + client.get_bucket(bucket_obj, retry=None) + + req.assert_called_once_with( + method="GET", + path=mock.ANY, + query_params=mock.ANY, + headers=mock.ANY, + _target_object=bucket_obj, + timeout=mock.ANY, + retry=None, + ) + def test_lookup_bucket_miss(self): PROJECT = "PROJECT" CREDENTIALS = _make_credentials() @@ -658,6 +706,29 @@ def test_lookup_bucket_with_metageneration_match(self): self.assertEqual(parms["projection"], "noAcl") self.assertEqual(parms["ifMetagenerationMatch"], str(METAGENERATION_NUMBER)) + def test_lookup_bucket_default_retry(self): + from google.cloud.storage.bucket import Bucket + from google.cloud.storage._http import Connection + + PROJECT = "PROJECT" + CREDENTIALS = _make_credentials() + client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + + bucket_name = "bucket-name" + bucket_obj = Bucket(client, bucket_name) + + with mock.patch.object(Connection, "api_request") as req: + client.lookup_bucket(bucket_obj) + req.assert_called_once_with( + method="GET", + path=mock.ANY, + query_params=mock.ANY, + headers=mock.ANY, + _target_object=bucket_obj, + timeout=mock.ANY, + retry=DEFAULT_RETRY, + ) + def 
test_create_bucket_w_missing_client_project(self): credentials = _make_credentials() client = self._make_one(project=None, credentials=credentials)