diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index fb329d08d..a2563198b 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -180,10 +180,9 @@ def __init__(
        self.chunk_size = chunk_size  # Check that setter accepts value.
        self._bucket = bucket
        self._acl = ObjectACL(self)
-        if encryption_key is not None and kms_key_name is not None:
-            raise ValueError(
-                "Pass at most one of 'encryption_key' " "and 'kms_key_name'"
-            )
+        _raise_for_more_than_one_none(
+            encryption_key=encryption_key, kms_key_name=kms_key_name,
+        )

        self._encryption_key = encryption_key

@@ -966,7 +965,17 @@ def _get_upload_arguments(self, content_type):
        return headers, object_metadata, content_type

    def _do_multipart_upload(
-        self, client, stream, content_type, size, num_retries, predefined_acl
+        self,
+        client,
+        stream,
+        content_type,
+        size,
+        num_retries,
+        predefined_acl,
+        generation_match,
+        generation_not_match,
+        metageneration_match,
+        metageneration_not_match,
    ):
        """Perform a multipart upload.

@@ -999,6 +1008,18 @@ def _do_multipart_upload(
        :type predefined_acl: str
        :param predefined_acl: (Optional) Predefined access control list

+        :type generation_match: long
+        :param generation_match: (Optional) Precondition: perform the upload only if the blob's current generation matches this value.
+
+        :type generation_not_match: long
+        :param generation_not_match: (Optional) Precondition: perform the upload only if the blob's current generation does not match this value.
+
+        :type metageneration_match: long
+        :param metageneration_match: (Optional) Precondition: perform the upload only if the blob's current metageneration matches this value.
+
+        :type metageneration_not_match: long
+        :param metageneration_not_match: (Optional) Precondition: perform the upload only if the blob's current metageneration does not match this value.
+
        :rtype: :class:`~requests.Response`
        :returns: The "200 OK" response object returned after the multipart
                  upload request.
@@ -1029,6 +1050,20 @@ def _do_multipart_upload(
        if predefined_acl is not None:
            name_value_pairs.append(("predefinedAcl", predefined_acl))

+        if generation_match is not None:
+            name_value_pairs.append(("ifGenerationMatch", generation_match))
+
+        if generation_not_match is not None:
+            name_value_pairs.append(("ifGenerationNotMatch", generation_not_match))
+
+        if metageneration_match is not None:
+            name_value_pairs.append(("ifMetagenerationMatch", metageneration_match))
+
+        if metageneration_not_match is not None:
+            name_value_pairs.append(
+                ("ifMetagenerationNotMatch", metageneration_not_match)
+            )
+
        upload_url = _add_query_parameters(base_url, name_value_pairs)
        upload = MultipartUpload(upload_url, headers=headers)
@@ -1051,6 +1086,10 @@ def _initiate_resumable_upload(
        predefined_acl=None,
        extra_headers=None,
        chunk_size=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
    ):
        """Initiate a resumable upload.

@@ -1094,6 +1133,18 @@ def _initiate_resumable_upload(
            If not passed, will fall back to the chunk size on the current blob.

+        :type generation_match: long
+        :param generation_match: (Optional) Precondition: perform the upload only if the blob's current generation matches this value.
+
+        :type generation_not_match: long
+        :param generation_not_match: (Optional) Precondition: perform the upload only if the blob's current generation does not match this value.
+
+        :type metageneration_match: long
+        :param metageneration_match: (Optional) Precondition: perform the upload only if the blob's current metageneration matches this value.
+
+        :type metageneration_not_match: long
+        :param metageneration_not_match: (Optional) Precondition: perform the upload only if the blob's current metageneration does not match this value.
+
        :rtype: tuple
        :returns:
            Pair of
@@ -1125,6 +1176,20 @@ def _initiate_resumable_upload(
        if predefined_acl is not None:
            name_value_pairs.append(("predefinedAcl", predefined_acl))

+        if generation_match is not None:
+            name_value_pairs.append(("ifGenerationMatch", generation_match))
+
+        if generation_not_match is not None:
+            name_value_pairs.append(("ifGenerationNotMatch", generation_not_match))
+
+        if metageneration_match is not None:
+            name_value_pairs.append(("ifMetagenerationMatch", metageneration_match))
+
+        if metageneration_not_match is not None:
+            name_value_pairs.append(
+                ("ifMetagenerationNotMatch", metageneration_not_match)
+            )
+
        upload_url = _add_query_parameters(base_url, name_value_pairs)
        upload = ResumableUpload(upload_url, chunk_size, headers=headers)
@@ -1145,7 +1210,17 @@ def _initiate_resumable_upload(
        return upload, transport

    def _do_resumable_upload(
-        self, client, stream, content_type, size, num_retries, predefined_acl
+        self,
+        client,
+        stream,
+        content_type,
+        size,
+        num_retries,
+        predefined_acl,
+        generation_match,
+        generation_not_match,
+        metageneration_match,
+        metageneration_not_match,
    ):
        """Perform a resumable upload.

@@ -1180,6 +1255,18 @@ def _do_resumable_upload(
        :type predefined_acl: str
        :param predefined_acl: (Optional) Predefined access control list

+        :type generation_match: long
+        :param generation_match: (Optional) Precondition: perform the upload only if the blob's current generation matches this value.
+
+        :type generation_not_match: long
+        :param generation_not_match: (Optional) Precondition: perform the upload only if the blob's current generation does not match this value.
+
+        :type metageneration_match: long
+        :param metageneration_match: (Optional) Precondition: perform the upload only if the blob's current metageneration matches this value.
+
+        :type metageneration_not_match: long
+        :param metageneration_not_match: (Optional) Precondition: perform the upload only if the blob's current metageneration does not match this value.
+
        :rtype: :class:`~requests.Response`
        :returns: The "200 OK" response object returned after the final chunk
                  is uploaded.
@@ -1191,6 +1278,10 @@ def _do_resumable_upload(
            size,
            num_retries,
            predefined_acl=predefined_acl,
+            generation_match=generation_match,
+            generation_not_match=generation_not_match,
+            metageneration_match=metageneration_match,
+            metageneration_not_match=metageneration_not_match,
        )

        while not upload.finished:
@@ -1199,7 +1290,17 @@ def _do_resumable_upload(
            response = upload.transmit_next_chunk(transport)

        return response

    def _do_upload(
-        self, client, stream, content_type, size, num_retries, predefined_acl
+        self,
+        client,
+        stream,
+        content_type,
+        size,
+        num_retries,
+        predefined_acl,
+        generation_match,
+        generation_not_match,
+        metageneration_match,
+        metageneration_not_match,
    ):
        """Determine an upload strategy and then perform the upload.

@@ -1236,6 +1337,18 @@ def _do_upload(
        :type predefined_acl: str
        :param predefined_acl: (Optional) Predefined access control list

+        :type generation_match: long
+        :param generation_match: (Optional) Precondition: perform the upload only if the blob's current generation matches this value.
+
+        :type generation_not_match: long
+        :param generation_not_match: (Optional) Precondition: perform the upload only if the blob's current generation does not match this value.
+
+        :type metageneration_match: long
+        :param metageneration_match: (Optional) Precondition: perform the upload only if the blob's current metageneration matches this value.
+
+        :type metageneration_not_match: long
+        :param metageneration_not_match: (Optional) Precondition: perform the upload only if the blob's current metageneration does not match this value.
+
        :rtype: dict
        :returns: The parsed JSON from the "200 OK" response. This will be the
                  **only** response in the multipart case and it will be the
@@ -1243,11 +1356,29 @@ def _do_upload(
                  **last** response in the resumable case.
        """
        if size is not None and size <= _MAX_MULTIPART_SIZE:
            response = self._do_multipart_upload(
-                client, stream, content_type, size, num_retries, predefined_acl
+                client,
+                stream,
+                content_type,
+                size,
+                num_retries,
+                predefined_acl,
+                generation_match,
+                generation_not_match,
+                metageneration_match,
+                metageneration_not_match,
            )
        else:
            response = self._do_resumable_upload(
-                client, stream, content_type, size, num_retries, predefined_acl
+                client,
+                stream,
+                content_type,
+                size,
+                num_retries,
+                predefined_acl,
+                generation_match,
+                generation_not_match,
+                metageneration_match,
+                metageneration_not_match,
            )

        return response.json()
@@ -1261,6 +1392,10 @@ def upload_from_file(
        num_retries=None,
        client=None,
        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
    ):
        """Upload the contents of this blob from a file-like object.

@@ -1322,6 +1457,18 @@ def upload_from_file(
        :type predefined_acl: str
        :param predefined_acl: (Optional) Predefined access control list

+        :type generation_match: long
+        :param generation_match: (Optional) Precondition: perform the upload only if the blob's current generation matches this value.
+
+        :type generation_not_match: long
+        :param generation_not_match: (Optional) Precondition: perform the upload only if the blob's current generation does not match this value.
+
+        :type metageneration_match: long
+        :param metageneration_match: (Optional) Precondition: perform the upload only if the blob's current metageneration matches this value.
+
+        :type metageneration_not_match: long
+        :param metageneration_not_match: (Optional) Precondition: perform the upload only if the blob's current metageneration does not match this value.
+
        :raises: :class:`~google.cloud.exceptions.GoogleCloudError`
                 if the upload response returns an error status.

@@ -1332,19 +1479,46 @@ def upload_from_file(
        if num_retries is not None:
            warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2)

+        _raise_for_more_than_one_none(
+            generation_match=generation_match,
+            generation_not_match=generation_not_match,
+        )
+
+        _raise_for_more_than_one_none(
+            metageneration_match=metageneration_match,
+            metageneration_not_match=metageneration_not_match,
+        )
+
        _maybe_rewind(file_obj, rewind=rewind)
        predefined_acl = ACL.validate_predefined(predefined_acl)
        try:
            created_json = self._do_upload(
-                client, file_obj, content_type, size, num_retries, predefined_acl
+                client,
+                file_obj,
+                content_type,
+                size,
+                num_retries,
+                predefined_acl,
+                generation_match,
+                generation_not_match,
+                metageneration_match,
+                metageneration_not_match,
            )
            self._set_properties(created_json)
        except resumable_media.InvalidResponse as exc:
            _raise_from_invalid_response(exc)

    def upload_from_filename(
-        self, filename, content_type=None, client=None, predefined_acl=None
+        self,
+        filename,
+        content_type=None,
+        client=None,
+        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
    ):
        """Upload this blob's contents from the content of a named file.

@@ -1382,6 +1556,18 @@ def upload_from_filename(
        :type predefined_acl: str
        :param predefined_acl: (Optional) Predefined access control list
+
+        :type generation_match: long
+        :param generation_match: (Optional) Precondition: perform the upload only if the blob's current generation matches this value.
+
+        :type generation_not_match: long
+        :param generation_not_match: (Optional) Precondition: perform the upload only if the blob's current generation does not match this value.
+
+        :type metageneration_match: long
+        :param metageneration_match: (Optional) Precondition: perform the upload only if the blob's current metageneration matches this value.
+
+        :type metageneration_not_match: long
+        :param metageneration_not_match: (Optional) Precondition: perform the upload only if the blob's current metageneration does not match this value.
        """
        content_type = self._get_content_type(content_type, filename=filename)
@@ -1393,10 +1579,22 @@ def upload_from_filename(
                client=client,
                size=total_bytes,
                predefined_acl=predefined_acl,
+                generation_match=generation_match,
+                generation_not_match=generation_not_match,
+                metageneration_match=metageneration_match,
+                metageneration_not_match=metageneration_not_match,
            )

    def upload_from_string(
-        self, data, content_type="text/plain", client=None, predefined_acl=None
+        self,
+        data,
+        content_type="text/plain",
+        client=None,
+        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
    ):
        """Upload contents of this blob from the provided string.

@@ -1429,6 +1627,18 @@ def upload_from_string(
        :type predefined_acl: str
        :param predefined_acl: (Optional) Predefined access control list
+
+        :type generation_match: long
+        :param generation_match: (Optional) Precondition: perform the upload only if the blob's current generation matches this value.
+
+        :type generation_not_match: long
+        :param generation_not_match: (Optional) Precondition: perform the upload only if the blob's current generation does not match this value.
+
+        :type metageneration_match: long
+        :param metageneration_match: (Optional) Precondition: perform the upload only if the blob's current metageneration matches this value.
+
+        :type metageneration_not_match: long
+        :param metageneration_not_match: (Optional) Precondition: perform the upload only if the blob's current metageneration does not match this value.
        """
        data = _to_bytes(data, encoding="utf-8")
        string_buffer = BytesIO(data)
@@ -1438,6 +1648,10 @@ def upload_from_string(
            content_type=content_type,
            client=client,
            predefined_acl=predefined_acl,
+            generation_match=generation_match,
+            generation_not_match=generation_not_match,
+            metageneration_match=metageneration_match,
+            metageneration_not_match=metageneration_not_match,
        )

    def create_resumable_upload_session(
@@ -2346,3 +2560,24 @@ def _add_query_parameters(base_url, name_value_pairs):
    query = parse_qsl(query)
    query.extend(name_value_pairs)
    return urlunsplit((scheme, netloc, path, urlencode(query), frag))
+
+
+def _raise_for_more_than_one_none(**kwargs):
+    """Raise ``ValueError`` if more than one of the passed arguments is set.
+
+    :type kwargs: dict
+    :param kwargs: Mapping of argument names to values; at most one may be set.
+
+    :raises: :class:`ValueError` listing the arguments that may not be passed together.
+    """
+    if sum(arg is not None for arg in kwargs.values()) > 1:
+        escaped_keys = ["'%s'" % name for name in kwargs.keys()]
+
+        keys_but_last = ", ".join(escaped_keys[:-1])
+        last_key = escaped_keys[-1]
+
+        msg = "Pass at most one of {keys_but_last} and {last_key}".format(
+            keys_but_last=keys_but_last, last_key=last_key
+        )
+
+        raise ValueError(msg)
diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index f656e6441..b01c02a92 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -1300,6 +1300,10 @@ def _do_multipart_success(
        num_retries=None,
        user_project=None,
        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
        kms_key_name=None,
    ):
        from six.moves.urllib.parse import urlencode
@@ -1317,7 +1321,16 @@ def _do_multipart_success(
        stream = io.BytesIO(data)
        content_type = u"application/xml"
        response = blob._do_multipart_upload(
-            client, stream, content_type, size, num_retries, predefined_acl
+            client,
+            stream,
+            content_type,
+            size,
+            num_retries,
+            predefined_acl,
+            generation_match,
+            generation_not_match,
+            metageneration_match,
+            metageneration_not_match,
        )

        # Check the mocks and the returned value.
@@ -1398,7 +1411,9 @@ def test__do_multipart_upload_bad_size(self):
        self.assertGreater(size, len(data))

        with self.assertRaises(ValueError) as exc_info:
-            blob._do_multipart_upload(None, stream, None, size, None, None)
+            blob._do_multipart_upload(
+                None, stream, None, size, None, None, None, None, None, None
+            )

        exc_contents = str(exc_info.exception)
        self.assertIn("was specified but the file-like object only had", exc_contents)
@@ -1412,6 +1427,10 @@ def _initiate_resumable_helper(
        num_retries=None,
        user_project=None,
        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
        blob_chunk_size=786432,
        kms_key_name=None,
    ):
@@ -1577,7 +1596,16 @@ def _make_resumable_transport(self, headers1, headers2, headers3, total_bytes):
        return fake_transport, responses

    @staticmethod
-    def _do_resumable_upload_call0(blob, content_type, size=None, predefined_acl=None):
+    def _do_resumable_upload_call0(
+        blob,
+        content_type,
+        size=None,
+        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
+    ):
        # First mock transport.request() initiates the upload.
        upload_url = (
            "https://storage.googleapis.com/upload/storage/v1"
@@ -1599,7 +1627,16 @@ def _do_resumable_upload_call0(blob, content_type, size=None, predefined_acl=None):
    @staticmethod
    def _do_resumable_upload_call1(
-        blob, content_type, data, resumable_url, size=None, predefined_acl=None
+        blob,
+        content_type,
+        data,
+        resumable_url,
+        size=None,
+        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
    ):
        # Second mock transport.request() sends the first chunk.
        if size is None:
@@ -1622,7 +1659,16 @@ def _do_resumable_upload_call2(
-        blob, content_type, data, resumable_url, total_bytes, predefined_acl=None
+        blob,
+        content_type,
+        data,
+        resumable_url,
+        total_bytes,
+        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
    ):
        # Third mock transport.request() sends the last chunk.
        content_range = "bytes {:d}-{:d}/{:d}".format(
@@ -1642,7 +1688,14 @@ def _do_resumable_upload_call2(
        )

    def _do_resumable_helper(
-        self, use_size=False, num_retries=None, predefined_acl=None
+        self,
+        use_size=False,
+        num_retries=None,
+        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
    ):
        bucket = _Bucket(name="yesterday")
        blob = self._make_one(u"blob-name", bucket=bucket)
@@ -1670,7 +1723,16 @@ def _do_resumable_helper(
        stream = io.BytesIO(data)
        content_type = u"text/html"
        response = blob._do_resumable_upload(
-            client, stream, content_type, size, num_retries, predefined_acl
+            client,
+            stream,
+            content_type,
+            size,
+            num_retries,
+            predefined_acl,
+            generation_match,
+            generation_not_match,
+            metageneration_match,
+            metageneration_not_match,
        )

        # Check the returned values.
@@ -1679,7 +1741,14 @@ def _do_resumable_helper(
        # Check the mocks.
        call0 = self._do_resumable_upload_call0(
-            blob, content_type, size=size, predefined_acl=predefined_acl
+            blob,
+            content_type,
+            size=size,
+            predefined_acl=predefined_acl,
+            generation_match=generation_match,
+            generation_not_match=generation_not_match,
+            metageneration_match=metageneration_match,
+            metageneration_not_match=metageneration_not_match,
        )
        call1 = self._do_resumable_upload_call1(
            blob,
@@ -1688,6 +1757,10 @@ def _do_resumable_helper(
            resumable_url,
            size=size,
            predefined_acl=predefined_acl,
+            generation_match=generation_match,
+            generation_not_match=generation_not_match,
+            metageneration_match=metageneration_match,
+            metageneration_not_match=metageneration_not_match,
        )
        call2 = self._do_resumable_upload_call2(
            blob,
@@ -1696,6 +1769,10 @@ def _do_resumable_helper(
            resumable_url,
            total_bytes,
            predefined_acl=predefined_acl,
+            generation_match=generation_match,
+            generation_not_match=generation_not_match,
+            metageneration_match=metageneration_match,
+            metageneration_not_match=metageneration_not_match,
        )
        self.assertEqual(transport.request.mock_calls, [call0, call1, call2])
@@ -1712,7 +1789,15 @@ def test__do_resumable_upload_with_predefined_acl(self):
        self._do_resumable_helper(predefined_acl="private")

    def _do_upload_helper(
-        self, chunk_size=None, num_retries=None, predefined_acl=None, size=None
+        self,
+        chunk_size=None,
+        num_retries=None,
+        predefined_acl=None,
+        generation_match=None,
+        generation_not_match=None,
+        metageneration_match=None,
+        metageneration_not_match=None,
+        size=None,
    ):
        from google.cloud.storage.blob import _MAX_MULTIPART_SIZE
@@ -1738,19 +1823,46 @@ def _do_upload_helper(
            size = 12345654321

        # Make the request and check the mocks.
        created_json = blob._do_upload(
-            client, stream, content_type, size, num_retries, predefined_acl
+            client,
+            stream,
+            content_type,
+            size,
+            num_retries,
+            predefined_acl,
+            generation_match,
+            generation_not_match,
+            metageneration_match,
+            metageneration_not_match,
        )
        self.assertIs(created_json, mock.sentinel.json)
        response.json.assert_called_once_with()

        if size is not None and size <= _MAX_MULTIPART_SIZE:
            blob._do_multipart_upload.assert_called_once_with(
-                client, stream, content_type, size, num_retries, predefined_acl
+                client,
+                stream,
+                content_type,
+                size,
+                num_retries,
+                predefined_acl,
+                generation_match,
+                generation_not_match,
+                metageneration_match,
+                metageneration_not_match,
            )
            blob._do_resumable_upload.assert_not_called()
        else:
            blob._do_multipart_upload.assert_not_called()
            blob._do_resumable_upload.assert_called_once_with(
-                client, stream, content_type, size, num_retries, predefined_acl
+                client,
+                stream,
+                content_type,
+                size,
+                num_retries,
+                predefined_acl,
+                generation_match,
+                generation_not_match,
+                metageneration_match,
+                metageneration_not_match,
            )

    def test__do_upload_uses_multipart(self):
@@ -1785,6 +1897,10 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs):
        content_type = u"font/woff"
        client = mock.sentinel.client
        predefined_acl = kwargs.get("predefined_acl", None)
+        generation_match = kwargs.get("generation_match", None)
+        generation_not_match = kwargs.get("generation_not_match", None)
+        metageneration_match = kwargs.get("metageneration_match", None)
+        metageneration_not_match = kwargs.get("metageneration_not_match", None)
        ret_val = blob.upload_from_file(
            stream, size=len(data), content_type=content_type, client=client, **kwargs
        )
@@ -1797,7 +1913,16 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs):

        # Check the mock.
num_retries = kwargs.get("num_retries") blob._do_upload.assert_called_once_with( - client, stream, content_type, len(data), num_retries, predefined_acl + client, + stream, + content_type, + len(data), + num_retries, + predefined_acl, + generation_match, + generation_not_match, + metageneration_match, + metageneration_not_match, ) return stream @@ -1841,12 +1966,16 @@ def _do_upload_mock_call_helper(self, blob, client, content_type, size): mock_call = blob._do_upload.mock_calls[0] call_name, pos_args, kwargs = mock_call self.assertEqual(call_name, "") - self.assertEqual(len(pos_args), 6) + self.assertEqual(len(pos_args), 10) self.assertEqual(pos_args[0], client) self.assertEqual(pos_args[2], content_type) self.assertEqual(pos_args[3], size) self.assertIsNone(pos_args[4]) # num_retries self.assertIsNone(pos_args[5]) # predefined_acl + self.assertIsNone(pos_args[6]) # generation_match + self.assertIsNone(pos_args[7]) # generation_not_match + self.assertIsNone(pos_args[8]) # metageneration_match + self.assertIsNone(pos_args[9]) # metageneration_not_match self.assertEqual(kwargs, {}) return pos_args[1]
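Usage sketch (reviewer note, not part of the patch): a minimal example of how the new precondition arguments behave once these changes are applied, using the parameter spellings from this diff. The bucket and object names are placeholders, the client requires default credentials as usual, and the generation_match=0 call relies on the documented GCS semantics of ifGenerationMatch=0 (succeed only if no live version of the object exists).

from google.cloud import storage

client = storage.Client()
bucket = client.bucket("example-bucket")  # placeholder bucket name
blob = bucket.blob("example.txt")  # placeholder object name

# Upload only if the object does not already exist: ifGenerationMatch=0 is
# the standard GCS "create, but do not overwrite" precondition, and 0 is
# forwarded because the check is "is not None", not truthiness.
blob.upload_from_string("hello world", generation_match=0)

# Mutually exclusive preconditions are rejected client-side: upload_from_file
# calls _raise_for_more_than_one_none before any bytes are sent.
try:
    blob.upload_from_string("hello", generation_match=1, generation_not_match=2)
except ValueError as exc:
    print(exc)  # e.g. "Pass at most one of 'generation_match' and 'generation_not_match'"

Because the mutual-exclusion check runs before _do_upload, a conflicting pair of preconditions never reaches the wire, which keeps the failure mode identical for multipart and resumable uploads.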