diff --git a/README.rst b/README.rst index ce8e70f1..a698bd8a 100644 --- a/README.rst +++ b/README.rst @@ -16,15 +16,19 @@ support at `google._async_resumable_media`. Supported Python Versions ------------------------- -Python >= 3.5 +Python >= 3.6 -Deprecated Python Versions --------------------------- -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Unsupported Python Versions +--------------------------- + +Python == 2.7, Python == 3.5. + +The last version of this library compatible with Python 2.7 and 3.5 is +`google-resumable-media==1.3.3`. License ------- Apache 2.0 - See `the LICENSE`_ for more information. -.. _the LICENSE: https://github.com/googleapis/google-resumable-media-python/blob/master/LICENSE \ No newline at end of file +.. _the LICENSE: https://github.com/googleapis/google-resumable-media-python/blob/master/LICENSE diff --git a/google/_async_resumable_media/__init__.py b/google/_async_resumable_media/__init__.py index 8c3da244..41a2064a 100644 --- a/google/_async_resumable_media/__init__.py +++ b/google/_async_resumable_media/__init__.py @@ -52,10 +52,10 @@ __all__ = [ - u"DataCorruption", - u"InvalidResponse", - u"PERMANENT_REDIRECT", - u"RetryStrategy", - u"TOO_MANY_REQUESTS", - u"UPLOAD_CHUNK_SIZE", + "DataCorruption", + "InvalidResponse", + "PERMANENT_REDIRECT", + "RetryStrategy", + "TOO_MANY_REQUESTS", + "UPLOAD_CHUNK_SIZE", ] diff --git a/google/_async_resumable_media/_download.py b/google/_async_resumable_media/_download.py index 0c9e61ef..579b662d 100644 --- a/google/_async_resumable_media/_download.py +++ b/google/_async_resumable_media/_download.py @@ -14,12 +14,9 @@ """Virtual bases classes for downloading media from Google APIs.""" - +import http.client import re -from six.moves import http_client - - from google._async_resumable_media import _helpers from google.resumable_media import common @@ -28,9 +25,9 @@ r"bytes (?P<start_byte>\d+)-(?P<end_byte>\d+)/(?P<total_bytes>\d+)", flags=re.IGNORECASE, ) -_ACCEPTABLE_STATUS_CODES = (http_client.OK, http_client.PARTIAL_CONTENT) -_GET = u"GET" -_ZERO_CONTENT_RANGE_HEADER = u"bytes */0" +_ACCEPTABLE_STATUS_CODES = (http.client.OK, http.client.PARTIAL_CONTENT) +_GET = "GET" +_ZERO_CONTENT_RANGE_HEADER = "bytes */0" class DownloadBase(object): @@ -79,7 +76,7 @@ def _get_status_code(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_headers(response): @@ -91,7 +88,7 @@ def _get_headers(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_body(response): @@ -103,7 +100,7 @@ def _get_body(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class Download(DownloadBase): @@ -164,7 +161,7 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"A download can only be used once.") + raise ValueError("A download can only be used once.") add_bytes_range(self.start, self.end, self._headers) return _GET, self.media_url, None, self._headers @@ -205,7 +202,7 @@ def consume(self, transport, timeout=None): Raises: NotImplementedError: Always, since virtual.
""" - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class ChunkedDownload(DownloadBase): @@ -239,7 +236,7 @@ class ChunkedDownload(DownloadBase): def __init__(self, media_url, chunk_size, stream, start=0, end=None, headers=None): if start < 0: raise ValueError( - u"On a chunked download the starting " u"value cannot be negative." + "On a chunked download the starting " "value cannot be negative." ) super(ChunkedDownload, self).__init__( media_url, stream=stream, start=start, end=end, headers=headers @@ -312,9 +309,9 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"Download has finished.") + raise ValueError("Download has finished.") if self.invalid: - raise ValueError(u"Download is invalid and cannot be re-used.") + raise ValueError("Download is invalid and cannot be re-used.") curr_start, curr_end = self._get_byte_range() add_bytes_range(curr_start, curr_end, self._headers) @@ -382,12 +379,12 @@ async def _process_response(self, response): response, self._get_headers, callback=self._make_invalid ) - transfer_encoding = headers.get(u"transfer-encoding") + transfer_encoding = headers.get("transfer-encoding") if transfer_encoding is None: content_length = _helpers.header_required( response, - u"content-length", + "content-length", self._get_headers, callback=self._make_invalid, ) @@ -397,10 +394,10 @@ async def _process_response(self, response): self._make_invalid() raise common.InvalidResponse( response, - u"Response is different size than content-length", - u"Expected", + "Response is different size than content-length", + "Expected", num_bytes, - u"Received", + "Received", len(response_body), ) else: @@ -434,7 +431,7 @@ def consume_next_chunk(self, transport, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def add_bytes_range(start, end, headers): @@ -474,18 +471,18 @@ def add_bytes_range(start, end, headers): return else: # NOTE: This assumes ``end`` is non-negative. - bytes_range = u"0-{:d}".format(end) + bytes_range = "0-{:d}".format(end) else: if end is None: if start < 0: - bytes_range = u"{:d}".format(start) + bytes_range = "{:d}".format(start) else: - bytes_range = u"{:d}-".format(start) + bytes_range = "{:d}-".format(start) else: # NOTE: This is invalid if ``start < 0``. - bytes_range = u"{:d}-{:d}".format(start, end) + bytes_range = "{:d}-{:d}".format(start, end) - headers[_helpers.RANGE_HEADER] = u"bytes=" + bytes_range + headers[_helpers.RANGE_HEADER] = "bytes=" + bytes_range def get_range_info(response, get_headers, callback=_helpers.do_nothing): @@ -514,15 +511,15 @@ def get_range_info(response, get_headers, callback=_helpers.do_nothing): callback() raise common.InvalidResponse( response, - u"Unexpected content-range header", + "Unexpected content-range header", content_range, - u'Expected to be of the form "bytes {start}-{end}/{total}"', + 'Expected to be of the form "bytes {start}-{end}/{total}"', ) return ( - int(match.group(u"start_byte")), - int(match.group(u"end_byte")), - int(match.group(u"total_bytes")), + int(match.group("start_byte")), + int(match.group("end_byte")), + int(match.group("total_bytes")), ) @@ -541,7 +538,7 @@ def _check_for_zero_content_range(response, get_status_code, get_headers): Returns: bool: True if content range total bytes is zero, false otherwise. 
""" - if get_status_code(response) == http_client.REQUESTED_RANGE_NOT_SATISFIABLE: + if get_status_code(response) == http.client.REQUESTED_RANGE_NOT_SATISFIABLE: content_range = _helpers.header_required( response, _helpers.CONTENT_RANGE_HEADER, diff --git a/google/_async_resumable_media/_helpers.py b/google/_async_resumable_media/_helpers.py index b0630018..f41fc1cc 100644 --- a/google/_async_resumable_media/_helpers.py +++ b/google/_async_resumable_media/_helpers.py @@ -22,16 +22,16 @@ from google.resumable_media import common -RANGE_HEADER = u"range" -CONTENT_RANGE_HEADER = u"content-range" +RANGE_HEADER = "range" +CONTENT_RANGE_HEADER = "content-range" _SLOW_CRC32C_WARNING = ( "Currently using crcmod in pure python form. This is a slow " "implementation. Python 3 has a faster implementation, `google-crc32c`, " "which will be used if it is installed." ) -_HASH_HEADER = u"x-goog-hash" -_MISSING_CHECKSUM = u"""\ +_HASH_HEADER = "x-goog-hash" +_MISSING_CHECKSUM = """\ No {checksum_type} checksum was returned from the service while downloading {} (which happens for composite objects), so client-side content integrity checking is not being performed.""" @@ -65,7 +65,7 @@ def header_required(response, name, get_headers, callback=do_nothing): if name not in headers: callback() raise common.InvalidResponse( - response, u"Response headers must contain header", name + response, "Response headers must contain header", name ) return headers[name] @@ -94,9 +94,9 @@ def require_status_code(response, status_codes, get_status_code, callback=do_not callback() raise common.InvalidResponse( response, - u"Request failed with status code", + "Request failed with status code", status_code, - u"Expected one of", + "Expected one of", *status_codes ) return status_code diff --git a/google/_async_resumable_media/_upload.py b/google/_async_resumable_media/_upload.py index f95d91f4..fc78b015 100644 --- a/google/_async_resumable_media/_upload.py +++ b/google/_async_resumable_media/_upload.py @@ -21,15 +21,12 @@ * resumable uploads (with metadata as well) """ - +import http.client import json import os import random import sys -import six -from six.moves import http_client - from google import _async_resumable_media from google._async_resumable_media import _helpers from google.resumable_media import _helpers as sync_helpers @@ -102,7 +99,7 @@ def _process_response(self, response): # Tombstone the current upload so it cannot be used again (in either # failure or success). self._finished = True - _helpers.require_status_code(response, (http_client.OK,), self._get_status_code) + _helpers.require_status_code(response, (http.client.OK,), self._get_status_code) @staticmethod def _get_status_code(response): @@ -114,7 +111,7 @@ def _get_status_code(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_headers(response): @@ -126,7 +123,7 @@ def _get_headers(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_body(response): @@ -138,7 +135,7 @@ def _get_body(response): Raises: NotImplementedError: Always, since virtual. 
""" - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class SimpleUpload(UploadBase): @@ -187,10 +184,10 @@ def _prepare_request(self, data, content_type): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"An upload can only be used once.") + raise ValueError("An upload can only be used once.") - if not isinstance(data, six.binary_type): - raise TypeError(u"`data` must be bytes, received", type(data)) + if not isinstance(data, bytes): + raise TypeError("`data` must be bytes, received", type(data)) self._headers[_CONTENT_TYPE_HEADER] = content_type return _POST, self.upload_url, data, self._headers @@ -212,7 +209,7 @@ def transmit(self, transport, data, content_type, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class MultipartUpload(UploadBase): @@ -273,10 +270,10 @@ def _prepare_request(self, data, metadata, content_type): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"An upload can only be used once.") + raise ValueError("An upload can only be used once.") - if not isinstance(data, six.binary_type): - raise TypeError(u"`data` must be bytes, received", type(data)) + if not isinstance(data, bytes): + raise TypeError("`data` must be bytes, received", type(data)) checksum_object = sync_helpers._get_checksum_object(self._checksum_type) @@ -317,7 +314,7 @@ def transmit(self, transport, data, metadata, content_type, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class ResumableUpload(UploadBase, sync_upload.ResumableUpload): @@ -355,7 +352,7 @@ def __init__(self, upload_url, chunk_size, checksum=None, headers=None): super(ResumableUpload, self).__init__(upload_url, headers=headers) if chunk_size % _async_resumable_media.UPLOAD_CHUNK_SIZE != 0: raise ValueError( - u"{} KB must divide chunk size".format( + "{} KB must divide chunk size".format( _async_resumable_media.UPLOAD_CHUNK_SIZE / 1024 ) ) @@ -448,15 +445,15 @@ def _prepare_initiate_request( .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.resumable_url is not None: - raise ValueError(u"This upload has already been initiated.") + raise ValueError("This upload has already been initiated.") if stream.tell() != 0: - raise ValueError(u"Stream must be at beginning.") + raise ValueError("Stream must be at beginning.") self._stream = stream self._content_type = content_type headers = { - _CONTENT_TYPE_HEADER: u"application/json; charset=UTF-8", - u"x-upload-content-type": content_type, + _CONTENT_TYPE_HEADER: "application/json; charset=UTF-8", + "x-upload-content-type": content_type, } # Set the total bytes if possible. if total_bytes is not None: @@ -465,11 +462,11 @@ def _prepare_initiate_request( self._total_bytes = get_total_bytes(stream) # Add the total bytes to the headers if set. 
if self._total_bytes is not None: - content_length = u"{:d}".format(self._total_bytes) - headers[u"x-upload-content-length"] = content_length + content_length = "{:d}".format(self._total_bytes) + headers["x-upload-content-length"] = content_length headers.update(self._headers) - payload = json.dumps(metadata).encode(u"utf-8") + payload = json.dumps(metadata).encode("utf-8") return _POST, self.upload_url, payload, headers def _process_initiate_response(self, response): @@ -490,12 +487,12 @@ def _process_initiate_response(self, response): """ _helpers.require_status_code( response, - (http_client.OK,), + (http.client.OK,), self._get_status_code, callback=self._make_invalid, ) self._resumable_url = _helpers.header_required( - response, u"location", self._get_headers + response, "location", self._get_headers ) def initiate( @@ -547,7 +544,7 @@ def initiate( Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def _prepare_request(self): """Prepare the contents of HTTP request to upload a chunk. @@ -580,15 +577,15 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"Upload has finished.") + raise ValueError("Upload has finished.") if self.invalid: raise ValueError( - u"Upload is in an invalid state. To recover call `recover()`." + "Upload is in an invalid state. To recover call `recover()`." ) if self.resumable_url is None: raise ValueError( - u"This upload has not been initiated. Please call " - u"initiate() before beginning to transmit chunks." + "This upload has not been initiated. Please call " + "initiate() before beginning to transmit chunks." ) start_byte, payload, content_range = get_next_chunk( @@ -638,11 +635,11 @@ async def _process_response(self, response, bytes_sent): """ status_code = _helpers.require_status_code( response, - (http_client.OK, _async_resumable_media.PERMANENT_REDIRECT), + (http.client.OK, _async_resumable_media.PERMANENT_REDIRECT), self._get_status_code, callback=self._make_invalid, ) - if status_code == http_client.OK: + if status_code == http.client.OK: # NOTE: We use the "local" information of ``bytes_sent`` to update # ``bytes_uploaded``, but do not verify this against other # state. However, there may be some other information: @@ -668,11 +665,11 @@ async def _process_response(self, response, bytes_sent): self._make_invalid() raise common.InvalidResponse( response, - u'Unexpected "range" header', + 'Unexpected "range" header', bytes_range, - u'Expected to be of the form "bytes=0-{end}"', + 'Expected to be of the form "bytes=0-{end}"', ) - self._bytes_uploaded = int(match.group(u"end_byte")) + 1 + self._bytes_uploaded = int(match.group("end_byte")) + 1 async def _validate_checksum(self, response): """Check the computed checksum, if any, against the response headers. @@ -725,7 +722,7 @@ def transmit_next_chunk(self, transport, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def _prepare_recover_request(self): """Prepare the contents of HTTP request to recover from failure. @@ -753,9 +750,9 @@ def _prepare_recover_request(self): .. 
_sans-I/O: https://sans-io.readthedocs.io/ """ if not self.invalid: - raise ValueError(u"Upload is not in invalid state, no need to recover.") + raise ValueError("Upload is not in invalid state, no need to recover.") - headers = {_helpers.CONTENT_RANGE_HEADER: u"bytes */*"} + headers = {_helpers.CONTENT_RANGE_HEADER: "bytes */*"} return _PUT, self.resumable_url, None, headers def _process_recover_response(self, response): @@ -789,11 +786,11 @@ def _process_recover_response(self, response): if match is None: raise common.InvalidResponse( response, - u'Unexpected "range" header', + 'Unexpected "range" header', bytes_range, - u'Expected to be of the form "bytes=0-{end}"', + 'Expected to be of the form "bytes=0-{end}"', ) - self._bytes_uploaded = int(match.group(u"end_byte")) + 1 + self._bytes_uploaded = int(match.group("end_byte")) + 1 else: # In this case, the upload has not "begun". self._bytes_uploaded = 0 @@ -818,7 +815,7 @@ def recover(self, transport): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def get_boundary(): @@ -831,7 +828,7 @@ def get_boundary(): boundary = _BOUNDARY_FORMAT.format(random_int) # NOTE: Neither % formatting nor .format() are available for byte strings # in Python 3.4, so we must use unicode strings as templates. - return boundary.encode(u"utf-8") + return boundary.encode("utf-8") def construct_multipart_request(data, metadata, content_type): @@ -850,8 +847,8 @@ def construct_multipart_request(data, metadata, content_type): between each part. """ multipart_boundary = get_boundary() - json_bytes = json.dumps(metadata).encode(u"utf-8") - content_type = content_type.encode(u"utf-8") + json_bytes = json.dumps(metadata).encode("utf-8") + content_type = content_type.encode("utf-8") # Combine the two parts into a multipart payload. # NOTE: We'd prefer a bytes template but are restricted by Python 3.4. boundary_sep = _MULTIPART_SEP + multipart_boundary @@ -940,12 +937,12 @@ def get_next_chunk(stream, chunk_size, total_bytes): # stream to be at the beginning. if num_bytes_read != 0: raise ValueError( - u"Stream specified as empty, but produced non-empty content." + "Stream specified as empty, but produced non-empty content." ) else: if num_bytes_read == 0: raise ValueError( - u"Stream is already exhausted. There is no content remaining." + "Stream is already exhausted. There is no content remaining." ) content_range = get_content_range(start_byte, end_byte, total_bytes) diff --git a/google/_async_resumable_media/requests/__init__.py b/google/_async_resumable_media/requests/__init__.py index aaaa2836..7920f666 100644 --- a/google/_async_resumable_media/requests/__init__.py +++ b/google/_async_resumable_media/requests/__init__.py @@ -36,7 +36,7 @@ def mock_default(scopes=None): credentials = mock.Mock(spec=creds_mod.Credentials) - return credentials, u'mock-project' + return credentials, 'mock-project' # Patch the ``default`` function on the module. 
original_default = google.auth.default @@ -47,7 +47,7 @@ def mock_default(scopes=None): >>> import google.auth >>> import google.auth.transport.requests as tr_requests >>> - >>> ro_scope = u'https://www.googleapis.com/auth/devstorage.read_only' + >>> ro_scope = 'https://www.googleapis.com/auth/devstorage.read_only' >>> credentials, _ = google.auth.default(scopes=(ro_scope,)) >>> transport = tr_requests.AuthorizedSession(credentials) >>> transport @@ -70,14 +70,14 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client - bucket = u'bucket-foo' - blob_name = u'file.txt' + bucket = 'bucket-foo' + blob_name = 'file.txt' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) - fake_response.headers[u'Content-Length'] = u'1364156' + fake_response.status_code = int(http.client.OK) + fake_response.headers['Content-Length'] = '1364156' fake_content = mock.MagicMock(spec=['__len__']) fake_content.__len__.return_value = 1364156 fake_response._content = fake_content @@ -90,8 +90,8 @@ def mock_default(scopes=None): >>> from google.resumable_media.requests import Download >>> >>> url_template = ( - ... u'https://www.googleapis.com/download/storage/v1/b/' - ... u'{bucket}/o/{blob_name}?alt=media') + ... 'https://www.googleapis.com/download/storage/v1/b/' + ... '{bucket}/o/{blob_name}?alt=media') >>> media_url = url_template.format( ... bucket=bucket, blob_name=blob_name) >>> @@ -101,7 +101,7 @@ def mock_default(scopes=None): True >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '1364156' >>> len(response.content) 1364156 @@ -113,20 +113,20 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import Download - media_url = u'http://test.invalid' + media_url = 'http://test.invalid' start = 4096 end = 8191 slice_size = end - start + 1 fake_response = requests.Response() - fake_response.status_code = int(http_client.PARTIAL_CONTENT) - fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size) - content_range = u'bytes {:d}-{:d}/1364156'.format(start, end) - fake_response.headers[u'Content-Range'] = content_range + fake_response.status_code = int(http.client.PARTIAL_CONTENT) + fake_response.headers['Content-Length'] = '{:d}'.format(slice_size) + content_range = 'bytes {:d}-{:d}/1364156'.format(start, end) + fake_response.headers['Content-Range'] = content_range fake_content = mock.MagicMock(spec=['__len__']) fake_content.__len__.return_value = slice_size fake_response._content = fake_content @@ -142,9 +142,9 @@ def mock_default(scopes=None): True >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '4096' - >>> response.headers[u'Content-Range'] + >>> response.headers['Content-Range'] 'bytes 4096-8191/1364156' >>> len(response.content) 4096 @@ -172,17 +172,17 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client - media_url = u'http://test.invalid' + media_url = 'http://test.invalid' fifty_mb = 50 * 1024 * 1024 one_gb = 1024 * 1024 * 1024 fake_response = requests.Response() - fake_response.status_code = int(http_client.PARTIAL_CONTENT) - fake_response.headers[u'Content-Length'] = u'{:d}'.format(fifty_mb) - content_range = u'bytes 0-{:d}/{:d}'.format(fifty_mb - 1, one_gb) - fake_response.headers[u'Content-Range'] = content_range + 
fake_response.status_code = int(http.client.PARTIAL_CONTENT) + fake_response.headers['Content-Length'] = '{:d}'.format(fifty_mb) + content_range = 'bytes 0-{:d}/{:d}'.format(fifty_mb - 1, one_gb) + fake_response.headers['Content-Range'] = content_range fake_content_begin = b'The beginning of the chunk...' fake_content = fake_content_begin + b'1' * (fifty_mb - 29) fake_response._content = fake_content @@ -213,9 +213,9 @@ def mock_default(scopes=None): 1073741824 >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '52428800' - >>> response.headers[u'Content-Range'] + >>> response.headers['Content-Range'] 'bytes 0-52428799/1073741824' >>> len(response.content) == chunk_size True @@ -232,11 +232,11 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ChunkedDownload - media_url = u'http://test.invalid' + media_url = 'http://test.invalid' fifty_mb = 50 * 1024 * 1024 one_gb = 1024 * 1024 * 1024 @@ -246,12 +246,12 @@ def mock_default(scopes=None): download._total_bytes = one_gb fake_response = requests.Response() - fake_response.status_code = int(http_client.PARTIAL_CONTENT) + fake_response.status_code = int(http.client.PARTIAL_CONTENT) slice_size = one_gb - 20 * fifty_mb - fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size) - content_range = u'bytes {:d}-{:d}/{:d}'.format( + fake_response.headers['Content-Length'] = '{:d}'.format(slice_size) + content_range = 'bytes {:d}-{:d}/{:d}'.format( 20 * fifty_mb, one_gb - 1, one_gb) - fake_response.headers[u'Content-Range'] = content_range + fake_response.headers['Content-Range'] = content_range fake_content = mock.MagicMock(spec=['__len__']) fake_content.__len__.return_value = slice_size fake_response._content = fake_content @@ -276,9 +276,9 @@ def mock_default(scopes=None): True >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '25165824' - >>> response.headers[u'Content-Range'] + >>> response.headers['Content-Range'] 'bytes 1048576000-1073741823/1073741824' >>> len(response.content) < download.chunk_size True @@ -301,21 +301,21 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client - bucket = u'some-bucket' - blob_name = u'file.txt' + bucket = 'some-bucket' + blob_name = 'file.txt' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) payload = { - u'bucket': bucket, - u'contentType': u'text/plain', - u'md5Hash': u'M0XLEsX9/sMdiI+4pB4CAQ==', - u'name': blob_name, - u'size': u'27', + 'bucket': bucket, + 'contentType': 'text/plain', + 'md5Hash': 'M0XLEsX9/sMdiI+4pB4CAQ==', + 'name': blob_name, + 'size': '27', } - fake_response._content = json.dumps(payload).encode(u'utf-8') + fake_response._content = json.dumps(payload).encode('utf-8') post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -326,30 +326,30 @@ def mock_default(scopes=None): >>> from google.resumable_media.requests import SimpleUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=media&' - ... u'name={blob_name}') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=media&' + ... 'name={blob_name}') >>> upload_url = url_template.format( ... 
bucket=bucket, blob_name=blob_name) >>> >>> upload = SimpleUpload(upload_url) >>> data = b'Some not too large content.' - >>> content_type = u'text/plain' + >>> content_type = 'text/plain' >>> response = upload.transmit(transport, data, content_type) >>> upload.finished True >>> response >>> json_response = response.json() - >>> json_response[u'bucket'] == bucket + >>> json_response['bucket'] == bucket True - >>> json_response[u'name'] == blob_name + >>> json_response['name'] == blob_name True - >>> json_response[u'contentType'] == content_type + >>> json_response['contentType'] == content_type True - >>> json_response[u'md5Hash'] + >>> json_response['md5Hash'] 'M0XLEsX9/sMdiI+4pB4CAQ==' - >>> int(json_response[u'size']) == len(data) + >>> int(json_response['size']) == len(data) True In the rare case that an upload fails, an :exc:`.InvalidResponse` @@ -361,25 +361,25 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client from google import resumable_media from google.resumable_media import _helpers from google.resumable_media.requests import SimpleUpload as constructor - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' data = b'Some not too large content.' - content_type = u'text/plain' + content_type = 'text/plain' fake_response = requests.Response() - fake_response.status_code = int(http_client.SERVICE_UNAVAILABLE) + fake_response.status_code = int(http.client.SERVICE_UNAVAILABLE) post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) time_sleep = time.sleep def dont_sleep(seconds): - raise RuntimeError(u'No sleep', seconds) + raise RuntimeError('No sleep', seconds) def SimpleUpload(*args, **kwargs): upload = constructor(*args, **kwargs) @@ -436,21 +436,21 @@ def SimpleUpload(*args, **kwargs): import mock import requests - from six.moves import http_client + import http.client - bucket = u'some-bucket' - blob_name = u'file.txt' + bucket = 'some-bucket' + blob_name = 'file.txt' data = b'Some not too large content.' - content_type = u'text/plain' + content_type = 'text/plain' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) payload = { - u'bucket': bucket, - u'name': blob_name, - u'metadata': {u'color': u'grurple'}, + 'bucket': bucket, + 'name': blob_name, + 'metadata': {'color': 'grurple'}, } - fake_response._content = json.dumps(payload).encode(u'utf-8') + fake_response._content = json.dumps(payload).encode('utf-8') post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -460,15 +460,15 @@ def SimpleUpload(*args, **kwargs): >>> from google.resumable_media.requests import MultipartUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=multipart') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=multipart') >>> upload_url = url_template.format(bucket=bucket) >>> >>> upload = MultipartUpload(upload_url) >>> metadata = { - ... u'name': blob_name, - ... u'metadata': { - ... u'color': u'grurple', + ... 'name': blob_name, + ... 'metadata': { + ... 'color': 'grurple', ... }, ... 
} >>> response = upload.transmit(transport, data, metadata, content_type) @@ -477,11 +477,11 @@ def SimpleUpload(*args, **kwargs): >>> response >>> json_response = response.json() - >>> json_response[u'bucket'] == bucket + >>> json_response['bucket'] == bucket True - >>> json_response[u'name'] == blob_name + >>> json_response['name'] == blob_name True - >>> json_response[u'metadata'] == metadata[u'metadata'] + >>> json_response['metadata'] == metadata['metadata'] True As with the simple upload, in the case of failure an :exc:`.InvalidResponse` @@ -522,24 +522,24 @@ def SimpleUpload(*args, **kwargs): import mock import requests - from six.moves import http_client + import http.client - bucket = u'some-bucket' - blob_name = u'file.txt' + bucket = 'some-bucket' + blob_name = 'file.txt' data = b'Some resumable bytes.' - content_type = u'text/plain' + content_type = 'text/plain' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - upload_id = u'ABCdef189XY_super_serious' + upload_id = 'ABCdef189XY_super_serious' resumable_url_template = ( - u'https://www.googleapis.com/upload/storage/v1/b/{bucket}' - u'/o?uploadType=resumable&upload_id={upload_id}') + 'https://www.googleapis.com/upload/storage/v1/b/{bucket}' + '/o?uploadType=resumable&upload_id={upload_id}') resumable_url = resumable_url_template.format( bucket=bucket, upload_id=upload_id) - fake_response.headers[u'location'] = resumable_url - fake_response.headers[u'x-guploader-uploadid'] = upload_id + fake_response.headers['location'] = resumable_url + fake_response.headers['x-guploader-uploadid'] = upload_id post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -549,8 +549,8 @@ def SimpleUpload(*args, **kwargs): >>> from google.resumable_media.requests import ResumableUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=resumable') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=resumable') >>> upload_url = url_template.format(bucket=bucket) >>> >>> chunk_size = 1024 * 1024 # 1MB @@ -559,18 +559,18 @@ def SimpleUpload(*args, **kwargs): >>> # The upload doesn't know how "big" it is until seeing a stream. >>> upload.total_bytes is None True - >>> metadata = {u'name': blob_name} + >>> metadata = {'name': blob_name} >>> response = upload.initiate(transport, stream, metadata, content_type) >>> response - >>> upload.resumable_url == response.headers[u'Location'] + >>> upload.resumable_url == response.headers['Location'] True >>> upload.total_bytes == len(data) True - >>> upload_id = response.headers[u'X-GUploader-UploadID'] + >>> upload_id = response.headers['X-GUploader-UploadID'] >>> upload_id 'ABCdef189XY_super_serious' - >>> upload.resumable_url == upload_url + u'&upload_id=' + upload_id + >>> upload.resumable_url == upload_url + '&upload_id=' + upload_id True Once a :class:`.ResumableUpload` has been initiated, the resource is @@ -583,7 +583,7 @@ def SimpleUpload(*args, **kwargs): import mock import requests - from six.moves import http_client + import http.client from google import resumable_media import google.resumable_media.requests.upload as upload_mod @@ -591,12 +591,12 @@ def SimpleUpload(*args, **kwargs): data = b'01234567891' stream = io.BytesIO(data) # Create an "already initiated" upload. 
- upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 256 * 1024 # 256KB upload = upload_mod.ResumableUpload(upload_url, chunk_size) - upload._resumable_url = u'http://test.invalid?upload_id=mocked' + upload._resumable_url = 'http://test.invalid?upload_id=mocked' upload._stream = stream - upload._content_type = u'text/plain' + upload._content_type = 'text/plain' upload._total_bytes = len(data) # After-the-fact update the chunk size so that len(data) @@ -605,22 +605,22 @@ def SimpleUpload(*args, **kwargs): # Make three fake responses. fake_response0 = requests.Response() fake_response0.status_code = resumable_media.PERMANENT_REDIRECT - fake_response0.headers[u'range'] = u'bytes=0-3' + fake_response0.headers['range'] = 'bytes=0-3' fake_response1 = requests.Response() fake_response1.status_code = resumable_media.PERMANENT_REDIRECT - fake_response1.headers[u'range'] = u'bytes=0-7' + fake_response1.headers['range'] = 'bytes=0-7' fake_response2 = requests.Response() - fake_response2.status_code = int(http_client.OK) - bucket = u'some-bucket' - blob_name = u'file.txt' + fake_response2.status_code = int(http.client.OK) + bucket = 'some-bucket' + blob_name = 'file.txt' payload = { - u'bucket': bucket, - u'name': blob_name, - u'size': u'{:d}'.format(len(data)), + 'bucket': bucket, + 'name': blob_name, + 'size': '{:d}'.format(len(data)), } - fake_response2._content = json.dumps(payload).encode(u'utf-8') + fake_response2._content = json.dumps(payload).encode('utf-8') # Use the fake responses to mock a transport. responses = [fake_response0, fake_response1, fake_response2] @@ -653,9 +653,9 @@ def SimpleUpload(*args, **kwargs): >>> upload.bytes_uploaded == upload.total_bytes True >>> json_response = response2.json() - >>> json_response[u'bucket'] == bucket + >>> json_response['bucket'] == bucket True - >>> json_response[u'name'] == blob_name + >>> json_response['name'] == blob_name True """ from google._async_resumable_media.requests.download import ChunkedDownload @@ -668,11 +668,11 @@ def SimpleUpload(*args, **kwargs): __all__ = [ - u"ChunkedDownload", - u"Download", - u"MultipartUpload", - u"RawChunkedDownload", - u"RawDownload", - u"ResumableUpload", - u"SimpleUpload", + "ChunkedDownload", + "Download", + "MultipartUpload", + "RawChunkedDownload", + "RawDownload", + "ResumableUpload", + "SimpleUpload", ] diff --git a/google/_async_resumable_media/requests/download.py b/google/_async_resumable_media/requests/download.py index 5ac97c59..55ec5841 100644 --- a/google/_async_resumable_media/requests/download.py +++ b/google/_async_resumable_media/requests/download.py @@ -133,14 +133,14 @@ async def consume(self, transport, timeout=_request_helpers._DEFAULT_TIMEOUT): method, url, payload, headers = self._prepare_request() # NOTE: We assume "payload is None" but pass it along anyway. request_kwargs = { - u"data": payload, - u"headers": headers, - u"retry_strategy": self._retry_strategy, - u"timeout": timeout, + "data": payload, + "headers": headers, + "retry_strategy": self._retry_strategy, + "timeout": timeout, } if self._stream is not None: - request_kwargs[u"stream"] = True + request_kwargs["stream"] = True result = await _request_helpers.http_request( transport, method, url, **request_kwargs @@ -425,8 +425,8 @@ def _add_decoder(response_raw, checksum): caller will no longer need to hash to decoded bytes. 
""" - encoding = response_raw.headers.get(u"content-encoding", u"").lower() - if encoding != u"gzip": + encoding = response_raw.headers.get("content-encoding", "").lower() + if encoding != "gzip": return checksum response_raw._decoder = _GzipDecoder(checksum) diff --git a/google/_async_resumable_media/requests/upload.py b/google/_async_resumable_media/requests/upload.py index 8a1291a5..4c6e79fb 100644 --- a/google/_async_resumable_media/requests/upload.py +++ b/google/_async_resumable_media/requests/upload.py @@ -154,15 +154,15 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): .. testsetup:: resumable-constructor - bucket = u'bucket-foo' + bucket = 'bucket-foo' .. doctest:: resumable-constructor >>> from google.resumable_media.requests import ResumableUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=resumable') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=resumable') >>> upload_url = url_template.format(bucket=bucket) >>> >>> chunk_size = 3 * 1024 * 1024 # 3MB @@ -179,11 +179,11 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ResumableUpload - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 3 * 1024 * 1024 # 3MB upload = ResumableUpload(upload_url, chunk_size) @@ -191,14 +191,14 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): os.close(file_desc) data = b'some bytes!' - with open(filename, u'wb') as file_obj: + with open(filename, 'wb') as file_obj: file_obj.write(data) fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - resumable_url = u'http://test.invalid?upload_id=7up' - fake_response.headers[u'location'] = resumable_url + resumable_url = 'http://test.invalid?upload_id=7up' + fake_response.headers['location'] = resumable_url post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -210,11 +210,11 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): >>> upload.total_bytes is None True >>> - >>> stream = open(filename, u'rb') + >>> stream = open(filename, 'rb') >>> total_bytes = os.path.getsize(filename) - >>> metadata = {u'name': filename} + >>> metadata = {'name': filename} >>> response = upload.initiate( - ... transport, stream, metadata, u'text/plain', + ... transport, stream, metadata, 'text/plain', ... 
total_bytes=total_bytes) >>> response @@ -236,26 +236,26 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ResumableUpload - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 3 * 1024 * 1024 # 3MB upload = ResumableUpload(upload_url, chunk_size) fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - resumable_url = u'http://test.invalid?upload_id=7up' - fake_response.headers[u'location'] = resumable_url + resumable_url = 'http://test.invalid?upload_id=7up' + fake_response.headers['location'] = resumable_url post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) data = b'some MOAR bytes!' - metadata = {u'name': u'some-file.jpg'} - content_type = u'image/jpeg' + metadata = {'name': 'some-file.jpg'} + content_type = 'image/jpeg' .. doctest:: resumable-implicit-size @@ -277,25 +277,25 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ResumableUpload - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 3 * 1024 * 1024 # 3MB upload = ResumableUpload(upload_url, chunk_size) fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - resumable_url = u'http://test.invalid?upload_id=7up' - fake_response.headers[u'location'] = resumable_url + resumable_url = 'http://test.invalid?upload_id=7up' + fake_response.headers['location'] = resumable_url post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) - metadata = {u'name': u'some-file.jpg'} - content_type = u'application/octet-stream' + metadata = {'name': 'some-file.jpg'} + content_type = 'application/octet-stream' stream = io.BytesIO(b'data') @@ -418,24 +418,24 @@ async def transmit_next_chunk( import mock import requests - from six.moves import http_client + import http.client from google import resumable_media import google.resumable_media.requests.upload as upload_mod transport = mock.Mock(spec=['request']) fake_response = requests.Response() - fake_response.status_code = int(http_client.BAD_REQUEST) + fake_response.status_code = int(http.client.BAD_REQUEST) transport.request.return_value = fake_response - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' upload = upload_mod.ResumableUpload( upload_url, resumable_media.UPLOAD_CHUNK_SIZE) # Fake that the upload has been initiate()-d data = b'data is here' upload._stream = io.BytesIO(data) upload._total_bytes = len(data) - upload._resumable_url = u'http://test.invalid?upload_id=nope' + upload._resumable_url = 'http://test.invalid?upload_id=nope' .. 
doctest:: bad-response :options: +NORMALIZE_WHITESPACE diff --git a/google/resumable_media/__init__.py b/google/resumable_media/__init__.py index 8c3da244..41a2064a 100644 --- a/google/resumable_media/__init__.py +++ b/google/resumable_media/__init__.py @@ -52,10 +52,10 @@ __all__ = [ - u"DataCorruption", - u"InvalidResponse", - u"PERMANENT_REDIRECT", - u"RetryStrategy", - u"TOO_MANY_REQUESTS", - u"UPLOAD_CHUNK_SIZE", + "DataCorruption", + "InvalidResponse", + "PERMANENT_REDIRECT", + "RetryStrategy", + "TOO_MANY_REQUESTS", + "UPLOAD_CHUNK_SIZE", ] diff --git a/google/resumable_media/_download.py b/google/resumable_media/_download.py index 1b06d068..b2bac98c 100644 --- a/google/resumable_media/_download.py +++ b/google/resumable_media/_download.py @@ -15,10 +15,9 @@ """Virtual bases classes for downloading media from Google APIs.""" +import http.client import re -from six.moves import http_client - from google.resumable_media import _helpers from google.resumable_media import common @@ -27,9 +26,9 @@ r"bytes (?P<start_byte>\d+)-(?P<end_byte>\d+)/(?P<total_bytes>\d+)", flags=re.IGNORECASE, ) -_ACCEPTABLE_STATUS_CODES = (http_client.OK, http_client.PARTIAL_CONTENT) -_GET = u"GET" -_ZERO_CONTENT_RANGE_HEADER = u"bytes */0" +_ACCEPTABLE_STATUS_CODES = (http.client.OK, http.client.PARTIAL_CONTENT) +_GET = "GET" +_ZERO_CONTENT_RANGE_HEADER = "bytes */0" class DownloadBase(object): @@ -78,7 +77,7 @@ def _get_status_code(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_headers(response): @@ -90,7 +89,7 @@ def _get_headers(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_body(response): @@ -102,7 +101,7 @@ def _get_body(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class Download(DownloadBase): @@ -163,7 +162,7 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"A download can only be used once.") + raise ValueError("A download can only be used once.") add_bytes_range(self.start, self.end, self._headers) return _GET, self.media_url, None, self._headers @@ -206,7 +205,7 @@ def consume(self, transport, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class ChunkedDownload(DownloadBase): @@ -240,7 +239,7 @@ class ChunkedDownload(DownloadBase): def __init__(self, media_url, chunk_size, stream, start=0, end=None, headers=None): if start < 0: raise ValueError( - u"On a chunked download the starting " u"value cannot be negative." + "On a chunked download the starting " "value cannot be negative." ) super(ChunkedDownload, self).__init__( media_url, stream=stream, start=start, end=end, headers=headers @@ -313,9 +312,9 @@ def _prepare_request(self): ..
_sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"Download has finished.") + raise ValueError("Download has finished.") if self.invalid: - raise ValueError(u"Download is invalid and cannot be re-used.") + raise ValueError("Download is invalid and cannot be re-used.") curr_start, curr_end = self._get_byte_range() add_bytes_range(curr_start, curr_end, self._headers) @@ -383,12 +382,12 @@ def _process_response(self, response): response, self._get_headers, callback=self._make_invalid ) - transfer_encoding = headers.get(u"transfer-encoding") + transfer_encoding = headers.get("transfer-encoding") if transfer_encoding is None: content_length = _helpers.header_required( response, - u"content-length", + "content-length", self._get_headers, callback=self._make_invalid, ) @@ -397,10 +396,10 @@ def _process_response(self, response): self._make_invalid() raise common.InvalidResponse( response, - u"Response is different size than content-length", - u"Expected", + "Response is different size than content-length", + "Expected", num_bytes, - u"Received", + "Received", len(response_body), ) else: @@ -437,7 +436,7 @@ def consume_next_chunk(self, transport, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def add_bytes_range(start, end, headers): @@ -477,18 +476,18 @@ def add_bytes_range(start, end, headers): return else: # NOTE: This assumes ``end`` is non-negative. - bytes_range = u"0-{:d}".format(end) + bytes_range = "0-{:d}".format(end) else: if end is None: if start < 0: - bytes_range = u"{:d}".format(start) + bytes_range = "{:d}".format(start) else: - bytes_range = u"{:d}-".format(start) + bytes_range = "{:d}-".format(start) else: # NOTE: This is invalid if ``start < 0``. - bytes_range = u"{:d}-{:d}".format(start, end) + bytes_range = "{:d}-{:d}".format(start, end) - headers[_helpers.RANGE_HEADER] = u"bytes=" + bytes_range + headers[_helpers.RANGE_HEADER] = "bytes=" + bytes_range def get_range_info(response, get_headers, callback=_helpers.do_nothing): @@ -517,15 +516,15 @@ def get_range_info(response, get_headers, callback=_helpers.do_nothing): callback() raise common.InvalidResponse( response, - u"Unexpected content-range header", + "Unexpected content-range header", content_range, - u'Expected to be of the form "bytes {start}-{end}/{total}"', + 'Expected to be of the form "bytes {start}-{end}/{total}"', ) return ( - int(match.group(u"start_byte")), - int(match.group(u"end_byte")), - int(match.group(u"total_bytes")), + int(match.group("start_byte")), + int(match.group("end_byte")), + int(match.group("total_bytes")), ) @@ -544,7 +543,7 @@ def _check_for_zero_content_range(response, get_status_code, get_headers): Returns: bool: True if content range total bytes is zero, false otherwise. 
""" - if get_status_code(response) == http_client.REQUESTED_RANGE_NOT_SATISFIABLE: + if get_status_code(response) == http.client.REQUESTED_RANGE_NOT_SATISFIABLE: content_range = _helpers.header_required( response, _helpers.CONTENT_RANGE_HEADER, diff --git a/google/resumable_media/_helpers.py b/google/resumable_media/_helpers.py index 62fed35b..d7f877a8 100644 --- a/google/resumable_media/_helpers.py +++ b/google/resumable_media/_helpers.py @@ -26,16 +26,16 @@ from google.resumable_media import common -RANGE_HEADER = u"range" -CONTENT_RANGE_HEADER = u"content-range" +RANGE_HEADER = "range" +CONTENT_RANGE_HEADER = "content-range" _SLOW_CRC32C_WARNING = ( "Currently using crcmod in pure python form. This is a slow " "implementation. Python 3 has a faster implementation, `google-crc32c`, " "which will be used if it is installed." ) -_HASH_HEADER = u"x-goog-hash" -_MISSING_CHECKSUM = u"""\ +_HASH_HEADER = "x-goog-hash" +_MISSING_CHECKSUM = """\ No {checksum_type} checksum was returned from the service while downloading {} (which happens for composite objects), so client-side content integrity checking is not being performed.""" @@ -69,7 +69,7 @@ def header_required(response, name, get_headers, callback=do_nothing): if name not in headers: callback() raise common.InvalidResponse( - response, u"Response headers must contain header", name + response, "Response headers must contain header", name ) return headers[name] @@ -98,9 +98,9 @@ def require_status_code(response, status_codes, get_status_code, callback=do_not callback() raise common.InvalidResponse( response, - u"Request failed with status code", + "Request failed with status code", status_code, - u"Expected one of", + "Expected one of", *status_codes ) return status_code @@ -253,7 +253,7 @@ def prepare_checksum_digest(digest_bytestring): """ encoded_digest = base64.b64encode(digest_bytestring) # NOTE: ``b64encode`` returns ``bytes``, but HTTP headers expect ``str``. 
- return encoded_digest.decode(u"utf-8") + return encoded_digest.decode("utf-8") def _get_expected_checksum(response, get_headers, media_url, checksum_type): @@ -332,8 +332,8 @@ def _parse_checksum_header(header_value, response, checksum_label): return None matches = [] - for checksum in header_value.split(u","): - name, value = checksum.split(u"=", 1) + for checksum in header_value.split(","): + name, value = checksum.split("=", 1) # Official docs say "," is the separator, but real-world responses have encountered ", " if name.lstrip() == checksum_label: matches.append(value) @@ -345,7 +345,7 @@ else: raise common.InvalidResponse( response, - u"X-Goog-Hash header had multiple ``{}`` values.".format(checksum_label), + "X-Goog-Hash header had multiple ``{}`` values.".format(checksum_label), header_value, matches, ) diff --git a/google/resumable_media/_upload.py b/google/resumable_media/_upload.py index 3a98464f..4e38bdd8 100644 --- a/google/resumable_media/_upload.py +++ b/google/resumable_media/_upload.py @@ -21,43 +21,40 @@ * resumable uploads (with metadata as well) """ - +import http.client import json import os import random import re import sys -import six -from six.moves import http_client - from google import resumable_media from google.resumable_media import _helpers from google.resumable_media import common -_CONTENT_TYPE_HEADER = u"content-type" -_CONTENT_RANGE_TEMPLATE = u"bytes {:d}-{:d}/{:d}" -_RANGE_UNKNOWN_TEMPLATE = u"bytes {:d}-{:d}/*" -_EMPTY_RANGE_TEMPLATE = u"bytes */{:d}" +_CONTENT_TYPE_HEADER = "content-type" +_CONTENT_RANGE_TEMPLATE = "bytes {:d}-{:d}/{:d}" +_RANGE_UNKNOWN_TEMPLATE = "bytes {:d}-{:d}/*" +_EMPTY_RANGE_TEMPLATE = "bytes */{:d}" _BOUNDARY_WIDTH = len(str(sys.maxsize - 1)) -_BOUNDARY_FORMAT = u"==============={{:0{:d}d}}==".format(_BOUNDARY_WIDTH) +_BOUNDARY_FORMAT = "==============={{:0{:d}d}}==".format(_BOUNDARY_WIDTH) _MULTIPART_SEP = b"--" _CRLF = b"\r\n" _MULTIPART_BEGIN = b"\r\ncontent-type: application/json; charset=UTF-8\r\n\r\n" _RELATED_HEADER = b'multipart/related; boundary="' _BYTES_RANGE_RE = re.compile(r"bytes=0-(?P<end_byte>\d+)", flags=re.IGNORECASE) _STREAM_ERROR_TEMPLATE = ( - u"Bytes stream is in unexpected state. " - u"The local stream has had {:d} bytes read from it while " - u"{:d} bytes have already been updated (they should match)." + "Bytes stream is in unexpected state. " + "The local stream has had {:d} bytes read from it while " + "{:d} bytes have already been updated (they should match)." ) _STREAM_READ_PAST_TEMPLATE = ( - u"{:d} bytes have been read from the stream, which exceeds " - u"the expected total {:d}." + "{:d} bytes have been read from the stream, which exceeds " + "the expected total {:d}." ) -_POST = u"POST" -_PUT = u"PUT" +_POST = "POST" +_PUT = "PUT" _UPLOAD_CHECKSUM_MISMATCH_MESSAGE = ( "The computed ``{}`` checksum, ``{}``, and the checksum reported by the " "remote host, ``{}``, did not match." @@ -113,7 +110,7 @@ def _process_response(self, response): # Tombstone the current upload so it cannot be used again (in either # failure or success). self._finished = True - _helpers.require_status_code(response, (http_client.OK,), self._get_status_code) + _helpers.require_status_code(response, (http.client.OK,), self._get_status_code) @staticmethod def _get_status_code(response): @@ -125,7 +122,7 @@ def _get_status_code(response): Raises: NotImplementedError: Always, since virtual.
""" - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_headers(response): @@ -137,7 +134,7 @@ def _get_headers(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") @staticmethod def _get_body(response): @@ -149,7 +146,7 @@ def _get_body(response): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class SimpleUpload(UploadBase): @@ -198,10 +195,10 @@ def _prepare_request(self, data, content_type): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"An upload can only be used once.") + raise ValueError("An upload can only be used once.") - if not isinstance(data, six.binary_type): - raise TypeError(u"`data` must be bytes, received", type(data)) + if not isinstance(data, bytes): + raise TypeError("`data` must be bytes, received", type(data)) self._headers[_CONTENT_TYPE_HEADER] = content_type return _POST, self.upload_url, data, self._headers @@ -225,7 +222,7 @@ def transmit(self, transport, data, content_type, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class MultipartUpload(UploadBase): @@ -286,10 +283,10 @@ def _prepare_request(self, data, metadata, content_type): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"An upload can only be used once.") + raise ValueError("An upload can only be used once.") - if not isinstance(data, six.binary_type): - raise TypeError(u"`data` must be bytes, received", type(data)) + if not isinstance(data, bytes): + raise TypeError("`data` must be bytes, received", type(data)) checksum_object = _helpers._get_checksum_object(self._checksum_type) if checksum_object: @@ -328,7 +325,7 @@ def transmit(self, transport, data, metadata, content_type, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") class ResumableUpload(UploadBase): @@ -366,7 +363,7 @@ def __init__(self, upload_url, chunk_size, checksum=None, headers=None): super(ResumableUpload, self).__init__(upload_url, headers=headers) if chunk_size % resumable_media.UPLOAD_CHUNK_SIZE != 0: raise ValueError( - u"{} KB must divide chunk size".format( + "{} KB must divide chunk size".format( resumable_media.UPLOAD_CHUNK_SIZE / 1024 ) ) @@ -459,15 +456,15 @@ def _prepare_initiate_request( .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.resumable_url is not None: - raise ValueError(u"This upload has already been initiated.") + raise ValueError("This upload has already been initiated.") if stream.tell() != 0: - raise ValueError(u"Stream must be at beginning.") + raise ValueError("Stream must be at beginning.") self._stream = stream self._content_type = content_type headers = { - _CONTENT_TYPE_HEADER: u"application/json; charset=UTF-8", - u"x-upload-content-type": content_type, + _CONTENT_TYPE_HEADER: "application/json; charset=UTF-8", + "x-upload-content-type": content_type, } # Set the total bytes if possible. 
if total_bytes is not None: @@ -476,11 +473,11 @@ def _prepare_initiate_request( self._total_bytes = get_total_bytes(stream) # Add the total bytes to the headers if set. if self._total_bytes is not None: - content_length = u"{:d}".format(self._total_bytes) - headers[u"x-upload-content-length"] = content_length + content_length = "{:d}".format(self._total_bytes) + headers["x-upload-content-length"] = content_length headers.update(self._headers) - payload = json.dumps(metadata).encode(u"utf-8") + payload = json.dumps(metadata).encode("utf-8") return _POST, self.upload_url, payload, headers def _process_initiate_response(self, response): @@ -501,12 +498,12 @@ def _process_initiate_response(self, response): """ _helpers.require_status_code( response, - (http_client.OK, http_client.CREATED), + (http.client.OK, http.client.CREATED), self._get_status_code, callback=self._make_invalid, ) self._resumable_url = _helpers.header_required( - response, u"location", self._get_headers + response, "location", self._get_headers ) def initiate( @@ -560,7 +557,7 @@ def initiate( Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def _prepare_request(self): """Prepare the contents of HTTP request to upload a chunk. @@ -593,15 +590,15 @@ def _prepare_request(self): .. _sans-I/O: https://sans-io.readthedocs.io/ """ if self.finished: - raise ValueError(u"Upload has finished.") + raise ValueError("Upload has finished.") if self.invalid: raise ValueError( - u"Upload is in an invalid state. To recover call `recover()`." + "Upload is in an invalid state. To recover call `recover()`." ) if self.resumable_url is None: raise ValueError( - u"This upload has not been initiated. Please call " - u"initiate() before beginning to transmit chunks." + "This upload has not been initiated. Please call " + "initiate() before beginning to transmit chunks." ) start_byte, payload, content_range = get_next_chunk( @@ -673,11 +670,11 @@ def _process_response(self, response, bytes_sent): """ status_code = _helpers.require_status_code( response, - (http_client.OK, resumable_media.PERMANENT_REDIRECT), + (http.client.OK, resumable_media.PERMANENT_REDIRECT), self._get_status_code, callback=self._make_invalid, ) - if status_code == http_client.OK: + if status_code == http.client.OK: # NOTE: We use the "local" information of ``bytes_sent`` to update # ``bytes_uploaded``, but do not verify this against other # state. However, there may be some other information: @@ -703,11 +700,11 @@ def _process_response(self, response, bytes_sent): self._make_invalid() raise common.InvalidResponse( response, - u'Unexpected "range" header', + 'Unexpected "range" header', bytes_range, - u'Expected to be of the form "bytes=0-{end}"', + 'Expected to be of the form "bytes=0-{end}"', ) - self._bytes_uploaded = int(match.group(u"end_byte")) + 1 + self._bytes_uploaded = int(match.group("end_byte")) + 1 def _validate_checksum(self, response): """Check the computed checksum, if any, against the response headers. @@ -764,7 +761,7 @@ def transmit_next_chunk(self, transport, timeout=None): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def _prepare_recover_request(self): """Prepare the contents of HTTP request to recover from failure. @@ -792,9 +789,9 @@ def _prepare_recover_request(self): .. 
_sans-I/O: https://sans-io.readthedocs.io/ """ if not self.invalid: - raise ValueError(u"Upload is not in invalid state, no need to recover.") + raise ValueError("Upload is not in invalid state, no need to recover.") - headers = {_helpers.CONTENT_RANGE_HEADER: u"bytes */*"} + headers = {_helpers.CONTENT_RANGE_HEADER: "bytes */*"} return _PUT, self.resumable_url, None, headers def _process_recover_response(self, response): @@ -826,11 +823,11 @@ def _process_recover_response(self, response): if match is None: raise common.InvalidResponse( response, - u'Unexpected "range" header', + 'Unexpected "range" header', bytes_range, - u'Expected to be of the form "bytes=0-{end}"', + 'Expected to be of the form "bytes=0-{end}"', ) - self._bytes_uploaded = int(match.group(u"end_byte")) + 1 + self._bytes_uploaded = int(match.group("end_byte")) + 1 else: # In this case, the upload has not "begun". self._bytes_uploaded = 0 @@ -855,7 +852,7 @@ def recover(self, transport): Raises: NotImplementedError: Always, since virtual. """ - raise NotImplementedError(u"This implementation is virtual.") + raise NotImplementedError("This implementation is virtual.") def get_boundary(): @@ -868,7 +865,7 @@ def get_boundary(): boundary = _BOUNDARY_FORMAT.format(random_int) # NOTE: Neither % formatting nor .format() are available for byte strings # in Python 3.4, so we must use unicode strings as templates. - return boundary.encode(u"utf-8") + return boundary.encode("utf-8") def construct_multipart_request(data, metadata, content_type): @@ -887,8 +884,8 @@ def construct_multipart_request(data, metadata, content_type): between each part. """ multipart_boundary = get_boundary() - json_bytes = json.dumps(metadata).encode(u"utf-8") - content_type = content_type.encode(u"utf-8") + json_bytes = json.dumps(metadata).encode("utf-8") + content_type = content_type.encode("utf-8") # Combine the two parts into a multipart payload. # NOTE: We'd prefer a bytes template but are restricted by Python 3.4. boundary_sep = _MULTIPART_SEP + multipart_boundary @@ -977,12 +974,12 @@ def get_next_chunk(stream, chunk_size, total_bytes): # stream to be at the beginning. if num_bytes_read != 0: raise ValueError( - u"Stream specified as empty, but produced non-empty content." + "Stream specified as empty, but produced non-empty content." ) else: if num_bytes_read == 0: raise ValueError( - u"Stream is already exhausted. There is no content remaining." + "Stream is already exhausted. There is no content remaining." ) content_range = get_content_range(start_byte, end_byte, total_bytes) diff --git a/google/resumable_media/common.py b/google/resumable_media/common.py index 43f70f9c..d2c0be52 100644 --- a/google/resumable_media/common.py +++ b/google/resumable_media/common.py @@ -17,10 +17,10 @@ Includes custom exception types, useful constants and shared helpers. """ -from six.moves import http_client +import http.client _SLEEP_RETRY_ERROR_MSG = ( - u"At most one of `max_cumulative_retry` and `max_retries` " u"can be specified." + "At most one of `max_cumulative_retry` and `max_retries` " "can be specified." 
) UPLOAD_CHUNK_SIZE = 262144 # 256 * 1024 @@ -63,10 +63,10 @@ RETRYABLE = ( TOO_MANY_REQUESTS, # 429 - http_client.INTERNAL_SERVER_ERROR, # 500 - http_client.BAD_GATEWAY, # 502 - http_client.SERVICE_UNAVAILABLE, # 503 - http_client.GATEWAY_TIMEOUT, # 504 + http.client.INTERNAL_SERVER_ERROR, # 500 + http.client.BAD_GATEWAY, # 502 + http.client.SERVICE_UNAVAILABLE, # 503 + http.client.GATEWAY_TIMEOUT, # 504 ) """iterable: HTTP status codes that indicate a retryable error. diff --git a/google/resumable_media/requests/__init__.py b/google/resumable_media/requests/__init__.py index 11ac9584..c2245495 100644 --- a/google/resumable_media/requests/__init__.py +++ b/google/resumable_media/requests/__init__.py @@ -36,7 +36,7 @@ def mock_default(scopes=None): credentials = mock.Mock(spec=creds_mod.Credentials) - return credentials, u'mock-project' + return credentials, 'mock-project' # Patch the ``default`` function on the module. original_default = google.auth.default @@ -47,7 +47,7 @@ def mock_default(scopes=None): >>> import google.auth >>> import google.auth.transport.requests as tr_requests >>> - >>> ro_scope = u'https://www.googleapis.com/auth/devstorage.read_only' + >>> ro_scope = 'https://www.googleapis.com/auth/devstorage.read_only' >>> credentials, _ = google.auth.default(scopes=(ro_scope,)) >>> transport = tr_requests.AuthorizedSession(credentials) >>> transport @@ -70,14 +70,14 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client - bucket = u'bucket-foo' - blob_name = u'file.txt' + bucket = 'bucket-foo' + blob_name = 'file.txt' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) - fake_response.headers[u'Content-Length'] = u'1364156' + fake_response.status_code = int(http.client.OK) + fake_response.headers['Content-Length'] = '1364156' fake_content = mock.MagicMock(spec=['__len__']) fake_content.__len__.return_value = 1364156 fake_response._content = fake_content @@ -90,8 +90,8 @@ def mock_default(scopes=None): >>> from google.resumable_media.requests import Download >>> >>> url_template = ( - ... u'https://www.googleapis.com/download/storage/v1/b/' - ... u'{bucket}/o/{blob_name}?alt=media') + ... 'https://www.googleapis.com/download/storage/v1/b/' + ... '{bucket}/o/{blob_name}?alt=media') >>> media_url = url_template.format( ... 
bucket=bucket, blob_name=blob_name) >>> @@ -101,7 +101,7 @@ def mock_default(scopes=None): True >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '1364156' >>> len(response.content) 1364156 @@ -113,20 +113,20 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import Download - media_url = u'http://test.invalid' + media_url = 'http://test.invalid' start = 4096 end = 8191 slice_size = end - start + 1 fake_response = requests.Response() - fake_response.status_code = int(http_client.PARTIAL_CONTENT) - fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size) - content_range = u'bytes {:d}-{:d}/1364156'.format(start, end) - fake_response.headers[u'Content-Range'] = content_range + fake_response.status_code = int(http.client.PARTIAL_CONTENT) + fake_response.headers['Content-Length'] = '{:d}'.format(slice_size) + content_range = 'bytes {:d}-{:d}/1364156'.format(start, end) + fake_response.headers['Content-Range'] = content_range fake_content = mock.MagicMock(spec=['__len__']) fake_content.__len__.return_value = slice_size fake_response._content = fake_content @@ -142,9 +142,9 @@ def mock_default(scopes=None): True >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '4096' - >>> response.headers[u'Content-Range'] + >>> response.headers['Content-Range'] 'bytes 4096-8191/1364156' >>> len(response.content) 4096 @@ -172,17 +172,17 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client - media_url = u'http://test.invalid' + media_url = 'http://test.invalid' fifty_mb = 50 * 1024 * 1024 one_gb = 1024 * 1024 * 1024 fake_response = requests.Response() - fake_response.status_code = int(http_client.PARTIAL_CONTENT) - fake_response.headers[u'Content-Length'] = u'{:d}'.format(fifty_mb) - content_range = u'bytes 0-{:d}/{:d}'.format(fifty_mb - 1, one_gb) - fake_response.headers[u'Content-Range'] = content_range + fake_response.status_code = int(http.client.PARTIAL_CONTENT) + fake_response.headers['Content-Length'] = '{:d}'.format(fifty_mb) + content_range = 'bytes 0-{:d}/{:d}'.format(fifty_mb - 1, one_gb) + fake_response.headers['Content-Range'] = content_range fake_content_begin = b'The beginning of the chunk...' 
fake_content = fake_content_begin + b'1' * (fifty_mb - 29) fake_response._content = fake_content @@ -213,9 +213,9 @@ def mock_default(scopes=None): 1073741824 >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '52428800' - >>> response.headers[u'Content-Range'] + >>> response.headers['Content-Range'] 'bytes 0-52428799/1073741824' >>> len(response.content) == chunk_size True @@ -232,11 +232,11 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ChunkedDownload - media_url = u'http://test.invalid' + media_url = 'http://test.invalid' fifty_mb = 50 * 1024 * 1024 one_gb = 1024 * 1024 * 1024 @@ -246,12 +246,12 @@ def mock_default(scopes=None): download._total_bytes = one_gb fake_response = requests.Response() - fake_response.status_code = int(http_client.PARTIAL_CONTENT) + fake_response.status_code = int(http.client.PARTIAL_CONTENT) slice_size = one_gb - 20 * fifty_mb - fake_response.headers[u'Content-Length'] = u'{:d}'.format(slice_size) - content_range = u'bytes {:d}-{:d}/{:d}'.format( + fake_response.headers['Content-Length'] = '{:d}'.format(slice_size) + content_range = 'bytes {:d}-{:d}/{:d}'.format( 20 * fifty_mb, one_gb - 1, one_gb) - fake_response.headers[u'Content-Range'] = content_range + fake_response.headers['Content-Range'] = content_range fake_content = mock.MagicMock(spec=['__len__']) fake_content.__len__.return_value = slice_size fake_response._content = fake_content @@ -276,9 +276,9 @@ def mock_default(scopes=None): True >>> response - >>> response.headers[u'Content-Length'] + >>> response.headers['Content-Length'] '25165824' - >>> response.headers[u'Content-Range'] + >>> response.headers['Content-Range'] 'bytes 1048576000-1073741823/1073741824' >>> len(response.content) < download.chunk_size True @@ -301,21 +301,21 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client - bucket = u'some-bucket' - blob_name = u'file.txt' + bucket = 'some-bucket' + blob_name = 'file.txt' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) payload = { - u'bucket': bucket, - u'contentType': u'text/plain', - u'md5Hash': u'M0XLEsX9/sMdiI+4pB4CAQ==', - u'name': blob_name, - u'size': u'27', + 'bucket': bucket, + 'contentType': 'text/plain', + 'md5Hash': 'M0XLEsX9/sMdiI+4pB4CAQ==', + 'name': blob_name, + 'size': '27', } - fake_response._content = json.dumps(payload).encode(u'utf-8') + fake_response._content = json.dumps(payload).encode('utf-8') post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -326,30 +326,30 @@ def mock_default(scopes=None): >>> from google.resumable_media.requests import SimpleUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=media&' - ... u'name={blob_name}') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=media&' + ... 'name={blob_name}') >>> upload_url = url_template.format( ... bucket=bucket, blob_name=blob_name) >>> >>> upload = SimpleUpload(upload_url) >>> data = b'Some not too large content.' 
- >>> content_type = u'text/plain' + >>> content_type = 'text/plain' >>> response = upload.transmit(transport, data, content_type) >>> upload.finished True >>> response >>> json_response = response.json() - >>> json_response[u'bucket'] == bucket + >>> json_response['bucket'] == bucket True - >>> json_response[u'name'] == blob_name + >>> json_response['name'] == blob_name True - >>> json_response[u'contentType'] == content_type + >>> json_response['contentType'] == content_type True - >>> json_response[u'md5Hash'] + >>> json_response['md5Hash'] 'M0XLEsX9/sMdiI+4pB4CAQ==' - >>> int(json_response[u'size']) == len(data) + >>> int(json_response['size']) == len(data) True In the rare case that an upload fails, an :exc:`.InvalidResponse` @@ -361,25 +361,25 @@ def mock_default(scopes=None): import mock import requests - from six.moves import http_client + import http.client from google import resumable_media from google.resumable_media import _helpers from google.resumable_media.requests import SimpleUpload as constructor - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' data = b'Some not too large content.' - content_type = u'text/plain' + content_type = 'text/plain' fake_response = requests.Response() - fake_response.status_code = int(http_client.SERVICE_UNAVAILABLE) + fake_response.status_code = int(http.client.SERVICE_UNAVAILABLE) post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) time_sleep = time.sleep def dont_sleep(seconds): - raise RuntimeError(u'No sleep', seconds) + raise RuntimeError('No sleep', seconds) def SimpleUpload(*args, **kwargs): upload = constructor(*args, **kwargs) @@ -436,21 +436,21 @@ def SimpleUpload(*args, **kwargs): import mock import requests - from six.moves import http_client + import http.client - bucket = u'some-bucket' - blob_name = u'file.txt' + bucket = 'some-bucket' + blob_name = 'file.txt' data = b'Some not too large content.' - content_type = u'text/plain' + content_type = 'text/plain' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) payload = { - u'bucket': bucket, - u'name': blob_name, - u'metadata': {u'color': u'grurple'}, + 'bucket': bucket, + 'name': blob_name, + 'metadata': {'color': 'grurple'}, } - fake_response._content = json.dumps(payload).encode(u'utf-8') + fake_response._content = json.dumps(payload).encode('utf-8') post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -460,15 +460,15 @@ def SimpleUpload(*args, **kwargs): >>> from google.resumable_media.requests import MultipartUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=multipart') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=multipart') >>> upload_url = url_template.format(bucket=bucket) >>> >>> upload = MultipartUpload(upload_url) >>> metadata = { - ... u'name': blob_name, - ... u'metadata': { - ... u'color': u'grurple', + ... 'name': blob_name, + ... 'metadata': { + ... 'color': 'grurple', ... }, ... 
} >>> response = upload.transmit(transport, data, metadata, content_type) @@ -477,11 +477,11 @@ def SimpleUpload(*args, **kwargs): >>> response >>> json_response = response.json() - >>> json_response[u'bucket'] == bucket + >>> json_response['bucket'] == bucket True - >>> json_response[u'name'] == blob_name + >>> json_response['name'] == blob_name True - >>> json_response[u'metadata'] == metadata[u'metadata'] + >>> json_response['metadata'] == metadata['metadata'] True As with the simple upload, in the case of failure an :exc:`.InvalidResponse` @@ -522,24 +522,24 @@ def SimpleUpload(*args, **kwargs): import mock import requests - from six.moves import http_client + import http.client - bucket = u'some-bucket' - blob_name = u'file.txt' + bucket = 'some-bucket' + blob_name = 'file.txt' data = b'Some resumable bytes.' - content_type = u'text/plain' + content_type = 'text/plain' fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - upload_id = u'ABCdef189XY_super_serious' + upload_id = 'ABCdef189XY_super_serious' resumable_url_template = ( - u'https://www.googleapis.com/upload/storage/v1/b/{bucket}' - u'/o?uploadType=resumable&upload_id={upload_id}') + 'https://www.googleapis.com/upload/storage/v1/b/{bucket}' + '/o?uploadType=resumable&upload_id={upload_id}') resumable_url = resumable_url_template.format( bucket=bucket, upload_id=upload_id) - fake_response.headers[u'location'] = resumable_url - fake_response.headers[u'x-guploader-uploadid'] = upload_id + fake_response.headers['location'] = resumable_url + fake_response.headers['x-guploader-uploadid'] = upload_id post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -549,8 +549,8 @@ def SimpleUpload(*args, **kwargs): >>> from google.resumable_media.requests import ResumableUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=resumable') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=resumable') >>> upload_url = url_template.format(bucket=bucket) >>> >>> chunk_size = 1024 * 1024 # 1MB @@ -559,18 +559,18 @@ def SimpleUpload(*args, **kwargs): >>> # The upload doesn't know how "big" it is until seeing a stream. >>> upload.total_bytes is None True - >>> metadata = {u'name': blob_name} + >>> metadata = {'name': blob_name} >>> response = upload.initiate(transport, stream, metadata, content_type) >>> response - >>> upload.resumable_url == response.headers[u'Location'] + >>> upload.resumable_url == response.headers['Location'] True >>> upload.total_bytes == len(data) True - >>> upload_id = response.headers[u'X-GUploader-UploadID'] + >>> upload_id = response.headers['X-GUploader-UploadID'] >>> upload_id 'ABCdef189XY_super_serious' - >>> upload.resumable_url == upload_url + u'&upload_id=' + upload_id + >>> upload.resumable_url == upload_url + '&upload_id=' + upload_id True Once a :class:`.ResumableUpload` has been initiated, the resource is @@ -583,7 +583,7 @@ def SimpleUpload(*args, **kwargs): import mock import requests - from six.moves import http_client + import http.client from google import resumable_media import google.resumable_media.requests.upload as upload_mod @@ -591,12 +591,12 @@ def SimpleUpload(*args, **kwargs): data = b'01234567891' stream = io.BytesIO(data) # Create an "already initiated" upload. 
- upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 256 * 1024 # 256KB upload = upload_mod.ResumableUpload(upload_url, chunk_size) - upload._resumable_url = u'http://test.invalid?upload_id=mocked' + upload._resumable_url = 'http://test.invalid?upload_id=mocked' upload._stream = stream - upload._content_type = u'text/plain' + upload._content_type = 'text/plain' upload._total_bytes = len(data) # After-the-fact update the chunk size so that len(data) @@ -605,22 +605,22 @@ def SimpleUpload(*args, **kwargs): # Make three fake responses. fake_response0 = requests.Response() fake_response0.status_code = resumable_media.PERMANENT_REDIRECT - fake_response0.headers[u'range'] = u'bytes=0-3' + fake_response0.headers['range'] = 'bytes=0-3' fake_response1 = requests.Response() fake_response1.status_code = resumable_media.PERMANENT_REDIRECT - fake_response1.headers[u'range'] = u'bytes=0-7' + fake_response1.headers['range'] = 'bytes=0-7' fake_response2 = requests.Response() - fake_response2.status_code = int(http_client.OK) - bucket = u'some-bucket' - blob_name = u'file.txt' + fake_response2.status_code = int(http.client.OK) + bucket = 'some-bucket' + blob_name = 'file.txt' payload = { - u'bucket': bucket, - u'name': blob_name, - u'size': u'{:d}'.format(len(data)), + 'bucket': bucket, + 'name': blob_name, + 'size': '{:d}'.format(len(data)), } - fake_response2._content = json.dumps(payload).encode(u'utf-8') + fake_response2._content = json.dumps(payload).encode('utf-8') # Use the fake responses to mock a transport. responses = [fake_response0, fake_response1, fake_response2] @@ -653,9 +653,9 @@ def SimpleUpload(*args, **kwargs): >>> upload.bytes_uploaded == upload.total_bytes True >>> json_response = response2.json() - >>> json_response[u'bucket'] == bucket + >>> json_response['bucket'] == bucket True - >>> json_response[u'name'] == blob_name + >>> json_response['name'] == blob_name True """ from google.resumable_media.requests.download import ChunkedDownload @@ -668,11 +668,11 @@ def SimpleUpload(*args, **kwargs): __all__ = [ - u"ChunkedDownload", - u"Download", - u"MultipartUpload", - u"RawChunkedDownload", - u"RawDownload", - u"ResumableUpload", - u"SimpleUpload", + "ChunkedDownload", + "Download", + "MultipartUpload", + "RawChunkedDownload", + "RawDownload", + "ResumableUpload", + "SimpleUpload", ] diff --git a/google/resumable_media/requests/download.py b/google/resumable_media/requests/download.py index d44fb93a..22195a6b 100644 --- a/google/resumable_media/requests/download.py +++ b/google/resumable_media/requests/download.py @@ -22,7 +22,7 @@ from google.resumable_media.requests import _request_helpers -_CHECKSUM_MISMATCH = u"""\ +_CHECKSUM_MISMATCH = """\ Checksum mismatch while downloading: {} @@ -155,13 +155,13 @@ def consume( method, url, payload, headers = self._prepare_request() # NOTE: We assume "payload is None" but pass it along anyway. request_kwargs = { - u"data": payload, - u"headers": headers, - u"retry_strategy": self._retry_strategy, - u"timeout": timeout, + "data": payload, + "headers": headers, + "retry_strategy": self._retry_strategy, + "timeout": timeout, } if self._stream is not None: - request_kwargs[u"stream"] = True + request_kwargs["stream"] = True result = _request_helpers.http_request(transport, method, url, **request_kwargs) @@ -465,8 +465,8 @@ def _add_decoder(response_raw, checksum): patched, or a ``_DoNothingHash`` if the decoder is patched, since the caller will no longer need to hash to decoded bytes. 
""" - encoding = response_raw.headers.get(u"content-encoding", u"").lower() - if encoding != u"gzip": + encoding = response_raw.headers.get("content-encoding", "").lower() + if encoding != "gzip": return checksum response_raw._decoder = _GzipDecoder(checksum) diff --git a/google/resumable_media/requests/upload.py b/google/resumable_media/requests/upload.py index f440d680..f3278910 100644 --- a/google/resumable_media/requests/upload.py +++ b/google/resumable_media/requests/upload.py @@ -163,15 +163,15 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): .. testsetup:: resumable-constructor - bucket = u'bucket-foo' + bucket = 'bucket-foo' .. doctest:: resumable-constructor >>> from google.resumable_media.requests import ResumableUpload >>> >>> url_template = ( - ... u'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' - ... u'uploadType=resumable') + ... 'https://www.googleapis.com/upload/storage/v1/b/{bucket}/o?' + ... 'uploadType=resumable') >>> upload_url = url_template.format(bucket=bucket) >>> >>> chunk_size = 3 * 1024 * 1024 # 3MB @@ -188,11 +188,11 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ResumableUpload - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 3 * 1024 * 1024 # 3MB upload = ResumableUpload(upload_url, chunk_size) @@ -200,14 +200,14 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): os.close(file_desc) data = b'some bytes!' - with open(filename, u'wb') as file_obj: + with open(filename, 'wb') as file_obj: file_obj.write(data) fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - resumable_url = u'http://test.invalid?upload_id=7up' - fake_response.headers[u'location'] = resumable_url + resumable_url = 'http://test.invalid?upload_id=7up' + fake_response.headers['location'] = resumable_url post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) @@ -219,11 +219,11 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): >>> upload.total_bytes is None True >>> - >>> stream = open(filename, u'rb') + >>> stream = open(filename, 'rb') >>> total_bytes = os.path.getsize(filename) - >>> metadata = {u'name': filename} + >>> metadata = {'name': filename} >>> response = upload.initiate( - ... transport, stream, metadata, u'text/plain', + ... transport, stream, metadata, 'text/plain', ... 
total_bytes=total_bytes) >>> response @@ -245,26 +245,26 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ResumableUpload - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 3 * 1024 * 1024 # 3MB upload = ResumableUpload(upload_url, chunk_size) fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - resumable_url = u'http://test.invalid?upload_id=7up' - fake_response.headers[u'location'] = resumable_url + resumable_url = 'http://test.invalid?upload_id=7up' + fake_response.headers['location'] = resumable_url post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) data = b'some MOAR bytes!' - metadata = {u'name': u'some-file.jpg'} - content_type = u'image/jpeg' + metadata = {'name': 'some-file.jpg'} + content_type = 'image/jpeg' .. doctest:: resumable-implicit-size @@ -286,25 +286,25 @@ class ResumableUpload(_request_helpers.RequestsMixin, _upload.ResumableUpload): import mock import requests - from six.moves import http_client + import http.client from google.resumable_media.requests import ResumableUpload - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' chunk_size = 3 * 1024 * 1024 # 3MB upload = ResumableUpload(upload_url, chunk_size) fake_response = requests.Response() - fake_response.status_code = int(http_client.OK) + fake_response.status_code = int(http.client.OK) fake_response._content = b'' - resumable_url = u'http://test.invalid?upload_id=7up' - fake_response.headers[u'location'] = resumable_url + resumable_url = 'http://test.invalid?upload_id=7up' + fake_response.headers['location'] = resumable_url post_method = mock.Mock(return_value=fake_response, spec=[]) transport = mock.Mock(request=post_method, spec=['request']) - metadata = {u'name': u'some-file.jpg'} - content_type = u'application/octet-stream' + metadata = {'name': 'some-file.jpg'} + content_type = 'application/octet-stream' stream = io.BytesIO(b'data') @@ -437,24 +437,24 @@ def transmit_next_chunk( import mock import requests - from six.moves import http_client + import http.client from google import resumable_media import google.resumable_media.requests.upload as upload_mod transport = mock.Mock(spec=['request']) fake_response = requests.Response() - fake_response.status_code = int(http_client.BAD_REQUEST) + fake_response.status_code = int(http.client.BAD_REQUEST) transport.request.return_value = fake_response - upload_url = u'http://test.invalid' + upload_url = 'http://test.invalid' upload = upload_mod.ResumableUpload( upload_url, resumable_media.UPLOAD_CHUNK_SIZE) # Fake that the upload has been initiate()-d data = b'data is here' upload._stream = io.BytesIO(data) upload._total_bytes = len(data) - upload._resumable_url = u'http://test.invalid?upload_id=nope' + upload._resumable_url = 'http://test.invalid?upload_id=nope' .. 
doctest:: bad-response :options: +NORMALIZE_WHITESPACE diff --git a/noxfile.py b/noxfile.py index 19695a98..62d3654c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -26,9 +26,8 @@ GOOGLE_AUTH = "google-auth >= 1.22.0, < 2.0dev" DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] -UNIT_TEST_SYNC_PYTHON_VERSIONS = ["2.7"] @nox.session(python=UNIT_TEST_PYTHON_VERSIONS) @@ -63,35 +62,6 @@ def unit(session): ) -@nox.session(python=UNIT_TEST_SYNC_PYTHON_VERSIONS) -def unit_2(session): - """Run the unit test suite.""" - - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - - # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") - session.install("-e", ".[requests]", "-c", constraints_path) - - # Run py.test against the unit tests. - # NOTE: We don't require 100% line coverage for unit test runs since - # some have branches that are Py2/Py3 specific. - line_coverage = "--cov-fail-under=0" - session.run( - "py.test", - "--cov=google.resumable_media", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - line_coverage, - os.path.join("tests", "unit"), - *session.posargs - ) - - @nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" diff --git a/setup.py b/setup.py index 9594fde3..228071be 100644 --- a/setup.py +++ b/setup.py @@ -24,21 +24,18 @@ REQUIREMENTS = [ - 'six>=1.4.0', - 'google-crc32c >= 1.0, < 2.0dev; python_version>="3.5"', - 'crcmod >= 1.7; python_version=="2.7"', - + 'google-crc32c >= 1.0, < 2.0dev', ] EXTRAS_REQUIRE = { 'requests': [ 'requests >= 2.18.0, < 3.0.0dev', ], - 'aiohttp': 'aiohttp >= 3.6.2, < 4.0.0dev; python_version>="3.6"' + 'aiohttp': 'aiohttp >= 3.6.2, < 4.0.0dev' } setuptools.setup( name='google-resumable-media', - version = "1.3.3", + version = "2.0.0b1", description='Utilities for Google Media Downloads and Resumable Uploads', author='Google Cloud Platform', author_email='googleapis-publisher@google.com', @@ -53,14 +50,12 @@ zip_safe=False, install_requires=REQUIREMENTS, extras_require=EXTRAS_REQUIRE, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*', + python_requires='>= 3.6', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index efea9da8..07ed2ed2 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,8 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -six==1.4.0 crcmod==1.7 google-crc32c==1.0 aiohttp==3.6.2 -requests==2.18.0 \ No newline at end of file +requests==2.18.0 diff --git a/tests/system/requests/conftest.py b/tests/system/requests/conftest.py index d0a3c87f..e22229a5 100644 --- a/tests/system/requests/conftest.py +++ b/tests/system/requests/conftest.py @@ -43,13 +43,13 @@ def cleanup_bucket(transport): raise ValueError("{}: {}".format(del_response.status_code, del_response.reason)) -@pytest.fixture(scope=u"session") 
+@pytest.fixture(scope="session") def authorized_transport(): credentials, _ = google.auth.default(scopes=(utils.GCS_RW_SCOPE,)) yield tr_requests.AuthorizedSession(credentials) -@pytest.fixture(scope=u"session") +@pytest.fixture(scope="session") def bucket(authorized_transport): ensure_bucket(authorized_transport) diff --git a/tests/system/requests/test_download.py b/tests/system/requests/test_download.py index 2345d073..15d3cbfd 100644 --- a/tests/system/requests/test_download.py +++ b/tests/system/requests/test_download.py @@ -15,13 +15,13 @@ import base64 import copy import hashlib +import http.client import io import os import google.auth import google.auth.transport.requests as tr_requests import pytest -from six.moves import http_client from google.resumable_media import common import google.resumable_media.requests as resumable_requests @@ -32,11 +32,11 @@ CURR_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(CURR_DIR, u"..", u"..", u"data") -PLAIN_TEXT = u"text/plain" -IMAGE_JPEG = u"image/jpeg" +DATA_DIR = os.path.join(CURR_DIR, "..", "..", "data") +PLAIN_TEXT = "text/plain" +IMAGE_JPEG = "image/jpeg" ENCRYPTED_ERR = b"The target object is encrypted by a customer-supplied encryption key." -NO_BODY_ERR = u"The content for this response was already consumed" +NO_BODY_ERR = "The content for this response was already consumed" NOT_FOUND_ERR = ( b"No such object: " + utils.BUCKET_NAME.encode("utf-8") + b"/does-not-exist.txt" ) @@ -60,17 +60,17 @@ class CorruptingAuthorizedSession(tr_requests.AuthorizedSession): constructor. """ - EMPTY_MD5 = base64.b64encode(hashlib.md5(b"").digest()).decode(u"utf-8") + EMPTY_MD5 = base64.b64encode(hashlib.md5(b"").digest()).decode("utf-8") crc32c = _helpers._get_crc32c_object() crc32c.update(b"") - EMPTY_CRC32C = base64.b64encode(crc32c.digest()).decode(u"utf-8") + EMPTY_CRC32C = base64.b64encode(crc32c.digest()).decode("utf-8") def request(self, method, url, data=None, headers=None, **kwargs): """Implementation of Requests' request.""" response = tr_requests.AuthorizedSession.request( self, method, url, data=data, headers=headers, **kwargs ) - response.headers[_helpers._HASH_HEADER] = u"crc32c={},md5={}".format( + response.headers[_helpers._HASH_HEADER] = "crc32c={},md5={}".format( self.EMPTY_CRC32C, self.EMPTY_MD5 ) return response @@ -82,11 +82,11 @@ def get_path(filename): ALL_FILES = ( { - u"path": get_path(u"image1.jpg"), - u"content_type": IMAGE_JPEG, - u"md5": u"1bsd83IYNug8hd+V1ING3Q==", - u"crc32c": u"YQGPxA==", - u"slices": ( + "path": get_path("image1.jpg"), + "content_type": IMAGE_JPEG, + "md5": "1bsd83IYNug8hd+V1ING3Q==", + "crc32c": "YQGPxA==", + "slices": ( slice(1024, 16386, None), # obj[1024:16386] slice(None, 8192, None), # obj[:8192] slice(-256, None, None), # obj[-256:] @@ -94,11 +94,11 @@ def get_path(filename): ), }, { - u"path": get_path(u"image2.jpg"), - u"content_type": IMAGE_JPEG, - u"md5": u"gdLXJltiYAMP9WZZFEQI1Q==", - u"crc32c": u"sxxEFQ==", - u"slices": ( + "path": get_path("image2.jpg"), + "content_type": IMAGE_JPEG, + "md5": "gdLXJltiYAMP9WZZFEQI1Q==", + "crc32c": "sxxEFQ==", + "slices": ( slice(1024, 16386, None), # obj[1024:16386] slice(None, 8192, None), # obj[:8192] slice(-256, None, None), # obj[-256:] @@ -106,62 +106,62 @@ def get_path(filename): ), }, { - u"path": get_path(u"file.txt"), - u"content_type": PLAIN_TEXT, - u"md5": u"XHSHAr/SpIeZtZbjgQ4nGw==", - u"crc32c": u"MeMHoQ==", - u"slices": (), + "path": get_path("file.txt"), + "content_type": PLAIN_TEXT, + "md5": 
"XHSHAr/SpIeZtZbjgQ4nGw==", + "crc32c": "MeMHoQ==", + "slices": (), }, { - u"path": get_path(u"gzipped.txt.gz"), - u"uncompressed": get_path(u"gzipped.txt"), - u"content_type": PLAIN_TEXT, - u"md5": u"KHRs/+ZSrc/FuuR4qz/PZQ==", - u"crc32c": u"/LIRNg==", - u"slices": (), - u"metadata": {u"contentEncoding": u"gzip"}, + "path": get_path("gzipped.txt.gz"), + "uncompressed": get_path("gzipped.txt"), + "content_type": PLAIN_TEXT, + "md5": "KHRs/+ZSrc/FuuR4qz/PZQ==", + "crc32c": "/LIRNg==", + "slices": (), + "metadata": {"contentEncoding": "gzip"}, }, ) def get_contents_for_upload(info): - with open(info[u"path"], u"rb") as file_obj: + with open(info["path"], "rb") as file_obj: return file_obj.read() def get_contents(info): - full_path = info.get(u"uncompressed", info[u"path"]) - with open(full_path, u"rb") as file_obj: + full_path = info.get("uncompressed", info["path"]) + with open(full_path, "rb") as file_obj: return file_obj.read() def get_raw_contents(info): - full_path = info[u"path"] - with open(full_path, u"rb") as file_obj: + full_path = info["path"] + with open(full_path, "rb") as file_obj: return file_obj.read() def get_blob_name(info): - full_path = info.get(u"uncompressed", info[u"path"]) + full_path = info.get("uncompressed", info["path"]) return os.path.basename(full_path) def delete_blob(transport, blob_name): metadata_url = utils.METADATA_URL_TEMPLATE.format(blob_name=blob_name) response = transport.delete(metadata_url) - assert response.status_code == http_client.NO_CONTENT + assert response.status_code == http.client.NO_CONTENT -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def secret_file(authorized_transport, bucket): - blob_name = u"super-seekrit.txt" + blob_name = "super-seekrit.txt" data = b"Please do not tell anyone my encrypted seekrit." upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) headers = utils.get_encryption_headers() upload = resumable_requests.SimpleUpload(upload_url, headers=headers) response = upload.transmit(authorized_transport, data, PLAIN_TEXT) - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK yield blob_name, data, headers @@ -169,27 +169,27 @@ def secret_file(authorized_transport, bucket): # Transport that returns corrupt data, so we can exercise checksum handling. 
-@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def corrupting_transport(): credentials, _ = google.auth.default(scopes=(utils.GCS_RW_SCOPE,)) yield CorruptingAuthorizedSession(credentials) -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def simple_file(authorized_transport, bucket): - blob_name = u"basic-file.txt" + blob_name = "basic-file.txt" upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) upload = resumable_requests.SimpleUpload(upload_url) data = b"Simple contents" response = upload.transmit(authorized_transport, data, PLAIN_TEXT) - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK yield blob_name, data delete_blob(authorized_transport, blob_name) -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def add_files(authorized_transport, bucket): blob_names = [] for info in ALL_FILES: @@ -197,21 +197,21 @@ def add_files(authorized_transport, bucket): blob_name = get_blob_name(info) blob_names.append(blob_name) - if u"metadata" in info: + if "metadata" in info: upload = resumable_requests.MultipartUpload(utils.MULTIPART_UPLOAD) - metadata = copy.deepcopy(info[u"metadata"]) - metadata[u"name"] = blob_name + metadata = copy.deepcopy(info["metadata"]) + metadata["name"] = blob_name response = upload.transmit( - authorized_transport, to_upload, metadata, info[u"content_type"] + authorized_transport, to_upload, metadata, info["content_type"] ) else: upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) upload = resumable_requests.SimpleUpload(upload_url) response = upload.transmit( - authorized_transport, to_upload, info[u"content_type"] + authorized_transport, to_upload, info["content_type"] ) - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK yield @@ -225,11 +225,11 @@ def check_tombstoned(download, transport): if isinstance(download, SIMPLE_DOWNLOADS): with pytest.raises(ValueError) as exc_info: download.consume(transport) - assert exc_info.match(u"A download can only be used once.") + assert exc_info.match("A download can only be used once.") else: with pytest.raises(ValueError) as exc_info: download.consume_next_chunk(transport) - assert exc_info.match(u"Download has finished.") + assert exc_info.match("Download has finished.") def check_error_response(exc_info, status_code, message): @@ -239,8 +239,8 @@ def check_error_response(exc_info, status_code, message): assert response.content.startswith(message) assert len(error.args) == 5 assert error.args[1] == status_code - assert error.args[3] == http_client.OK - assert error.args[4] == http_client.PARTIAL_CONTENT + assert error.args[3] == http.client.OK + assert error.args[4] == http.client.PARTIAL_CONTENT class TestDownload(object): @@ -270,7 +270,7 @@ def test_download_full(self, add_files, authorized_transport, checksum): download = self._make_one(media_url, checksum=checksum) # Consume the resource. response = download.consume(authorized_transport) - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK assert self._read_response_content(response) == actual_contents check_tombstoned(download, authorized_transport) @@ -285,9 +285,9 @@ def test_download_to_stream(self, add_files, authorized_transport): download = self._make_one(media_url, stream=stream) # Consume the resource. 
response = download.consume(authorized_transport) - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK with pytest.raises(RuntimeError) as exc_info: - getattr(response, u"content") + getattr(response, "content") assert exc_info.value.args == (NO_BODY_ERR,) assert response._content is False assert response._content_consumed is True @@ -301,7 +301,7 @@ def test_extra_headers(self, authorized_transport, secret_file): download = self._make_one(media_url, headers=headers) # Consume the resource. response = download.consume(authorized_transport) - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK assert response.content == data check_tombstoned(download, authorized_transport) # Attempt to consume the resource **without** the headers. @@ -309,18 +309,18 @@ def test_extra_headers(self, authorized_transport, secret_file): with pytest.raises(common.InvalidResponse) as exc_info: download_wo.consume(authorized_transport) - check_error_response(exc_info, http_client.BAD_REQUEST, ENCRYPTED_ERR) + check_error_response(exc_info, http.client.BAD_REQUEST, ENCRYPTED_ERR) check_tombstoned(download_wo, authorized_transport) def test_non_existent_file(self, authorized_transport, bucket): - blob_name = u"does-not-exist.txt" + blob_name = "does-not-exist.txt" media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) download = self._make_one(media_url) # Try to consume the resource and fail. with pytest.raises(common.InvalidResponse) as exc_info: download.consume(authorized_transport) - check_error_response(exc_info, http_client.NOT_FOUND, NOT_FOUND_ERR) + check_error_response(exc_info, http.client.NOT_FOUND, NOT_FOUND_ERR) check_tombstoned(download, authorized_transport) def test_bad_range(self, simple_file, authorized_transport): @@ -339,7 +339,7 @@ def test_bad_range(self, simple_file, authorized_transport): check_error_response( exc_info, - http_client.REQUESTED_RANGE_NOT_SATISFIABLE, + http.client.REQUESTED_RANGE_NOT_SATISFIABLE, b"Request range not satisfiable", ) check_tombstoned(download, authorized_transport) @@ -359,10 +359,10 @@ def test_download_partial(self, add_files, authorized_transport): blob_name = get_blob_name(info) media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) - for slice_ in info[u"slices"]: + for slice_ in info["slices"]: download = self._download_slice(media_url, slice_) response = download.consume(authorized_transport) - assert response.status_code == http_client.PARTIAL_CONTENT + assert response.status_code == http.client.PARTIAL_CONTENT assert response.content == actual_contents[slice_] with pytest.raises(ValueError): download.consume(authorized_transport) @@ -454,7 +454,7 @@ def consume_chunks(download, authorized_transport, total_bytes, actual_contents) next_byte = min(start_byte + download.chunk_size, end_byte + 1) assert download.bytes_downloaded == next_byte - download.start assert download.total_bytes == total_bytes - assert response.status_code == http_client.PARTIAL_CONTENT + assert response.status_code == http.client.PARTIAL_CONTENT assert response.content == actual_contents[start_byte:next_byte] start_byte = next_byte @@ -479,7 +479,7 @@ def test_chunked_download_partial(self, add_files, authorized_transport): blob_name = get_blob_name(info) media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) - for slice_ in info[u"slices"]: + for slice_ in info["slices"]: # Manually replace a missing start with 0. 
start = 0 if slice_.start is None else slice_.start # Chunked downloads don't support a negative index. @@ -546,7 +546,7 @@ def test_chunked_with_extra_headers(self, authorized_transport, secret_file): download_wo.consume_next_chunk(authorized_transport) assert stream_wo.tell() == 0 - check_error_response(exc_info, http_client.BAD_REQUEST, ENCRYPTED_ERR) + check_error_response(exc_info, http.client.BAD_REQUEST, ENCRYPTED_ERR) assert download_wo.invalid diff --git a/tests/system/requests/test_upload.py b/tests/system/requests/test_upload.py index 136d6c44..81be6d19 100644 --- a/tests/system/requests/test_upload.py +++ b/tests/system/requests/test_upload.py @@ -14,13 +14,13 @@ import base64 import hashlib +import http.client import io import os +import urllib.parse import pytest import mock -from six.moves import http_client -from six.moves import urllib_parse from google.resumable_media import common from google import resumable_media @@ -31,12 +31,12 @@ CURR_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(CURR_DIR, u"..", u"..", u"data") -ICO_FILE = os.path.realpath(os.path.join(DATA_DIR, u"favicon.ico")) -IMAGE_FILE = os.path.realpath(os.path.join(DATA_DIR, u"image1.jpg")) -ICO_CONTENT_TYPE = u"image/x-icon" -JPEG_CONTENT_TYPE = u"image/jpeg" -BYTES_CONTENT_TYPE = u"application/octet-stream" +DATA_DIR = os.path.join(CURR_DIR, "..", "..", "data") +ICO_FILE = os.path.realpath(os.path.join(DATA_DIR, "favicon.ico")) +IMAGE_FILE = os.path.realpath(os.path.join(DATA_DIR, "image1.jpg")) +ICO_CONTENT_TYPE = "image/x-icon" +JPEG_CONTENT_TYPE = "image/jpeg" +BYTES_CONTENT_TYPE = "application/octet-stream" BAD_CHUNK_SIZE_MSG = ( b"Invalid request. The number of bytes uploaded is required to be equal " b"or greater than 262144, except for the final request (it's recommended " @@ -57,7 +57,7 @@ def add_cleanup(blob_name, transport): for blob_name, transport in to_delete: metadata_url = utils.METADATA_URL_TEMPLATE.format(blob_name=blob_name) response = utils.retry_transient_errors(transport.delete)(metadata_url) - assert response.status_code == http_client.NO_CONTENT + assert response.status_code == http.client.NO_CONTENT @pytest.fixture @@ -67,7 +67,7 @@ def img_stream(): This is so that an entire test can execute in the context of the context manager without worrying about closing the file. """ - with open(IMAGE_FILE, u"rb") as file_obj: + with open(IMAGE_FILE, "rb") as file_obj: yield file_obj @@ -77,10 +77,10 @@ def get_md5(data): def get_upload_id(upload_url): - parse_result = urllib_parse.urlparse(upload_url) - parsed_query = urllib_parse.parse_qs(parse_result.query) + parse_result = urllib.parse.urlparse(upload_url) + parsed_query = urllib.parse.parse_qs(parse_result.query) # NOTE: We are unpacking here, so asserting exactly one match. 
- (upload_id,) = parsed_query[u"upload_id"] + (upload_id,) = parsed_query["upload_id"] return upload_id @@ -99,29 +99,29 @@ def check_response( metadata=None, content_type=ICO_CONTENT_TYPE, ): - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK json_response = response.json() - assert json_response[u"bucket"] == utils.BUCKET_NAME - assert json_response[u"contentType"] == content_type + assert json_response["bucket"] == utils.BUCKET_NAME + assert json_response["contentType"] == content_type if actual_contents is not None: - md5_hash = json_response[u"md5Hash"].encode(u"ascii") + md5_hash = json_response["md5Hash"].encode("ascii") assert md5_hash == get_md5(actual_contents) total_bytes = len(actual_contents) - assert json_response[u"metageneration"] == u"1" - assert json_response[u"name"] == blob_name - assert json_response[u"size"] == u"{:d}".format(total_bytes) - assert json_response[u"storageClass"] == u"STANDARD" + assert json_response["metageneration"] == "1" + assert json_response["name"] == blob_name + assert json_response["size"] == "{:d}".format(total_bytes) + assert json_response["storageClass"] == "STANDARD" if metadata is None: - assert u"metadata" not in json_response + assert "metadata" not in json_response else: - assert json_response[u"metadata"] == metadata + assert json_response["metadata"] == metadata def check_content(blob_name, expected_content, transport, headers=None): media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) download = resumable_requests.Download(media_url, headers=headers) response = download.consume(transport) - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK assert response.content == expected_content @@ -140,20 +140,20 @@ def check_does_not_exist(transport, blob_name): metadata_url = utils.METADATA_URL_TEMPLATE.format(blob_name=blob_name) # Make sure we are creating a **new** object. response = transport.get(metadata_url) - assert response.status_code == http_client.NOT_FOUND + assert response.status_code == http.client.NOT_FOUND def check_initiate(response, upload, stream, transport, metadata): - assert response.status_code == http_client.OK + assert response.status_code == http.client.OK assert response.content == b"" upload_id = get_upload_id(upload.resumable_url) - assert response.headers[u"x-guploader-uploadid"] == upload_id + assert response.headers["x-guploader-uploadid"] == upload_id assert stream.tell() == 0 # Make sure the upload cannot be re-initiated. 
with pytest.raises(ValueError) as exc_info: upload.initiate(transport, stream, metadata, JPEG_CONTENT_TYPE) - exc_info.match(u"This upload has already been initiated.") + exc_info.match("This upload has already been initiated.") def check_bad_chunk(upload, transport): @@ -161,7 +161,7 @@ def check_bad_chunk(upload, transport): upload.transmit_next_chunk(transport) error = exc_info.value response = error.response - assert response.status_code == http_client.BAD_REQUEST + assert response.status_code == http.client.BAD_REQUEST assert response.content == BAD_CHUNK_SIZE_MSG @@ -188,7 +188,7 @@ def transmit_chunks( def test_simple_upload(authorized_transport, bucket, cleanup): - with open(ICO_FILE, u"rb") as file_obj: + with open(ICO_FILE, "rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -209,7 +209,7 @@ def test_simple_upload(authorized_transport, bucket, cleanup): def test_simple_upload_with_headers(authorized_transport, bucket, cleanup): - blob_name = u"some-stuff.bin" + blob_name = "some-stuff.bin" # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) check_does_not_exist(authorized_transport, blob_name) @@ -232,7 +232,7 @@ def test_simple_upload_with_headers(authorized_transport, bucket, cleanup): @pytest.mark.parametrize("checksum", ["md5", "crc32c", None]) def test_multipart_upload(authorized_transport, bucket, cleanup, checksum): - with open(ICO_FILE, u"rb") as file_obj: + with open(ICO_FILE, "rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -244,7 +244,7 @@ def test_multipart_upload(authorized_transport, bucket, cleanup, checksum): upload_url = utils.MULTIPART_UPLOAD upload = resumable_requests.MultipartUpload(upload_url, checksum=checksum) # Transmit the resource. - metadata = {u"name": blob_name, u"metadata": {u"color": u"yellow"}} + metadata = {"name": blob_name, "metadata": {"color": "yellow"}} response = upload.transmit( authorized_transport, actual_contents, metadata, ICO_CONTENT_TYPE ) @@ -252,7 +252,7 @@ def test_multipart_upload(authorized_transport, bucket, cleanup, checksum): response, blob_name, actual_contents=actual_contents, - metadata=metadata[u"metadata"], + metadata=metadata["metadata"], ) # Download the content to make sure it's "working as expected". check_content(blob_name, actual_contents, authorized_transport) @@ -262,9 +262,9 @@ def test_multipart_upload(authorized_transport, bucket, cleanup, checksum): ) -@pytest.mark.parametrize("checksum", [u"md5", u"crc32c"]) +@pytest.mark.parametrize("checksum", ["md5", "crc32c"]) def test_multipart_upload_with_bad_checksum(authorized_transport, checksum, bucket): - with open(ICO_FILE, u"rb") as file_obj: + with open(ICO_FILE, "rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -274,7 +274,7 @@ def test_multipart_upload_with_bad_checksum(authorized_transport, checksum, buck upload_url = utils.MULTIPART_UPLOAD upload = resumable_requests.MultipartUpload(upload_url, checksum=checksum) # Transmit the resource. 
- metadata = {u"name": blob_name, u"metadata": {u"color": u"yellow"}} + metadata = {"name": blob_name, "metadata": {"color": "yellow"}} fake_checksum_object = _helpers._get_checksum_object(checksum) fake_checksum_object.update(b"bad data") fake_prepared_checksum_digest = _helpers.prepare_checksum_digest( @@ -300,7 +300,7 @@ def test_multipart_upload_with_bad_checksum(authorized_transport, checksum, buck def test_multipart_upload_with_headers(authorized_transport, bucket, cleanup): - blob_name = u"some-multipart-stuff.bin" + blob_name = "some-multipart-stuff.bin" # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) check_does_not_exist(authorized_transport, blob_name) @@ -310,7 +310,7 @@ def test_multipart_upload_with_headers(authorized_transport, bucket, cleanup): headers = utils.get_encryption_headers() upload = resumable_requests.MultipartUpload(upload_url, headers=headers) # Transmit the resource. - metadata = {u"name": blob_name} + metadata = {"name": blob_name} data = b"Other binary contents\x03\x04\x05." response = upload.transmit(authorized_transport, data, metadata, BYTES_CONTENT_TYPE) check_response( @@ -335,7 +335,7 @@ def _resumable_upload_helper( utils.RESUMABLE_UPLOAD, chunk_size, headers=headers, checksum=checksum ) # Initiate the upload. - metadata = {u"name": blob_name, u"metadata": {u"direction": u"north"}} + metadata = {"name": blob_name, "metadata": {"direction": "north"}} response = upload.initiate( authorized_transport, stream, metadata, JPEG_CONTENT_TYPE ) @@ -343,7 +343,7 @@ def _resumable_upload_helper( check_initiate(response, upload, stream, authorized_transport, metadata) # Actually upload the file in chunks. num_chunks = transmit_chunks( - upload, authorized_transport, blob_name, metadata[u"metadata"] + upload, authorized_transport, blob_name, metadata["metadata"] ) assert num_chunks == get_num_chunks(upload.total_bytes, chunk_size) # Download the content to make sure it's "working as expected". @@ -354,7 +354,7 @@ def _resumable_upload_helper( check_tombstoned(upload, authorized_transport) -@pytest.mark.parametrize("checksum", [u"md5", u"crc32c", None]) +@pytest.mark.parametrize("checksum", ["md5", "crc32c", None]) def test_resumable_upload(authorized_transport, img_stream, bucket, cleanup, checksum): _resumable_upload_helper( authorized_transport, img_stream, cleanup, checksum=checksum @@ -368,7 +368,7 @@ def test_resumable_upload_with_headers( _resumable_upload_helper(authorized_transport, img_stream, cleanup, headers=headers) -@pytest.mark.parametrize("checksum", [u"md5", u"crc32c"]) +@pytest.mark.parametrize("checksum", ["md5", "crc32c"]) def test_resumable_upload_with_bad_checksum( authorized_transport, img_stream, bucket, cleanup, checksum ): @@ -402,7 +402,7 @@ def test_resumable_upload_bad_chunk_size(authorized_transport, img_stream): upload._chunk_size = 1024 assert upload._chunk_size < resumable_media.UPLOAD_CHUNK_SIZE # Initiate the upload. - metadata = {u"name": blob_name} + metadata = {"name": blob_name} response = upload.initiate( authorized_transport, img_stream, metadata, JPEG_CONTENT_TYPE ) @@ -436,7 +436,7 @@ def sabotage_and_recover(upload, stream, transport, chunk_size): def _resumable_upload_recover_helper( authorized_transport, cleanup, headers=None, checksum=None ): - blob_name = u"some-bytes.bin" + blob_name = "some-bytes.bin" chunk_size = resumable_media.UPLOAD_CHUNK_SIZE data = b"123" * chunk_size # 3 chunks worth. # Make sure to clean up the uploaded blob when we are done. 
@@ -447,7 +447,7 @@ def _resumable_upload_recover_helper( utils.RESUMABLE_UPLOAD, chunk_size, headers=headers, checksum=checksum ) # Initiate the upload. - metadata = {u"name": blob_name} + metadata = {"name": blob_name} stream = io.BytesIO(data) response = upload.initiate( authorized_transport, stream, metadata, BYTES_CONTENT_TYPE @@ -476,7 +476,7 @@ def _resumable_upload_recover_helper( check_tombstoned(upload, authorized_transport) -@pytest.mark.parametrize("checksum", [u"md5", u"crc32c", None]) +@pytest.mark.parametrize("checksum", ["md5", "crc32c", None]) def test_resumable_upload_recover(authorized_transport, bucket, cleanup, checksum): _resumable_upload_recover_helper(authorized_transport, cleanup, checksum=checksum) @@ -491,16 +491,16 @@ class TestResumableUploadUnknownSize(object): def _check_range_sent(response, start, end, total): headers_sent = response.request.headers if start is None and end is None: - expected_content_range = u"bytes */{:d}".format(total) + expected_content_range = "bytes */{:d}".format(total) else: # Allow total to be an int or a string "*" - expected_content_range = u"bytes {:d}-{:d}/{}".format(start, end, total) + expected_content_range = "bytes {:d}-{:d}/{}".format(start, end, total) - assert headers_sent[u"content-range"] == expected_content_range + assert headers_sent["content-range"] == expected_content_range @staticmethod def _check_range_received(response, size): - assert response.headers[u"range"] == u"bytes=0-{:d}".format(size - 1) + assert response.headers["range"] == "bytes=0-{:d}".format(size - 1) def _check_partial(self, upload, response, chunk_size, num_chunks): start_byte = (num_chunks - 1) * chunk_size @@ -511,10 +511,10 @@ def _check_partial(self, upload, response, chunk_size, num_chunks): assert response.status_code == resumable_media.PERMANENT_REDIRECT assert response.content == b"" - self._check_range_sent(response, start_byte, end_byte, u"*") + self._check_range_sent(response, start_byte, end_byte, "*") self._check_range_received(response, end_byte + 1) - @pytest.mark.parametrize("checksum", [u"md5", u"crc32c", None]) + @pytest.mark.parametrize("checksum", ["md5", "crc32c", None]) def test_smaller_than_chunk_size( self, authorized_transport, bucket, cleanup, checksum ): @@ -531,8 +531,8 @@ def test_smaller_than_chunk_size( utils.RESUMABLE_UPLOAD, chunk_size, checksum=checksum ) # Initiate the upload. - metadata = {u"name": blob_name} - with open(ICO_FILE, u"rb") as stream: + metadata = {"name": blob_name} + with open(ICO_FILE, "rb") as stream: response = upload.initiate( authorized_transport, stream, @@ -555,9 +555,9 @@ def test_smaller_than_chunk_size( # Make sure the upload is tombstoned. check_tombstoned(upload, authorized_transport) - @pytest.mark.parametrize("checksum", [u"md5", u"crc32c", None]) + @pytest.mark.parametrize("checksum", ["md5", "crc32c", None]) def test_finish_at_chunk(self, authorized_transport, bucket, cleanup, checksum): - blob_name = u"some-clean-stuff.bin" + blob_name = "some-clean-stuff.bin" chunk_size = resumable_media.UPLOAD_CHUNK_SIZE # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) @@ -571,7 +571,7 @@ def test_finish_at_chunk(self, authorized_transport, bucket, cleanup, checksum): utils.RESUMABLE_UPLOAD, chunk_size, checksum=checksum ) # Initiate the upload. 
- metadata = {u"name": blob_name} + metadata = {"name": blob_name} response = upload.initiate( authorized_transport, stream, @@ -610,9 +610,9 @@ def _add_bytes(stream, data): # Go back to where we were before the write. stream.seek(curr_pos) - @pytest.mark.parametrize("checksum", [u"md5", u"crc32c", None]) + @pytest.mark.parametrize("checksum", ["md5", "crc32c", None]) def test_interleave_writes(self, authorized_transport, bucket, cleanup, checksum): - blob_name = u"some-moar-stuff.bin" + blob_name = "some-moar-stuff.bin" chunk_size = resumable_media.UPLOAD_CHUNK_SIZE # Make sure to clean up the uploaded blob when we are done. cleanup(blob_name, authorized_transport) @@ -624,7 +624,7 @@ def test_interleave_writes(self, authorized_transport, bucket, cleanup, checksum utils.RESUMABLE_UPLOAD, chunk_size, checksum=checksum ) # Initiate the upload. - metadata = {u"name": blob_name} + metadata = {"name": blob_name} response = upload.initiate( authorized_transport, stream, diff --git a/tests/system/utils.py b/tests/system/utils.py index dd97fa29..3884d0c9 100644 --- a/tests/system/utils.py +++ b/tests/system/utils.py @@ -19,26 +19,26 @@ from test_utils.retry import RetryResult -BUCKET_NAME = u"grpm-systest-{}".format(int(1000 * time.time())) -BUCKET_POST_URL = u"https://www.googleapis.com/storage/v1/b/" -BUCKET_URL = u"https://www.googleapis.com/storage/v1/b/{}".format(BUCKET_NAME) +BUCKET_NAME = "grpm-systest-{}".format(int(1000 * time.time())) +BUCKET_POST_URL = "https://www.googleapis.com/storage/v1/b/" +BUCKET_URL = "https://www.googleapis.com/storage/v1/b/{}".format(BUCKET_NAME) -_DOWNLOAD_BASE = u"https://www.googleapis.com/download/storage/v1/b/{}".format( +_DOWNLOAD_BASE = "https://www.googleapis.com/download/storage/v1/b/{}".format( BUCKET_NAME ) -DOWNLOAD_URL_TEMPLATE = _DOWNLOAD_BASE + u"/o/{blob_name}?alt=media" +DOWNLOAD_URL_TEMPLATE = _DOWNLOAD_BASE + "/o/{blob_name}?alt=media" _UPLOAD_BASE = ( - u"https://www.googleapis.com/upload/storage/v1/b/{}".format(BUCKET_NAME) - + u"/o?uploadType=" + "https://www.googleapis.com/upload/storage/v1/b/{}".format(BUCKET_NAME) + + "/o?uploadType=" ) -SIMPLE_UPLOAD_TEMPLATE = _UPLOAD_BASE + u"media&name={blob_name}" -MULTIPART_UPLOAD = _UPLOAD_BASE + u"multipart" -RESUMABLE_UPLOAD = _UPLOAD_BASE + u"resumable" +SIMPLE_UPLOAD_TEMPLATE = _UPLOAD_BASE + "media&name={blob_name}" +MULTIPART_UPLOAD = _UPLOAD_BASE + "multipart" +RESUMABLE_UPLOAD = _UPLOAD_BASE + "resumable" -METADATA_URL_TEMPLATE = BUCKET_URL + u"/o/{blob_name}" +METADATA_URL_TEMPLATE = BUCKET_URL + "/o/{blob_name}" -GCS_RW_SCOPE = u"https://www.googleapis.com/auth/devstorage.read_write" +GCS_RW_SCOPE = "https://www.googleapis.com/auth/devstorage.read_write" # Generated using random.choice() with all 256 byte choices. 
ENCRYPTION_KEY = ( b"R\xb8\x1b\x94T\xea_\xa8\x93\xae\xd1\xf6\xfca\x15\x0ekA" @@ -79,7 +79,7 @@ def get_encryption_headers(key=ENCRYPTION_KEY): key_b64 = base64.b64encode(key) return { - u"x-goog-encryption-algorithm": u"AES256", - u"x-goog-encryption-key": key_b64.decode(u"utf-8"), - u"x-goog-encryption-key-sha256": key_hash_b64.decode(u"utf-8"), + "x-goog-encryption-algorithm": "AES256", + "x-goog-encryption-key": key_b64.decode("utf-8"), + "x-goog-encryption-key-sha256": key_hash_b64.decode("utf-8"), } diff --git a/tests/unit/requests/test__helpers.py b/tests/unit/requests/test__helpers.py index e1c720a0..37e0e1a9 100644 --- a/tests/unit/requests/test__helpers.py +++ b/tests/unit/requests/test__helpers.py @@ -12,8 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import http.client + import mock -from six.moves import http_client from google.resumable_media.requests import _request_helpers @@ -22,12 +23,12 @@ class TestRequestsMixin(object): def test__get_status_code(self): - status_code = int(http_client.OK) + status_code = int(http.client.OK) response = _make_response(status_code) assert status_code == _request_helpers.RequestsMixin._get_status_code(response) def test__get_headers(self): - headers = {u"fruit": u"apple"} + headers = {"fruit": "apple"} response = mock.Mock(headers=headers, spec=["headers"]) assert headers == _request_helpers.RequestsMixin._get_headers(response) @@ -55,11 +56,11 @@ def test__get_body_w_content_consumed(self): def test_http_request(): - transport, responses = _make_transport(http_client.OK) - method = u"POST" - url = u"http://test.invalid" + transport, responses = _make_transport(http.client.OK) + method = "POST" + url = "http://test.invalid" data = mock.sentinel.data - headers = {u"one": u"fish", u"blue": u"fish"} + headers = {"one": "fish", "blue": "fish"} timeout = mock.sentinel.timeout ret_val = _request_helpers.http_request( transport, @@ -85,9 +86,9 @@ def test_http_request(): def test_http_request_defaults(): - transport, responses = _make_transport(http_client.OK) - method = u"POST" - url = u"http://test.invalid" + transport, responses = _make_transport(http.client.OK) + method = "POST" + url = "http://test.invalid" ret_val = _request_helpers.http_request(transport, method, url) assert ret_val is responses[0] diff --git a/tests/unit/requests/test_download.py b/tests/unit/requests/test_download.py index fd42b330..fd430ef7 100644 --- a/tests/unit/requests/test_download.py +++ b/tests/unit/requests/test_download.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +import http.client import io import mock import pytest -from six.moves import http_client from google.resumable_media import common from google.resumable_media import _helpers @@ -24,10 +24,8 @@ from google.resumable_media.requests import _request_helpers -EXAMPLE_URL = ( - u"https://www.googleapis.com/download/storage/v1/b/" - u"{BUCKET}/o/{OBJECT}?alt=media" -) +URL_PREFIX = "https://www.googleapis.com/download/storage/v1/b/{BUCKET}/o/" +EXAMPLE_URL = URL_PREFIX + "{OBJECT}?alt=media" EXPECTED_TIMEOUT = (61, 60) @@ -60,7 +58,7 @@ def test__write_to_stream_with_hash_check_success(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." 
- header_value = u"crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" + header_value = "crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -84,8 +82,8 @@ def test__write_to_stream_with_hash_check_fail(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -97,10 +95,10 @@ def test__write_to_stream_with_hash_check_fail(self, checksum): error = exc_info.value assert error.response is response assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"fPAJHnnoi/+NadyNxT2c2w==" + if checksum == "md5": + good_checksum = "fPAJHnnoi/+NadyNxT2c2w==" else: - good_checksum = u"qmNCyg==" + good_checksum = "qmNCyg==" msg = download_mod._CHECKSUM_MISMATCH.format( EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper() ) @@ -124,8 +122,8 @@ def test__write_to_stream_with_invalid_checksum_type(self): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -165,18 +163,18 @@ def _consume_helper( assert ret_val is transport.request.return_value called_kwargs = { - u"data": None, - u"headers": download._headers, - u"timeout": EXPECTED_TIMEOUT if timeout is None else timeout, + "data": None, + "headers": download._headers, + "timeout": EXPECTED_TIMEOUT if timeout is None else timeout, } if chunks: assert stream is not None - called_kwargs[u"stream"] = True + called_kwargs["stream"] = True - transport.request.assert_called_once_with(u"GET", EXAMPLE_URL, **called_kwargs) + transport.request.assert_called_once_with("GET", EXAMPLE_URL, **called_kwargs) - range_bytes = u"bytes={:d}-{:d}".format(0, end) - assert download._headers[u"range"] == range_bytes + range_bytes = "bytes={:d}-{:d}".format(0, end) + assert download._headers["range"] == range_bytes assert download.finished return transport @@ -209,7 +207,7 @@ def test_consume_with_stream(self, checksum): def test_consume_with_stream_hash_check_success(self, checksum): stream = io.BytesIO() chunks = (b"up down ", b"charlie ", b"brown") - header_value = u"crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" + header_value = "crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" headers = {_helpers._HASH_HEADER: header_value} transport = self._consume_helper( stream=stream, chunks=chunks, response_headers=headers, checksum=checksum @@ -231,8 +229,8 @@ def test_consume_with_stream_hash_check_fail(self, checksum): download = download_mod.Download(EXAMPLE_URL, stream=stream, checksum=checksum) chunks = (b"zero zero", b"niner tango") - bad_checksum = u"anVzdCBub3QgdGhpcyAxLA==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + 
bad_checksum = "anVzdCBub3QgdGhpcyAxLA==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} transport = mock.Mock(spec=["request"]) transport.request.return_value = _mock_response(chunks=chunks, headers=headers) @@ -248,10 +246,10 @@ def test_consume_with_stream_hash_check_fail(self, checksum): error = exc_info.value assert error.response is transport.request.return_value assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"1A/dxEpys717C6FH7FIWDw==" + if checksum == "md5": + good_checksum = "1A/dxEpys717C6FH7FIWDw==" else: - good_checksum = u"GvNZlg==" + good_checksum = "GvNZlg==" msg = download_mod._CHECKSUM_MISMATCH.format( EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper() ) @@ -259,7 +257,7 @@ def test_consume_with_stream_hash_check_fail(self, checksum): # Check mocks. transport.request.assert_called_once_with( - u"GET", + "GET", EXAMPLE_URL, data=None, headers={}, @@ -271,9 +269,9 @@ def test_consume_with_headers(self): headers = {} # Empty headers end = 16383 self._consume_helper(end=end, headers=headers) - range_bytes = u"bytes={:d}-{:d}".format(0, end) + range_bytes = "bytes={:d}-{:d}".format(0, end) # Make sure the headers have been modified. - assert headers == {u"range": range_bytes} + assert headers == {"range": range_bytes} class TestRawDownload(object): @@ -307,7 +305,7 @@ def test__write_to_stream_with_hash_check_success(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - header_value = u"crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" + header_value = "crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" headers = {_helpers._HASH_HEADER: header_value} response = _mock_raw_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -333,8 +331,8 @@ def test__write_to_stream_with_hash_check_fail(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_raw_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -346,10 +344,10 @@ def test__write_to_stream_with_hash_check_fail(self, checksum): error = exc_info.value assert error.response is response assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"fPAJHnnoi/+NadyNxT2c2w==" + if checksum == "md5": + good_checksum = "fPAJHnnoi/+NadyNxT2c2w==" else: - good_checksum = u"qmNCyg==" + good_checksum = "qmNCyg==" msg = download_mod._CHECKSUM_MISMATCH.format( EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper() ) @@ -373,8 +371,8 @@ def test__write_to_stream_with_invalid_checksum_type(self): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." 
- bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -416,7 +414,7 @@ def _consume_helper( if chunks: assert stream is not None transport.request.assert_called_once_with( - u"GET", + "GET", EXAMPLE_URL, data=None, headers=download._headers, @@ -424,8 +422,8 @@ def _consume_helper( timeout=EXPECTED_TIMEOUT if timeout is None else timeout, ) - range_bytes = u"bytes={:d}-{:d}".format(0, end) - assert download._headers[u"range"] == range_bytes + range_bytes = "bytes={:d}-{:d}".format(0, end) + assert download._headers["range"] == range_bytes assert download.finished return transport @@ -458,7 +456,7 @@ def test_consume_with_stream(self, checksum): def test_consume_with_stream_hash_check_success(self, checksum): stream = io.BytesIO() chunks = (b"up down ", b"charlie ", b"brown") - header_value = u"crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" + header_value = "crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" headers = {_helpers._HASH_HEADER: header_value} transport = self._consume_helper( stream=stream, chunks=chunks, response_headers=headers, checksum=checksum @@ -482,8 +480,8 @@ def test_consume_with_stream_hash_check_fail(self, checksum): ) chunks = (b"zero zero", b"niner tango") - bad_checksum = u"anVzdCBub3QgdGhpcyAxLA==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "anVzdCBub3QgdGhpcyAxLA==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} transport = mock.Mock(spec=["request"]) transport.request.return_value = _mock_raw_response( @@ -501,10 +499,10 @@ def test_consume_with_stream_hash_check_fail(self, checksum): error = exc_info.value assert error.response is transport.request.return_value assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"1A/dxEpys717C6FH7FIWDw==" + if checksum == "md5": + good_checksum = "1A/dxEpys717C6FH7FIWDw==" else: - good_checksum = u"GvNZlg==" + good_checksum = "GvNZlg==" msg = download_mod._CHECKSUM_MISMATCH.format( EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper() ) @@ -512,7 +510,7 @@ def test_consume_with_stream_hash_check_fail(self, checksum): # Check mocks. transport.request.assert_called_once_with( - u"GET", + "GET", EXAMPLE_URL, data=None, headers={}, @@ -524,22 +522,22 @@ def test_consume_with_headers(self): headers = {} # Empty headers end = 16383 self._consume_helper(end=end, headers=headers) - range_bytes = u"bytes={:d}-{:d}".format(0, end) + range_bytes = "bytes={:d}-{:d}".format(0, end) # Make sure the headers have been modified. 
- assert headers == {u"range": range_bytes} + assert headers == {"range": range_bytes} class TestChunkedDownload(object): @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) + return "bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u"content-length": u"{:d}".format(content_length), - u"content-range": resp_range, + "content-length": "{:d}".format(content_length), + "content-range": resp_range, } def _mock_response( @@ -567,7 +565,7 @@ def _mock_transport(self, start, chunk_size, total_bytes, content=b""): start + chunk_size - 1, total_bytes, content=content, - status_code=int(http_client.OK), + status_code=int(http.client.OK), ) return transport @@ -590,10 +588,10 @@ def test_consume_next_chunk(self): # Actually consume the chunk and check the output. ret_val = download.consume_next_chunk(transport) assert ret_val is transport.request.return_value - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", EXAMPLE_URL, data=None, headers=download_headers, @@ -619,10 +617,10 @@ def test_consume_next_chunk_with_custom_timeout(self): # Actually consume the chunk and check the output. download.consume_next_chunk(transport, timeout=14.7) - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", EXAMPLE_URL, data=None, headers=download_headers, @@ -633,14 +631,14 @@ def test_consume_next_chunk_with_custom_timeout(self): class TestRawChunkedDownload(object): @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) + return "bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u"content-length": u"{:d}".format(content_length), - u"content-range": resp_range, + "content-length": "{:d}".format(content_length), + "content-range": resp_range, } def _mock_response( @@ -668,7 +666,7 @@ def _mock_transport(self, start, chunk_size, total_bytes, content=b""): start + chunk_size - 1, total_bytes, content=content, - status_code=int(http_client.OK), + status_code=int(http.client.OK), ) return transport @@ -691,10 +689,10 @@ def test_consume_next_chunk(self): # Actually consume the chunk and check the output. 
ret_val = download.consume_next_chunk(transport) assert ret_val is transport.request.return_value - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", EXAMPLE_URL, data=None, headers=download_headers, @@ -721,10 +719,10 @@ def test_consume_next_chunk_with_custom_timeout(self): # Actually consume the chunk and check the output. download.consume_next_chunk(transport, timeout=14.7) - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", EXAMPLE_URL, data=None, headers=download_headers, @@ -746,7 +744,7 @@ def test_non_gzipped(self): assert md5_hash is mock.sentinel.md5_hash def test_gzipped(self): - headers = {u"content-encoding": u"gzip"} + headers = {"content-encoding": "gzip"} response_raw = mock.Mock(headers=headers, spec=["headers", "_decoder"]) md5_hash = download_mod._add_decoder(response_raw, mock.sentinel.md5_hash) @@ -772,7 +770,7 @@ def test_decompress(self): md5_hash.update.assert_called_once_with(data) -def _mock_response(status_code=http_client.OK, chunks=(), headers=None): +def _mock_response(status_code=http.client.OK, chunks=(), headers=None): if headers is None: headers = {} @@ -783,12 +781,12 @@ def _mock_response(status_code=http_client.OK, chunks=(), headers=None): status_code=int(status_code), raw=mock_raw, spec=[ - u"__enter__", - u"__exit__", - u"iter_content", - u"status_code", - u"headers", - u"raw", + "__enter__", + "__exit__", + "iter_content", + "status_code", + "headers", + "raw", ], ) # i.e. context manager returns ``self``. @@ -804,7 +802,7 @@ def _mock_response(status_code=http_client.OK, chunks=(), headers=None): ) -def _mock_raw_response(status_code=http_client.OK, chunks=(), headers=None): +def _mock_raw_response(status_code=http.client.OK, chunks=(), headers=None): if headers is None: headers = {} @@ -815,12 +813,12 @@ def _mock_raw_response(status_code=http_client.OK, chunks=(), headers=None): status_code=int(status_code), raw=mock_raw, spec=[ - u"__enter__", - u"__exit__", - u"iter_content", - u"status_code", - u"headers", - u"raw", + "__enter__", + "__exit__", + "iter_content", + "status_code", + "headers", + "raw", ], ) # i.e. context manager returns ``self``. diff --git a/tests/unit/requests/test_upload.py b/tests/unit/requests/test_upload.py index 97946e37..b8d83157 100644 --- a/tests/unit/requests/test_upload.py +++ b/tests/unit/requests/test_upload.py @@ -12,31 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +import http.client import io import json import mock -from six.moves import http_client from google import resumable_media import google.resumable_media.requests.upload as upload_mod -SIMPLE_URL = ( - u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" - u"uploadType=media&name={OBJECT}" -) -MULTIPART_URL = ( - u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" - u"uploadType=multipart" -) -RESUMABLE_URL = ( - u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" 
- u"uploadType=resumable" -) +URL_PREFIX = "https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o" +SIMPLE_URL = URL_PREFIX + "?uploadType=media&name={OBJECT}" +MULTIPART_URL = URL_PREFIX + "?uploadType=multipart" +RESUMABLE_URL = URL_PREFIX + "?uploadType=resumable" ONE_MB = 1024 * 1024 -BASIC_CONTENT = u"text/plain" -JSON_TYPE = u"application/json; charset=UTF-8" +BASIC_CONTENT = "text/plain" +JSON_TYPE = "application/json; charset=UTF-8" JSON_TYPE_LINE = b"content-type: application/json; charset=UTF-8\r\n" EXPECTED_TIMEOUT = (61, 60) @@ -52,9 +44,9 @@ def test_transmit(self): assert not upload.finished ret_val = upload.transmit(transport, data, content_type) assert ret_val is transport.request.return_value - upload_headers = {u"content-type": content_type} + upload_headers = {"content-type": content_type} transport.request.assert_called_once_with( - u"POST", + "POST", SIMPLE_URL, data=data, headers=upload_headers, @@ -71,9 +63,9 @@ def test_transmit_w_custom_timeout(self): upload.transmit(transport, data, content_type, timeout=12.6) - expected_headers = {u"content-type": content_type} + expected_headers = {"content-type": content_type} transport.request.assert_called_once_with( - u"POST", + "POST", SIMPLE_URL, data=data, headers=expected_headers, @@ -82,10 +74,10 @@ def test_transmit_w_custom_timeout(self): class TestMultipartUpload(object): - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==4==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==4==") def test_transmit(self, mock_get_boundary): data = b"Mock data here and there." - metadata = {u"Hey": u"You", u"Guys": u"90909"} + metadata = {"Hey": "You", "Guys": "90909"} content_type = BASIC_CONTENT upload = upload_mod.MultipartUpload(MULTIPART_URL) @@ -98,7 +90,7 @@ def test_transmit(self, mock_get_boundary): b"--==4==\r\n" + JSON_TYPE_LINE + b"\r\n" - + json.dumps(metadata).encode(u"utf-8") + + json.dumps(metadata).encode("utf-8") + b"\r\n" + b"--==4==\r\n" b"content-type: text/plain\r\n" @@ -107,9 +99,9 @@ def test_transmit(self, mock_get_boundary): b"--==4==--" ) multipart_type = b'multipart/related; boundary="==4=="' - upload_headers = {u"content-type": multipart_type} + upload_headers = {"content-type": multipart_type} transport.request.assert_called_once_with( - u"POST", + "POST", MULTIPART_URL, data=expected_payload, headers=upload_headers, @@ -118,10 +110,10 @@ def test_transmit(self, mock_get_boundary): assert upload.finished mock_get_boundary.assert_called_once_with() - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==4==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==4==") def test_transmit_w_custom_timeout(self, mock_get_boundary): data = b"Mock data here and there." 
- metadata = {u"Hey": u"You", u"Guys": u"90909"} + metadata = {"Hey": "You", "Guys": "90909"} content_type = BASIC_CONTENT upload = upload_mod.MultipartUpload(MULTIPART_URL) transport = mock.Mock(spec=["request"]) @@ -134,7 +126,7 @@ def test_transmit_w_custom_timeout(self, mock_get_boundary): b"--==4==\r\n", JSON_TYPE_LINE, b"\r\n", - json.dumps(metadata).encode(u"utf-8"), + json.dumps(metadata).encode("utf-8"), b"\r\n", b"--==4==\r\n", b"content-type: text/plain\r\n", @@ -144,10 +136,10 @@ def test_transmit_w_custom_timeout(self, mock_get_boundary): ) ) multipart_type = b'multipart/related; boundary="==4=="' - upload_headers = {u"content-type": multipart_type} + upload_headers = {"content-type": multipart_type} transport.request.assert_called_once_with( - u"POST", + "POST", MULTIPART_URL, data=expected_payload, headers=upload_headers, @@ -162,11 +154,11 @@ def test_initiate(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) data = b"Knock knock who is there" stream = io.BytesIO(data) - metadata = {u"name": u"got-jokes.txt"} + metadata = {"name": "got-jokes.txt"} transport = mock.Mock(spec=["request"]) - location = (u"http://test.invalid?upload_id=AACODBBBxuw9u3AA",) - response_headers = {u"location": location} + location = ("http://test.invalid?upload_id=AACODBBBxuw9u3AA",) + response_headers = {"location": location} post_response = _make_response(headers=response_headers) transport.request.return_value = post_response # Check resumable_url before. @@ -188,12 +180,12 @@ def test_initiate(self): # Make sure the mock was called as expected. json_bytes = b'{"name": "got-jokes.txt"}' expected_headers = { - u"content-type": JSON_TYPE, - u"x-upload-content-type": BASIC_CONTENT, - u"x-upload-content-length": u"{:d}".format(total_bytes), + "content-type": JSON_TYPE, + "x-upload-content-type": BASIC_CONTENT, + "x-upload-content-length": "{:d}".format(total_bytes), } transport.request.assert_called_once_with( - u"POST", + "POST", RESUMABLE_URL, data=json_bytes, headers=expected_headers, @@ -204,11 +196,11 @@ def test_initiate_w_custom_timeout(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) data = b"Knock knock who is there" stream = io.BytesIO(data) - metadata = {u"name": u"got-jokes.txt"} + metadata = {"name": "got-jokes.txt"} transport = mock.Mock(spec=["request"]) - location = (u"http://test.invalid?upload_id=AACODBBBxuw9u3AA",) - response_headers = {u"location": location} + location = ("http://test.invalid?upload_id=AACODBBBxuw9u3AA",) + response_headers = {"location": location} post_response = _make_response(headers=response_headers) transport.request.return_value = post_response @@ -224,12 +216,12 @@ def test_initiate_w_custom_timeout(self): # Make sure timeout was passed to the transport json_bytes = b'{"name": "got-jokes.txt"}' expected_headers = { - u"content-type": JSON_TYPE, - u"x-upload-content-type": BASIC_CONTENT, - u"x-upload-content-length": u"{:d}".format(100), + "content-type": JSON_TYPE, + "x-upload-content-type": BASIC_CONTENT, + "x-upload-content-length": "{:d}".format(100), } transport.request.assert_called_once_with( - u"POST", + "POST", RESUMABLE_URL, data=json_bytes, headers=expected_headers, @@ -242,7 +234,7 @@ def _upload_in_flight(data, headers=None): upload._stream = io.BytesIO(data) upload._content_type = BASIC_CONTENT upload._total_bytes = len(data) - upload._resumable_url = u"http://test.invalid?upload_id=not-none" + upload._resumable_url = "http://test.invalid?upload_id=not-none" return upload @staticmethod @@ -261,7 +253,7 @@ def 
test_transmit_next_chunk(self): assert chunk_size < len(data) upload._chunk_size = chunk_size # Make a fake 308 response. - response_headers = {u"range": u"bytes=0-{:d}".format(chunk_size - 1)} + response_headers = {"range": "bytes=0-{:d}".format(chunk_size - 1)} transport = self._chunk_mock( resumable_media.PERMANENT_REDIRECT, response_headers ) @@ -275,13 +267,13 @@ def test_transmit_next_chunk(self): assert upload._bytes_uploaded == chunk_size # Make sure the mock was called as expected. payload = data[:chunk_size] - content_range = u"bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) + content_range = "bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) expected_headers = { - u"content-range": content_range, - u"content-type": BASIC_CONTENT, + "content-range": content_range, + "content-type": BASIC_CONTENT, } transport.request.assert_called_once_with( - u"PUT", + "PUT", upload.resumable_url, data=payload, headers=expected_headers, @@ -297,7 +289,7 @@ def test_transmit_next_chunk_w_custom_timeout(self): upload._chunk_size = chunk_size # Make a fake 308 response. - response_headers = {u"range": u"bytes=0-{:d}".format(chunk_size - 1)} + response_headers = {"range": "bytes=0-{:d}".format(chunk_size - 1)} transport = self._chunk_mock( resumable_media.PERMANENT_REDIRECT, response_headers ) @@ -307,13 +299,13 @@ def test_transmit_next_chunk_w_custom_timeout(self): # Make sure timeout was passed to the transport payload = data[:chunk_size] - content_range = u"bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) + content_range = "bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) expected_headers = { - u"content-range": content_range, - u"content-type": BASIC_CONTENT, + "content-range": content_range, + "content-type": BASIC_CONTENT, } transport.request.assert_called_once_with( - u"PUT", + "PUT", upload.resumable_url, data=payload, headers=expected_headers, @@ -324,10 +316,10 @@ def test_recover(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) upload._invalid = True # Make sure invalid. upload._stream = mock.Mock(spec=["seek"]) - upload._resumable_url = u"http://test.invalid?upload_id=big-deal" + upload._resumable_url = "http://test.invalid?upload_id=big-deal" end = 55555 - headers = {u"range": u"bytes=0-{:d}".format(end)} + headers = {"range": "bytes=0-{:d}".format(end)} transport = self._chunk_mock(resumable_media.PERMANENT_REDIRECT, headers) ret_val = upload.recover(transport) @@ -336,9 +328,9 @@ def test_recover(self): assert upload.bytes_uploaded == end + 1 assert not upload.invalid upload._stream.seek.assert_called_once_with(end + 1) - expected_headers = {u"content-range": u"bytes */*"} + expected_headers = {"content-range": "bytes */*"} transport.request.assert_called_once_with( - u"PUT", + "PUT", upload.resumable_url, data=None, headers=expected_headers, @@ -346,7 +338,7 @@ def test_recover(self): ) -def _make_response(status_code=http_client.OK, headers=None): +def _make_response(status_code=http.client.OK, headers=None): headers = headers or {} return mock.Mock( headers=headers, status_code=status_code, spec=["headers", "status_code"] diff --git a/tests/unit/test__download.py b/tests/unit/test__download.py index 378751f7..8c86f252 100644 --- a/tests/unit/test__download.py +++ b/tests/unit/test__download.py @@ -12,19 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import http.client import io import mock import pytest -from six.moves import http_client from google.resumable_media import _download from google.resumable_media import common EXAMPLE_URL = ( - u"https://www.googleapis.com/download/storage/v1/b/" - u"{BUCKET}/o/{OBJECT}?alt=media" + "https://www.googleapis.com/download/storage/v1/b/{BUCKET}/o/{OBJECT}?alt=media" ) @@ -42,7 +41,7 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): start = 11 end = 10001 - headers = {u"foof": u"barf"} + headers = {"foof": "barf"} download = _download.DownloadBase( EXAMPLE_URL, stream=mock.sentinel.stream, @@ -75,19 +74,19 @@ def test__get_status_code(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_status_code(None) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__get_headers(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_headers(None) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__get_body(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_body(None) - exc_info.match(u"virtual") + exc_info.match("virtual") class TestDownload(object): @@ -100,27 +99,27 @@ def test__prepare_request_already_finished(self): def test__prepare_request(self): download1 = _download.Download(EXAMPLE_URL) method1, url1, payload1, headers1 = download1._prepare_request() - assert method1 == u"GET" + assert method1 == "GET" assert url1 == EXAMPLE_URL assert payload1 is None assert headers1 == {} download2 = _download.Download(EXAMPLE_URL, start=53) method2, url2, payload2, headers2 = download2._prepare_request() - assert method2 == u"GET" + assert method2 == "GET" assert url2 == EXAMPLE_URL assert payload2 is None - assert headers2 == {u"range": u"bytes=53-"} + assert headers2 == {"range": "bytes=53-"} def test__prepare_request_with_headers(self): - headers = {u"spoonge": u"borb"} + headers = {"spoonge": "borb"} download = _download.Download(EXAMPLE_URL, start=11, end=111, headers=headers) method, url, payload, new_headers = download._prepare_request() - assert method == u"GET" + assert method == "GET" assert url == EXAMPLE_URL assert payload is None assert new_headers is headers - assert headers == {u"range": u"bytes=11-111", u"spoonge": u"borb"} + assert headers == {"range": "bytes=11-111", "spoonge": "borb"} def test__process_response(self): download = _download.Download(EXAMPLE_URL) @@ -128,7 +127,7 @@ def test__process_response(self): # Make sure **not finished** before. assert not download.finished - response = mock.Mock(status_code=int(http_client.OK), spec=["status_code"]) + response = mock.Mock(status_code=int(http.client.OK), spec=["status_code"]) ret_val = download._process_response(response) assert ret_val is None # Make sure **finished** after. @@ -141,7 +140,7 @@ def test__process_response_bad_status(self): # Make sure **not finished** before. 
assert not download.finished response = mock.Mock( - status_code=int(http_client.NOT_FOUND), spec=["status_code"] + status_code=int(http.client.NOT_FOUND), spec=["status_code"] ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -150,8 +149,8 @@ def test__process_response_bad_status(self): assert error.response is response assert len(error.args) == 5 assert error.args[1] == response.status_code - assert error.args[3] == http_client.OK - assert error.args[4] == http_client.PARTIAL_CONTENT + assert error.args[3] == http.client.OK + assert error.args[4] == http.client.PARTIAL_CONTENT # Make sure **finished** even after a failure. assert download.finished @@ -160,7 +159,7 @@ def test_consume(self): with pytest.raises(NotImplementedError) as exc_info: download.consume(None) - exc_info.match(u"virtual") + exc_info.match("virtual") class TestChunkedDownload(object): @@ -245,14 +244,14 @@ def test__get_byte_range_with_total_bytes(self): @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) + return "bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u"content-length": u"{:d}".format(content_length), - u"content-range": resp_range, + "content-length": "{:d}".format(content_length), + "content-range": resp_range, } def _mock_response( @@ -272,7 +271,7 @@ def test__prepare_request_already_finished(self): with pytest.raises(ValueError) as exc_info: download._prepare_request() - assert exc_info.match(u"Download has finished.") + assert exc_info.match("Download has finished.") def test__prepare_request_invalid(self): download = _download.ChunkedDownload(EXAMPLE_URL, 64, None) @@ -280,39 +279,39 @@ def test__prepare_request_invalid(self): with pytest.raises(ValueError) as exc_info: download._prepare_request() - assert exc_info.match(u"Download is invalid and cannot be re-used.") + assert exc_info.match("Download is invalid and cannot be re-used.") def test__prepare_request(self): chunk_size = 2048 download1 = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, None) method1, url1, payload1, headers1 = download1._prepare_request() - assert method1 == u"GET" + assert method1 == "GET" assert url1 == EXAMPLE_URL assert payload1 is None - assert headers1 == {u"range": u"bytes=0-2047"} + assert headers1 == {"range": "bytes=0-2047"} download2 = _download.ChunkedDownload( EXAMPLE_URL, chunk_size, None, start=19991 ) download2._total_bytes = 20101 method2, url2, payload2, headers2 = download2._prepare_request() - assert method2 == u"GET" + assert method2 == "GET" assert url2 == EXAMPLE_URL assert payload2 is None - assert headers2 == {u"range": u"bytes=19991-20100"} + assert headers2 == {"range": "bytes=19991-20100"} def test__prepare_request_with_headers(self): chunk_size = 2048 - headers = {u"patrizio": u"Starf-ish"} + headers = {"patrizio": "Starf-ish"} download = _download.ChunkedDownload( EXAMPLE_URL, chunk_size, None, headers=headers ) method, url, payload, new_headers = download._prepare_request() - assert method == u"GET" + assert method == "GET" assert url == EXAMPLE_URL assert payload is None assert new_headers is headers - expected = {u"patrizio": u"Starf-ish", u"range": u"bytes=0-2047"} + expected = {"patrizio": "Starf-ish", "range": "bytes=0-2047"} assert 
headers == expected def test__make_invalid(self): @@ -342,7 +341,7 @@ def test__process_response(self): already + chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) download._process_response(response) # Check internal state after. @@ -373,10 +372,10 @@ def test__process_response_transfer_encoding(self): already + chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) - response.headers[u"transfer-encoding"] = "chunked" - del response.headers[u"content-length"] + response.headers["transfer-encoding"] = "chunked" + del response.headers["content-length"] download._process_response(response) # Check internal state after. assert not download.finished @@ -398,7 +397,7 @@ def test__process_response_bad_status(self): assert download.total_bytes is None # Actually call the method to update. response = self._mock_response( - 0, total_bytes - 1, total_bytes, status_code=int(http_client.NOT_FOUND) + 0, total_bytes - 1, total_bytes, status_code=int(http.client.NOT_FOUND) ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -407,8 +406,8 @@ def test__process_response_bad_status(self): assert error.response is response assert len(error.args) == 5 assert error.args[1] == response.status_code - assert error.args[3] == http_client.OK - assert error.args[4] == http_client.PARTIAL_CONTENT + assert error.args[3] == http.client.OK + assert error.args[4] == http.client.PARTIAL_CONTENT # Check internal state after. assert not download.finished assert download.bytes_downloaded == 0 @@ -427,8 +426,8 @@ def test__process_response_missing_content_length(self): assert not download.invalid # Actually call the method to update. response = mock.Mock( - headers={u"content-range": u"bytes 0-99/99"}, - status_code=int(http_client.PARTIAL_CONTENT), + headers={"content-range": "bytes 0-99/99"}, + status_code=int(http.client.PARTIAL_CONTENT), content=b"DEADBEEF", spec=["headers", "status_code", "content"], ) @@ -438,7 +437,7 @@ def test__process_response_missing_content_length(self): error = exc_info.value assert error.response is response assert len(error.args) == 2 - assert error.args[1] == u"content-length" + assert error.args[1] == "content-length" # Check internal state after. assert not download.finished assert download.bytes_downloaded == 0 @@ -457,13 +456,13 @@ def test__process_response_bad_content_range(self): # Actually call the method to update. data = b"stuff" headers = { - u"content-length": u"{:d}".format(len(data)), - u"content-range": u"kites x-y/58", + "content-length": "{:d}".format(len(data)), + "content-range": "kites x-y/58", } response = mock.Mock( content=data, headers=headers, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), spec=["content", "headers", "status_code"], ) with pytest.raises(common.InvalidResponse) as exc_info: @@ -472,7 +471,7 @@ def test__process_response_bad_content_range(self): error = exc_info.value assert error.response is response assert len(error.args) == 3 - assert error.args[1] == headers[u"content-range"] + assert error.args[1] == headers["content-range"] # Check internal state after. 
assert not download.finished assert download.bytes_downloaded == 0 @@ -498,7 +497,7 @@ def test__process_response_body_wrong_length(self): chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -534,7 +533,7 @@ def test__process_response_when_finished(self): total_bytes - 1, total_bytes, content=data, - status_code=int(http_client.OK), + status_code=int(http.client.OK), ) download._process_response(response) # Check internal state after. @@ -566,7 +565,7 @@ def test__process_response_when_reaching_end(self): end, 8 * chunk_size, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) download._process_response(response) # Check internal state after. @@ -583,8 +582,8 @@ def test__process_response_when_content_range_is_zero(self): _fix_up_virtual(download) content_range = _download._ZERO_CONTENT_RANGE_HEADER - headers = {u"content-range": content_range} - status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE + headers = {"content-range": content_range} + status_code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE response = mock.Mock( headers=headers, status_code=status_code, spec=["headers", "status_code"] ) @@ -599,7 +598,7 @@ def test_consume_next_chunk(self): with pytest.raises(NotImplementedError) as exc_info: download.consume_next_chunk(None) - exc_info.match(u"virtual") + exc_info.match("virtual") class Test__add_bytes_range(object): @@ -613,35 +612,35 @@ def test_both_vals(self): headers = {} ret_val = _download.add_bytes_range(17, 1997, headers) assert ret_val is None - assert headers == {u"range": u"bytes=17-1997"} + assert headers == {"range": "bytes=17-1997"} def test_end_only(self): headers = {} ret_val = _download.add_bytes_range(None, 909, headers) assert ret_val is None - assert headers == {u"range": u"bytes=0-909"} + assert headers == {"range": "bytes=0-909"} def test_start_only(self): headers = {} ret_val = _download.add_bytes_range(3735928559, None, headers) assert ret_val is None - assert headers == {u"range": u"bytes=3735928559-"} + assert headers == {"range": "bytes=3735928559-"} def test_start_as_offset(self): headers = {} ret_val = _download.add_bytes_range(-123454321, None, headers) assert ret_val is None - assert headers == {u"range": u"bytes=-123454321"} + assert headers == {"range": "bytes=-123454321"} class Test_get_range_info(object): @staticmethod def _make_response(content_range): - headers = {u"content-range": content_range} + headers = {"content-range": content_range} return mock.Mock(headers=headers, spec=["headers"]) def _success_helper(self, **kwargs): - content_range = u"Bytes 7-11/42" + content_range = "Bytes 7-11/42" response = self._make_response(content_range) start_byte, end_byte, total_bytes = _download.get_range_info( response, _get_headers, **kwargs @@ -659,7 +658,7 @@ def test_success_with_callback(self): callback.assert_not_called() def _failure_helper(self, **kwargs): - content_range = u"nope x-6/y" + content_range = "nope x-6/y" response = self._make_response(content_range) with pytest.raises(common.InvalidResponse) as exc_info: _download.get_range_info(response, _get_headers, **kwargs) @@ -685,7 +684,7 @@ def _missing_header_helper(self, **kwargs): error = exc_info.value assert error.response is response assert len(error.args) == 2 - assert error.args[1] == u"content-range" + assert error.args[1] == 
"content-range" def test_missing_header(self): self._missing_header_helper() @@ -699,22 +698,22 @@ def test_missing_header_with_callback(self): class Test__check_for_zero_content_range(object): @staticmethod def _make_response(content_range, status_code): - headers = {u"content-range": content_range} + headers = {"content-range": content_range} return mock.Mock( headers=headers, status_code=status_code, spec=["headers", "status_code"] ) def test_status_code_416_and_test_content_range_zero_both(self): content_range = _download._ZERO_CONTENT_RANGE_HEADER - status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE + status_code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE response = self._make_response(content_range, status_code) assert _download._check_for_zero_content_range( response, _get_status_code, _get_headers ) def test_status_code_416_only(self): - content_range = u"bytes 2-5/3" - status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE + content_range = "bytes 2-5/3" + status_code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE response = self._make_response(content_range, status_code) assert not _download._check_for_zero_content_range( response, _get_status_code, _get_headers @@ -722,7 +721,7 @@ def test_status_code_416_only(self): def test_content_range_zero_only(self): content_range = _download._ZERO_CONTENT_RANGE_HEADER - status_code = http_client.OK + status_code = http.client.OK response = self._make_response(content_range, status_code) assert not _download._check_for_zero_content_range( response, _get_status_code, _get_headers diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index c0a9fecc..ddfec117 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -15,10 +15,11 @@ from __future__ import absolute_import import hashlib +import http.client + import mock import pytest import requests.exceptions -from six.moves import http_client from google.resumable_media import _helpers from google.resumable_media import common @@ -31,9 +32,9 @@ def test_do_nothing(): class Test_header_required(object): def _success_helper(self, **kwargs): - name = u"some-header" - value = u"The Right Hand Side" - headers = {name: value, u"other-name": u"other-value"} + name = "some-header" + value = "The Right Hand Side" + headers = {name: value, "other-name": "other-value"} response = mock.Mock(headers=headers, spec=["headers"]) result = _helpers.header_required(response, name, _get_headers, **kwargs) assert result == value @@ -48,7 +49,7 @@ def test_success_with_callback(self): def _failure_helper(self, **kwargs): response = mock.Mock(headers={}, spec=["headers"]) - name = u"any-name" + name = "any-name" with pytest.raises(common.InvalidResponse) as exc_info: _helpers.header_required(response, name, _get_headers, **kwargs) @@ -72,12 +73,12 @@ def _get_status_code(response): return response.status_code def test_success(self): - status_codes = (http_client.OK, http_client.CREATED) + status_codes = (http.client.OK, http.client.CREATED) acceptable = ( - http_client.OK, - int(http_client.OK), - http_client.CREATED, - int(http_client.CREATED), + http.client.OK, + int(http.client.OK), + http.client.CREATED, + int(http.client.CREATED), ) for value in acceptable: response = _make_response(value) @@ -87,18 +88,18 @@ def test_success(self): assert value == status_code def test_success_with_callback(self): - status_codes = (http_client.OK,) - response = _make_response(http_client.OK) + status_codes = (http.client.OK,) + response = _make_response(http.client.OK) callback 
= mock.Mock(spec=[]) status_code = _helpers.require_status_code( response, status_codes, self._get_status_code, callback=callback ) - assert status_code == http_client.OK + assert status_code == http.client.OK callback.assert_not_called() def test_failure(self): - status_codes = (http_client.CREATED, http_client.NO_CONTENT) - response = _make_response(http_client.OK) + status_codes = (http.client.CREATED, http.client.NO_CONTENT) + response = _make_response(http.client.OK) with pytest.raises(common.InvalidResponse) as exc_info: _helpers.require_status_code(response, status_codes, self._get_status_code) @@ -109,8 +110,8 @@ def test_failure(self): assert error.args[3:] == status_codes def test_failure_with_callback(self): - status_codes = (http_client.OK,) - response = _make_response(http_client.NOT_FOUND) + status_codes = (http.client.OK,) + response = _make_response(http.client.NOT_FOUND) callback = mock.Mock(spec=[]) with pytest.raises(common.InvalidResponse) as exc_info: _helpers.require_status_code( @@ -126,7 +127,7 @@ def test_failure_with_callback(self): class Test_calculate_retry_wait(object): - @mock.patch(u"random.randint", return_value=125) + @mock.patch("random.randint", return_value=125) def test_past_limit(self, randint_mock): base_wait, wait_time = _helpers.calculate_retry_wait(70.0, 64.0) @@ -134,7 +135,7 @@ def test_past_limit(self, randint_mock): assert wait_time == 64.125 randint_mock.assert_called_once_with(0, 1000) - @mock.patch(u"random.randint", return_value=250) + @mock.patch("random.randint", return_value=250) def test_at_limit(self, randint_mock): base_wait, wait_time = _helpers.calculate_retry_wait(50.0, 50.0) @@ -142,7 +143,7 @@ def test_at_limit(self, randint_mock): assert wait_time == 50.25 randint_mock.assert_called_once_with(0, 1000) - @mock.patch(u"random.randint", return_value=875) + @mock.patch("random.randint", return_value=875) def test_under_limit(self, randint_mock): base_wait, wait_time = _helpers.calculate_retry_wait(16.0, 33.0) @@ -150,7 +151,7 @@ def test_under_limit(self, randint_mock): assert wait_time == 32.875 randint_mock.assert_called_once_with(0, 1000) - @mock.patch(u"random.randint", return_value=875) + @mock.patch("random.randint", return_value=875) def test_custom_multiplier(self, randint_mock): base_wait, wait_time = _helpers.calculate_retry_wait(16.0, 64.0, 3) @@ -161,7 +162,7 @@ def test_custom_multiplier(self, randint_mock): class Test_wait_and_retry(object): def test_success_no_retry(self): - truthy = http_client.OK + truthy = http.client.OK assert truthy not in common.RETRYABLE response = _make_response(truthy) @@ -172,16 +173,16 @@ def test_success_no_retry(self): assert ret_val is response func.assert_called_once_with() - @mock.patch(u"time.sleep") - @mock.patch(u"random.randint") + @mock.patch("time.sleep") + @mock.patch("random.randint") def test_success_with_retry(self, randint_mock, sleep_mock): randint_mock.side_effect = [125, 625, 375] status_codes = ( - http_client.INTERNAL_SERVER_ERROR, - http_client.BAD_GATEWAY, - http_client.SERVICE_UNAVAILABLE, - http_client.NOT_FOUND, + http.client.INTERNAL_SERVER_ERROR, + http.client.BAD_GATEWAY, + http.client.SERVICE_UNAVAILABLE, + http.client.NOT_FOUND, ) responses = [_make_response(status_code) for status_code in status_codes] func = mock.Mock(side_effect=responses, spec=[]) @@ -203,16 +204,16 @@ def test_success_with_retry(self, randint_mock, sleep_mock): sleep_mock.assert_any_call(2.625) sleep_mock.assert_any_call(4.375) - @mock.patch(u"time.sleep") - @mock.patch(u"random.randint") 
+ @mock.patch("time.sleep") + @mock.patch("random.randint") def test_success_with_retry_custom_delay(self, randint_mock, sleep_mock): randint_mock.side_effect = [125, 625, 375] status_codes = ( - http_client.INTERNAL_SERVER_ERROR, - http_client.BAD_GATEWAY, - http_client.SERVICE_UNAVAILABLE, - http_client.NOT_FOUND, + http.client.INTERNAL_SERVER_ERROR, + http.client.BAD_GATEWAY, + http.client.SERVICE_UNAVAILABLE, + http.client.NOT_FOUND, ) responses = [_make_response(status_code) for status_code in status_codes] func = mock.Mock(side_effect=responses, spec=[]) @@ -238,12 +239,12 @@ def test_success_with_retry_custom_delay(self, randint_mock, sleep_mock): 48.375 ) # previous delay 12 * multiplier 4 + jitter 0.375 - @mock.patch(u"time.sleep") - @mock.patch(u"random.randint") + @mock.patch("time.sleep") + @mock.patch("random.randint") def test_success_with_retry_connection_error(self, randint_mock, sleep_mock): randint_mock.side_effect = [125, 625, 375] - response = _make_response(http_client.NOT_FOUND) + response = _make_response(http.client.NOT_FOUND) responses = [ requests.exceptions.ConnectionError, requests.exceptions.ConnectionError, @@ -268,12 +269,40 @@ def test_success_with_retry_connection_error(self, randint_mock, sleep_mock): sleep_mock.assert_any_call(2.625) sleep_mock.assert_any_call(4.375) + @mock.patch(u"time.sleep") + @mock.patch(u"random.randint") + def test_success_with_retry_chunked_encoding_error(self, randint_mock, sleep_mock): + randint_mock.side_effect = [125, 625, 375] + + response = _make_response(http.client.NOT_FOUND) + responses = [ + requests.exceptions.ChunkedEncodingError, + requests.exceptions.ChunkedEncodingError, + response, + ] + func = mock.Mock(side_effect=responses, spec=[]) + + retry_strategy = common.RetryStrategy() + ret_val = _helpers.wait_and_retry(func, _get_status_code, retry_strategy) + + assert ret_val == responses[-1] + + assert func.call_count == 3 + assert func.mock_calls == [mock.call()] * 3 + + assert randint_mock.call_count == 2 + assert randint_mock.mock_calls == [mock.call(0, 1000)] * 2 + + assert sleep_mock.call_count == 2 + sleep_mock.assert_any_call(1.125) + sleep_mock.assert_any_call(2.625) + @mock.patch(u"time.sleep") @mock.patch(u"random.randint") def test_connection_import_error_failure(self, randint_mock, sleep_mock): randint_mock.side_effect = [125, 625, 375] - response = _make_response(http_client.NOT_FOUND) + response = _make_response(http.client.NOT_FOUND) responses = [ requests.exceptions.ConnectionError, requests.exceptions.ConnectionError, @@ -291,19 +320,19 @@ def test_connection_import_error_failure(self, randint_mock, sleep_mock): retry_strategy = common.RetryStrategy() _helpers.wait_and_retry(func, _get_status_code, retry_strategy) - @mock.patch(u"time.sleep") - @mock.patch(u"random.randint") + @mock.patch("time.sleep") + @mock.patch("random.randint") def test_retry_exceeds_max_cumulative(self, randint_mock, sleep_mock): randint_mock.side_effect = [875, 0, 375, 500, 500, 250, 125] status_codes = ( - http_client.SERVICE_UNAVAILABLE, - http_client.GATEWAY_TIMEOUT, + http.client.SERVICE_UNAVAILABLE, + http.client.GATEWAY_TIMEOUT, common.TOO_MANY_REQUESTS, - http_client.INTERNAL_SERVER_ERROR, - http_client.SERVICE_UNAVAILABLE, - http_client.BAD_GATEWAY, - http_client.GATEWAY_TIMEOUT, + http.client.INTERNAL_SERVER_ERROR, + http.client.SERVICE_UNAVAILABLE, + http.client.BAD_GATEWAY, + http.client.GATEWAY_TIMEOUT, common.TOO_MANY_REQUESTS, ) responses = [_make_response(status_code) for status_code in status_codes] @@ -330,8 
+359,8 @@ def test_retry_exceeds_max_cumulative(self, randint_mock, sleep_mock): sleep_mock.assert_any_call(32.25) sleep_mock.assert_any_call(64.125) - @mock.patch(u"time.sleep") - @mock.patch(u"random.randint") + @mock.patch("time.sleep") + @mock.patch("random.randint") def test_retry_exceeded_reraises_connection_error(self, randint_mock, sleep_mock): randint_mock.side_effect = [875, 0, 375, 500, 500, 250, 125] @@ -387,47 +416,80 @@ def test__get_checksum_object_invalid(): _helpers._get_checksum_object("invalid") -def test_crc32c_throws_import_error(): - try: - import builtins - except ImportError: - import __builtin__ as builtins - orig_import = builtins.__import__ +@mock.patch("builtins.__import__") +def test__get_crc32_object_wo_google_crc32c_wo_crcmod(mock_import): + mock_import.side_effect = ImportError("testing") - # Raises ImportError for name == "crc32c" or name == "crcmod" - def mock_import(name, globals, locals, fromlist, level=None): - raise ImportError + with pytest.raises(ImportError): + _helpers._get_crc32c_object() - builtins.__import__ = mock_import + expected_calls = [ + mock.call("google_crc32c", mock.ANY, None, None, 0), + mock.call("crcmod", mock.ANY, None, None, 0), + ] + mock_import.assert_has_calls(expected_calls) - try: - with pytest.raises(ImportError): - _helpers._get_crc32c_object() - finally: - builtins.__import__ = orig_import +@mock.patch("builtins.__import__") +def test__get_crc32_object_w_google_crc32c(mock_import): + google_crc32c = mock.Mock(spec=["Checksum"]) + mock_import.return_value = google_crc32c -@pytest.mark.filterwarnings("ignore::RuntimeWarning") -def test_crc32c_warning_on_slow_crcmod(): - try: - import builtins - except ImportError: - import __builtin__ as builtins + found = _helpers._get_crc32c_object() - orig_import = builtins.__import__ + assert found is google_crc32c.Checksum.return_value + google_crc32c.Checksum.assert_called_once_with() + + mock_import.assert_called_once_with("google_crc32c", mock.ANY, None, None, 0) + + +@mock.patch("builtins.__import__") +def test__get_crc32_object_wo_google_crc32c_w_crcmod(mock_import): + crcmod = mock.Mock(spec=["predefined", "crcmod"]) + crcmod.predefined = mock.Mock(spec=["Crc"]) + crcmod.crcmod = mock.Mock(spec=["_usingExtension"]) + mock_import.side_effect = [ImportError("testing"), crcmod, crcmod.crcmod] + + found = _helpers._get_crc32c_object() + + assert found is crcmod.predefined.Crc.return_value + crcmod.predefined.Crc.assert_called_once_with("crc-32c") + + expected_calls = [ + mock.call("google_crc32c", mock.ANY, None, None, 0), + mock.call("crcmod", mock.ANY, None, None, 0), + mock.call("crcmod.crcmod", mock.ANY, {}, ["_usingExtension"], 0), + ] + mock_import.assert_has_calls(expected_calls) + + +@pytest.mark.filterwarnings("ignore::RuntimeWarning") +@mock.patch("builtins.__import__") +def test__is_fast_crcmod_wo_extension_warning(mock_import): + crcmod = mock.Mock(spec=["crcmod"]) + crcmod.crcmod = mock.Mock(spec=["_usingExtension"]) + crcmod.crcmod._usingExtension = False + mock_import.return_value = crcmod.crcmod + + assert not _helpers._is_fast_crcmod() + + mock_import.assert_called_once_with( + "crcmod.crcmod", + mock.ANY, + {}, + ["_usingExtension"], + 0, + ) - # crcmod.crcmod is the only import. 
- def mock_import(name, globals, locals, fromlist, level): - crcmod = mock.MagicMock() - crcmod._usingExtension = False - return crcmod - builtins.__import__ = mock_import +@mock.patch("builtins.__import__") +def test__is_fast_crcmod_w_extension(mock_import): + crcmod = mock.Mock(spec=["crcmod"]) + crcmod.crcmod = mock.Mock(spec=["_usingExtension"]) + crcmod.crcmod._usingExtension = True + mock_import.return_value = crcmod.crcmod - try: - assert not _helpers._is_fast_crcmod() - finally: - builtins.__import__ = orig_import + assert _helpers._is_fast_crcmod() def test__DoNothingHash(): @@ -437,11 +499,11 @@ def test__DoNothingHash(): class Test__get_expected_checksum(object): - @pytest.mark.parametrize("template", [u"crc32c={},md5={}", u"crc32c={}, md5={}"]) + @pytest.mark.parametrize("template", ["crc32c={},md5={}", "crc32c={}, md5={}"]) @pytest.mark.parametrize("checksum", ["md5", "crc32c"]) @mock.patch("google.resumable_media._helpers._LOGGER") def test__w_header_present(self, _LOGGER, template, checksum): - checksums = {"md5": u"b2twdXNodGhpc2J1dHRvbg==", "crc32c": u"3q2+7w=="} + checksums = {"md5": "b2twdXNodGhpc2J1dHRvbg==", "crc32c": "3q2+7w=="} header_value = template.format(checksums["crc32c"], checksums["md5"]) headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(headers=headers) @@ -486,8 +548,8 @@ def _get_headers(response): class Test__parse_checksum_header(object): - CRC32C_CHECKSUM = u"3q2+7w==" - MD5_CHECKSUM = u"c2l4dGVlbmJ5dGVzbG9uZw==" + CRC32C_CHECKSUM = "3q2+7w==" + MD5_CHECKSUM = "c2l4dGVlbmJ5dGVzbG9uZw==" def test_empty_value(self): header_value = None @@ -502,7 +564,7 @@ def test_empty_value(self): assert crc32c_header is None def test_crc32c_only(self): - header_value = u"crc32c={}".format(self.CRC32C_CHECKSUM) + header_value = "crc32c={}".format(self.CRC32C_CHECKSUM) response = None md5_header = _helpers._parse_checksum_header( header_value, response, checksum_label="md5" @@ -514,7 +576,7 @@ def test_crc32c_only(self): assert crc32c_header == self.CRC32C_CHECKSUM def test_md5_only(self): - header_value = u"md5={}".format(self.MD5_CHECKSUM) + header_value = "md5={}".format(self.MD5_CHECKSUM) response = None md5_header = _helpers._parse_checksum_header( header_value, response, checksum_label="md5" @@ -526,7 +588,7 @@ def test_md5_only(self): assert crc32c_header is None def test_both_crc32c_and_md5(self): - header_value = u"crc32c={},md5={}".format( + header_value = "crc32c={},md5={}".format( self.CRC32C_CHECKSUM, self.MD5_CHECKSUM ) response = None @@ -540,8 +602,8 @@ def test_both_crc32c_and_md5(self): assert crc32c_header == self.CRC32C_CHECKSUM def test_md5_multiple_matches(self): - another_checksum = u"eW91IGRpZCBXQVQgbm93Pw==" - header_value = u"md5={},md5={}".format(self.MD5_CHECKSUM, another_checksum) + another_checksum = "eW91IGRpZCBXQVQgbm93Pw==" + header_value = "md5={},md5={}".format(self.MD5_CHECKSUM, another_checksum) response = mock.sentinel.response with pytest.raises(common.InvalidResponse) as exc_info: diff --git a/tests/unit/test__upload.py b/tests/unit/test__upload.py index 18411ca8..bbd1cb0f 100644 --- a/tests/unit/test__upload.py +++ b/tests/unit/test__upload.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import http.client import io import sys import mock import pytest -from six.moves import http_client from google import resumable_media from google.resumable_media import _helpers @@ -25,21 +25,13 @@ from google.resumable_media import common -SIMPLE_URL = ( - u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" - u"uploadType=media&name={OBJECT}" -) -MULTIPART_URL = ( - u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" - u"uploadType=multipart" -) -RESUMABLE_URL = ( - u"https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o?" - u"uploadType=resumable" -) +URL_PREFIX = "https://www.googleapis.com/upload/storage/v1/b/{BUCKET}/o" +SIMPLE_URL = URL_PREFIX + "?uploadType=media&name={OBJECT}" +MULTIPART_URL = URL_PREFIX + "?uploadType=multipart" +RESUMABLE_URL = URL_PREFIX + "?uploadType=resumable" ONE_MB = 1024 * 1024 -BASIC_CONTENT = u"text/plain" -JSON_TYPE = u"application/json; charset=UTF-8" +BASIC_CONTENT = "text/plain" +JSON_TYPE = "application/json; charset=UTF-8" JSON_TYPE_LINE = b"content-type: application/json; charset=UTF-8\r\n" @@ -52,7 +44,7 @@ def test_constructor_defaults(self): _check_retry_strategy(upload) def test_constructor_explicit(self): - headers = {u"spin": u"doctors"} + headers = {"spin": "doctors"} upload = _upload.UploadBase(SIMPLE_URL, headers=headers) assert upload.upload_url == SIMPLE_URL assert upload._headers is headers @@ -78,7 +70,7 @@ def test__process_response_bad_status(self): # Make sure **not finished** before. assert not upload.finished - status_code = http_client.SERVICE_UNAVAILABLE + status_code = http.client.SERVICE_UNAVAILABLE response = _make_response(status_code=status_code) with pytest.raises(common.InvalidResponse) as exc_info: upload._process_response(response) @@ -87,7 +79,7 @@ def test__process_response_bad_status(self): assert error.response is response assert len(error.args) == 4 assert error.args[1] == status_code - assert error.args[3] == http_client.OK + assert error.args[3] == http.client.OK # Make sure **finished** after (even in failure). 
assert upload.finished @@ -107,19 +99,19 @@ def test__get_status_code(self): with pytest.raises(NotImplementedError) as exc_info: _upload.UploadBase._get_status_code(None) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__get_headers(self): with pytest.raises(NotImplementedError) as exc_info: _upload.UploadBase._get_headers(None) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__get_body(self): with pytest.raises(NotImplementedError) as exc_info: _upload.UploadBase._get_body(None) - exc_info.match(u"virtual") + exc_info.match("virtual") class TestSimpleUpload(object): @@ -129,39 +121,39 @@ def test__prepare_request_already_finished(self): with pytest.raises(ValueError) as exc_info: upload._prepare_request(b"", None) - exc_info.match(u"An upload can only be used once.") + exc_info.match("An upload can only be used once.") def test__prepare_request_non_bytes_data(self): upload = _upload.SimpleUpload(SIMPLE_URL) assert not upload.finished with pytest.raises(TypeError) as exc_info: - upload._prepare_request(u"", None) + upload._prepare_request("", None) - exc_info.match(u"must be bytes") + exc_info.match("must be bytes") def test__prepare_request(self): upload = _upload.SimpleUpload(SIMPLE_URL) - content_type = u"image/jpeg" + content_type = "image/jpeg" data = b"cheetos and eetos" method, url, payload, headers = upload._prepare_request(data, content_type) - assert method == u"POST" + assert method == "POST" assert url == SIMPLE_URL assert payload == data - assert headers == {u"content-type": content_type} + assert headers == {"content-type": content_type} def test__prepare_request_with_headers(self): - headers = {u"x-goog-cheetos": u"spicy"} + headers = {"x-goog-cheetos": "spicy"} upload = _upload.SimpleUpload(SIMPLE_URL, headers=headers) - content_type = u"image/jpeg" + content_type = "image/jpeg" data = b"some stuff" method, url, payload, new_headers = upload._prepare_request(data, content_type) - assert method == u"POST" + assert method == "POST" assert url == SIMPLE_URL assert payload == data assert new_headers is headers - expected = {u"content-type": content_type, u"x-goog-cheetos": u"spicy"} + expected = {"content-type": content_type, "x-goog-cheetos": "spicy"} assert headers == expected def test_transmit(self): @@ -169,7 +161,7 @@ def test_transmit(self): with pytest.raises(NotImplementedError) as exc_info: upload.transmit(None, None, None) - exc_info.match(u"virtual") + exc_info.match("virtual") class TestMultipartUpload(object): @@ -182,7 +174,7 @@ def test_constructor_defaults(self): _check_retry_strategy(upload) def test_constructor_explicit(self): - headers = {u"spin": u"doctors"} + headers = {"spin": "doctors"} upload = _upload.MultipartUpload(MULTIPART_URL, headers=headers, checksum="md5") assert upload.upload_url == MULTIPART_URL assert upload._headers is headers @@ -197,12 +189,12 @@ def test__prepare_request_already_finished(self): upload._prepare_request(b"Hi", {}, BASIC_CONTENT) def test__prepare_request_non_bytes_data(self): - data = u"Nope not bytes." + data = "Nope not bytes." 
upload = _upload.MultipartUpload(MULTIPART_URL) with pytest.raises(TypeError): upload._prepare_request(data, {}, BASIC_CONTENT) - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==3==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==3==") def _prepare_request_helper( self, mock_get_boundary, @@ -220,19 +212,19 @@ def _prepare_request_helper( # This should be fully overwritten by the calculated checksum, so # the output should not change even if this is set. if checksum == "md5": - metadata = {u"md5Hash": u"ZZZZZZZZZZZZZZZZZZZZZZ=="} + metadata = {"md5Hash": "ZZZZZZZZZZZZZZZZZZZZZZ=="} else: - metadata = {u"crc32c": u"ZZZZZZ=="} + metadata = {"crc32c": "ZZZZZZ=="} else: # To simplify parsing the response, omit other test metadata if a # checksum is specified. - metadata = {u"Some": u"Stuff"} if not checksum else {} + metadata = {"Some": "Stuff"} if not checksum else {} content_type = BASIC_CONTENT method, url, payload, new_headers = upload._prepare_request( data, metadata, content_type ) - assert method == u"POST" + assert method == "POST" assert url == MULTIPART_URL preamble = b"--==3==\r\n" + JSON_TYPE_LINE + b"\r\n" @@ -264,16 +256,16 @@ def _prepare_request_helper( def test__prepare_request(self): headers, multipart_type = self._prepare_request_helper() - assert headers == {u"content-type": multipart_type} + assert headers == {"content-type": multipart_type} def test__prepare_request_with_headers(self): - headers = {u"best": u"shirt", u"worst": u"hat"} + headers = {"best": "shirt", "worst": "hat"} new_headers, multipart_type = self._prepare_request_helper(headers=headers) assert new_headers is headers expected_headers = { - u"best": u"shirt", - u"content-type": multipart_type, - u"worst": u"hat", + "best": "shirt", + "content-type": multipart_type, + "worst": "hat", } assert expected_headers == headers @@ -287,7 +279,7 @@ def test__prepare_request_with_checksum(self, checksum): checksum=checksum, expected_checksum=checksums[checksum] ) assert headers == { - u"content-type": multipart_type, + "content-type": multipart_type, } @pytest.mark.parametrize("checksum", ["md5", "crc32c"]) @@ -302,7 +294,7 @@ def test__prepare_request_with_checksum_overwrite(self, checksum): test_overwrite=True, ) assert headers == { - u"content-type": multipart_type, + "content-type": multipart_type, } def test_transmit(self): @@ -310,7 +302,7 @@ def test_transmit(self): with pytest.raises(NotImplementedError) as exc_info: upload.transmit(None, None, None, None) - exc_info.match(u"virtual") + exc_info.match("virtual") class TestResumableUpload(object): @@ -368,7 +360,7 @@ def test_resumable_url_property(self): assert upload.resumable_url is None # Make sure we cannot set it on public @property. - new_url = u"http://test.invalid?upload_id=not-none" + new_url = "http://test.invalid?upload_id=not-none" with pytest.raises(AttributeError): upload.resumable_url = new_url @@ -405,7 +397,7 @@ def test_total_bytes_property(self): def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs): data = b"some really big big data." stream = io.BytesIO(data) - metadata = {u"name": u"big-data-file.txt"} + metadata = {"name": "big-data-file.txt"} upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB, headers=upload_headers) orig_headers = upload._headers.copy() @@ -421,14 +413,14 @@ def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs) # Make sure the ``upload``-s state was updated. 
assert upload._stream == stream assert upload._content_type == BASIC_CONTENT - if method_kwargs == {u"stream_final": False}: + if method_kwargs == {"stream_final": False}: assert upload._total_bytes is None else: assert upload._total_bytes == len(data) # Make sure headers are untouched. assert headers is not upload._headers assert upload._headers == orig_headers - assert method == u"POST" + assert method == "POST" assert url == upload.upload_url # Make sure the stream is still at the beginning. assert stream.tell() == 0 @@ -438,23 +430,23 @@ def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs) def test__prepare_initiate_request(self): data, headers = self._prepare_initiate_request_helper() expected_headers = { - u"content-type": JSON_TYPE, - u"x-upload-content-length": u"{:d}".format(len(data)), - u"x-upload-content-type": BASIC_CONTENT, + "content-type": JSON_TYPE, + "x-upload-content-length": "{:d}".format(len(data)), + "x-upload-content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_initiate_request_with_headers(self): - headers = {u"caviar": u"beluga", u"top": u"quark"} + headers = {"caviar": "beluga", "top": "quark"} data, new_headers = self._prepare_initiate_request_helper( upload_headers=headers ) expected_headers = { - u"caviar": u"beluga", - u"content-type": JSON_TYPE, - u"top": u"quark", - u"x-upload-content-length": u"{:d}".format(len(data)), - u"x-upload-content-type": BASIC_CONTENT, + "caviar": "beluga", + "content-type": JSON_TYPE, + "top": "quark", + "x-upload-content-length": "{:d}".format(len(data)), + "x-upload-content-type": BASIC_CONTENT, } assert new_headers == expected_headers @@ -463,24 +455,24 @@ def test__prepare_initiate_request_known_size(self): data, headers = self._prepare_initiate_request_helper(total_bytes=total_bytes) assert len(data) == total_bytes expected_headers = { - u"content-type": u"application/json; charset=UTF-8", - u"x-upload-content-length": u"{:d}".format(total_bytes), - u"x-upload-content-type": BASIC_CONTENT, + "content-type": "application/json; charset=UTF-8", + "x-upload-content-length": "{:d}".format(total_bytes), + "x-upload-content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_initiate_request_unknown_size(self): _, headers = self._prepare_initiate_request_helper(stream_final=False) expected_headers = { - u"content-type": u"application/json; charset=UTF-8", - u"x-upload-content-type": BASIC_CONTENT, + "content-type": "application/json; charset=UTF-8", + "x-upload-content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_initiate_request_already_initiated(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) # Fake that the upload has been started. - upload._resumable_url = u"http://test.invalid?upload_id=definitely-started" + upload._resumable_url = "http://test.invalid?upload_id=definitely-started" with pytest.raises(ValueError): upload._prepare_initiate_request(io.BytesIO(), {}, BASIC_CONTENT) @@ -516,7 +508,7 @@ def test__process_initiate_response(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) _fix_up_virtual(upload) - headers = {u"location": u"http://test.invalid?upload_id=kmfeij3234"} + headers = {"location": "http://test.invalid?upload_id=kmfeij3234"} response = _make_response(headers=headers) # Check resumable_url before. 
assert upload._resumable_url is None @@ -524,14 +516,14 @@ def test__process_initiate_response(self): ret_val = upload._process_initiate_response(response) assert ret_val is None # Check resumable_url after. - assert upload._resumable_url == headers[u"location"] + assert upload._resumable_url == headers["location"] def test_initiate(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) with pytest.raises(NotImplementedError) as exc_info: upload.initiate(None, None, {}, BASIC_CONTENT) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__prepare_request_already_finished(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -540,7 +532,7 @@ def test__prepare_request_already_finished(self): with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.value.args == (u"Upload has finished.",) + assert exc_info.value.args == ("Upload has finished.",) def test__prepare_request_invalid(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -549,8 +541,8 @@ def test__prepare_request_invalid(self): with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.match(u"invalid state") - assert exc_info.match(u"recover()") + assert exc_info.match("invalid state") + assert exc_info.match("recover()") def test__prepare_request_not_initiated(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -560,21 +552,21 @@ def test__prepare_request_not_initiated(self): with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.match(u"upload has not been initiated") - assert exc_info.match(u"initiate()") + assert exc_info.match("upload has not been initiated") + assert exc_info.match("initiate()") def test__prepare_request_invalid_stream_state(self): stream = io.BytesIO(b"some data here") upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) upload._stream = stream - upload._resumable_url = u"http://test.invalid?upload_id=not-none" + upload._resumable_url = "http://test.invalid?upload_id=not-none" # Make stream.tell() disagree with bytes_uploaded. upload._bytes_uploaded = 5 assert upload.bytes_uploaded != stream.tell() with pytest.raises(ValueError) as exc_info: upload._prepare_request() - assert exc_info.match(u"Bytes stream is in unexpected state.") + assert exc_info.match("Bytes stream is in unexpected state.") @staticmethod def _upload_in_flight(data, headers=None, checksum=None): @@ -584,7 +576,7 @@ def _upload_in_flight(data, headers=None, checksum=None): upload._stream = io.BytesIO(data) upload._content_type = BASIC_CONTENT upload._total_bytes = len(data) - upload._resumable_url = u"http://test.invalid?upload_id=not-none" + upload._resumable_url = "http://test.invalid?upload_id=not-none" return upload def _prepare_request_helper(self, headers=None, checksum=None): @@ -592,7 +584,7 @@ def _prepare_request_helper(self, headers=None, checksum=None): upload = self._upload_in_flight(data, headers=headers, checksum=checksum) method, url, payload, new_headers = upload._prepare_request() # Check the response values. 
- assert method == u"PUT" + assert method == "PUT" assert url == upload.resumable_url assert payload == data # Make sure headers are **NOT** updated @@ -603,22 +595,22 @@ def _prepare_request_helper(self, headers=None, checksum=None): def test__prepare_request_success(self): headers = self._prepare_request_helper() expected_headers = { - u"content-range": u"bytes 0-32/33", - u"content-type": BASIC_CONTENT, + "content-range": "bytes 0-32/33", + "content-type": BASIC_CONTENT, } assert headers == expected_headers def test__prepare_request_success_with_headers(self): - headers = {u"cannot": u"touch this"} + headers = {"cannot": "touch this"} new_headers = self._prepare_request_helper(headers) assert new_headers is not headers expected_headers = { - u"content-range": u"bytes 0-32/33", - u"content-type": BASIC_CONTENT, + "content-range": "bytes 0-32/33", + "content-type": BASIC_CONTENT, } assert new_headers == expected_headers # Make sure the ``_headers`` are not incorporated. - assert u"cannot" not in new_headers + assert "cannot" not in new_headers @pytest.mark.parametrize("checksum", ["md5", "crc32c"]) def test__prepare_request_with_checksum(self, checksum): @@ -720,7 +712,7 @@ def test__process_response_bad_status(self): # Make sure the upload is valid before the failure. assert not upload.invalid - response = _make_response(status_code=http_client.NOT_FOUND) + response = _make_response(status_code=http.client.NOT_FOUND) with pytest.raises(common.InvalidResponse) as exc_info: upload._process_response(response, None) @@ -728,7 +720,7 @@ def test__process_response_bad_status(self): assert error.response is response assert len(error.args) == 5 assert error.args[1] == response.status_code - assert error.args[3] == http_client.OK + assert error.args[3] == http.client.OK assert error.args[4] == resumable_media.PERMANENT_REDIRECT # Make sure the upload is invalid after the failure. assert upload.invalid @@ -745,11 +737,11 @@ def test__process_response_success(self): # Set the response body. bytes_sent = 158 total_bytes = upload._bytes_uploaded + bytes_sent - response_body = u'{{"size": "{:d}"}}'.format(total_bytes) - response_body = response_body.encode(u"utf-8") + response_body = '{{"size": "{:d}"}}'.format(total_bytes) + response_body = response_body.encode("utf-8") response = mock.Mock( content=response_body, - status_code=http_client.OK, + status_code=http.client.OK, spec=["content", "status_code"], ) ret_val = upload._process_response(response, bytes_sent) @@ -774,7 +766,7 @@ def test__process_response_partial_no_range(self): error = exc_info.value assert error.response is response assert len(error.args) == 2 - assert error.args[1] == u"range" + assert error.args[1] == "range" def test__process_response_partial_bad_range(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -782,7 +774,7 @@ def test__process_response_partial_bad_range(self): # Make sure the upload is valid before the failure. assert not upload.invalid - headers = {u"range": u"nights 1-81"} + headers = {"range": "nights 1-81"} response = _make_response( status_code=resumable_media.PERMANENT_REDIRECT, headers=headers ) @@ -793,7 +785,7 @@ def test__process_response_partial_bad_range(self): error = exc_info.value assert error.response is response assert len(error.args) == 3 - assert error.args[1] == headers[u"range"] + assert error.args[1] == headers["range"] # Make sure the upload is invalid after the failure. assert upload.invalid @@ -803,7 +795,7 @@ def test__process_response_partial(self): # Check status before. 
assert upload._bytes_uploaded == 0 - headers = {u"range": u"bytes=0-171"} + headers = {"range": "bytes=0-171"} response = _make_response( status_code=resumable_media.PERMANENT_REDIRECT, headers=headers ) @@ -922,7 +914,7 @@ def test__validate_checksum_mismatch(self, checksum): error = exc_info.value assert error.response is response message = error.args[0] - correct_checksums = {"crc32c": u"Qg8thA==", "md5": u"GRvfKbqr5klAOwLkxgIf8w=="} + correct_checksums = {"crc32c": "Qg8thA==", "md5": "GRvfKbqr5klAOwLkxgIf8w=="} metadata_key = _helpers._get_metadata_key(checksum) assert message == _upload._UPLOAD_CHECKSUM_MISMATCH_MESSAGE.format( checksum.upper(), correct_checksums[checksum], metadata[metadata_key] @@ -933,7 +925,7 @@ def test_transmit_next_chunk(self): with pytest.raises(NotImplementedError) as exc_info: upload.transmit_next_chunk(None) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__prepare_recover_request_not_invalid(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -947,27 +939,27 @@ def test__prepare_recover_request(self): upload._invalid = True method, url, payload, headers = upload._prepare_recover_request() - assert method == u"PUT" + assert method == "PUT" assert url == upload.resumable_url assert payload is None - assert headers == {u"content-range": u"bytes */*"} + assert headers == {"content-range": "bytes */*"} # Make sure headers are untouched. assert upload._headers == {} def test__prepare_recover_request_with_headers(self): - headers = {u"lake": u"ocean"} + headers = {"lake": "ocean"} upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB, headers=headers) upload._invalid = True method, url, payload, new_headers = upload._prepare_recover_request() - assert method == u"PUT" + assert method == "PUT" assert url == upload.resumable_url assert payload is None - assert new_headers == {u"content-range": u"bytes */*"} + assert new_headers == {"content-range": "bytes */*"} # Make sure the ``_headers`` are not incorporated. - assert u"lake" not in new_headers + assert "lake" not in new_headers # Make sure headers are untouched. - assert upload._headers == {u"lake": u"ocean"} + assert upload._headers == {"lake": "ocean"} def test__process_recover_response_bad_status(self): upload = _upload.ResumableUpload(RESUMABLE_URL, ONE_MB) @@ -975,7 +967,7 @@ def test__process_recover_response_bad_status(self): upload._invalid = True - response = _make_response(status_code=http_client.BAD_REQUEST) + response = _make_response(status_code=http.client.BAD_REQUEST) with pytest.raises(common.InvalidResponse) as exc_info: upload._process_recover_response(response) @@ -1012,7 +1004,7 @@ def test__process_recover_response_bad_range(self): upload._stream = mock.Mock(spec=["seek"]) upload._bytes_uploaded = mock.sentinel.not_zero - headers = {u"range": u"bites=9-11"} + headers = {"range": "bites=9-11"} response = _make_response( status_code=resumable_media.PERMANENT_REDIRECT, headers=headers ) @@ -1022,7 +1014,7 @@ def test__process_recover_response_bad_range(self): error = exc_info.value assert error.response is response assert len(error.args) == 3 - assert error.args[1] == headers[u"range"] + assert error.args[1] == headers["range"] # Check the state of ``upload`` after (untouched). 
assert upload.bytes_uploaded is mock.sentinel.not_zero assert upload.invalid @@ -1038,7 +1030,7 @@ def test__process_recover_response_with_range(self): assert upload.bytes_uploaded != 0 end = 11 - headers = {u"range": u"bytes=0-{:d}".format(end)} + headers = {"range": "bytes=0-{:d}".format(end)} response = _make_response( status_code=resumable_media.PERMANENT_REDIRECT, headers=headers ) @@ -1054,10 +1046,10 @@ def test_recover(self): with pytest.raises(NotImplementedError) as exc_info: upload.recover(None) - exc_info.match(u"virtual") + exc_info.match("virtual") -@mock.patch(u"random.randrange", return_value=1234567890123456789) +@mock.patch("random.randrange", return_value=1234567890123456789) def test_get_boundary(mock_rand): result = _upload.get_boundary() assert result == b"===============1234567890123456789==" @@ -1065,11 +1057,11 @@ def test_get_boundary(mock_rand): class Test_construct_multipart_request(object): - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==1==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==1==") def test_binary(self, mock_get_boundary): data = b"By nary day tuh" - metadata = {u"name": u"hi-file.bin"} - content_type = u"application/octet-stream" + metadata = {"name": "hi-file.bin"} + content_type = "application/octet-stream" payload, multipart_boundary = _upload.construct_multipart_request( data, metadata, content_type ) @@ -1087,12 +1079,12 @@ def test_binary(self, mock_get_boundary): assert payload == expected_payload mock_get_boundary.assert_called_once_with() - @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==2==") + @mock.patch("google.resumable_media._upload.get_boundary", return_value=b"==2==") def test_unicode(self, mock_get_boundary): - data_unicode = u"\N{snowman}" + data_unicode = "\N{snowman}" # construct_multipart_request( ASSUMES callers pass bytes. - data = data_unicode.encode(u"utf-8") - metadata = {u"name": u"snowman.txt"} + data = data_unicode.encode("utf-8") + metadata = {"name": "snowman.txt"} content_type = BASIC_CONTENT payload, multipart_boundary = _upload.construct_multipart_request( data, metadata, content_type @@ -1137,7 +1129,7 @@ def test_exhausted_known_size(self): with pytest.raises(ValueError) as exc_info: _upload.get_next_chunk(stream, 1, len(data)) - exc_info.match(u"Stream is already exhausted. There is no content remaining.") + exc_info.match("Stream is already exhausted. 
There is no content remaining.") def test_exhausted_known_size_zero(self): stream = io.BytesIO(b"") @@ -1148,7 +1140,7 @@ def test_exhausted_known_size_zero_nonempty(self): stream = io.BytesIO(b"not empty WAT!") with pytest.raises(ValueError) as exc_info: _upload.get_next_chunk(stream, 1, 0) - exc_info.match(u"Stream specified as empty, but produced non-empty content.") + exc_info.match("Stream specified as empty, but produced non-empty content.") def test_success_known_size_lt_stream_size(self): data = b"0123456789" @@ -1161,9 +1153,9 @@ def test_success_known_size_lt_stream_size(self): result1 = _upload.get_next_chunk(stream, chunk_size, total_bytes) result2 = _upload.get_next_chunk(stream, chunk_size, total_bytes) - assert result0 == (0, b"012", u"bytes 0-2/8") - assert result1 == (3, b"345", u"bytes 3-5/8") - assert result2 == (6, b"67", u"bytes 6-7/8") + assert result0 == (0, b"012", "bytes 0-2/8") + assert result1 == (3, b"345", "bytes 3-5/8") + assert result2 == (6, b"67", "bytes 6-7/8") def test_success_known_size(self): data = b"0123456789" @@ -1175,10 +1167,10 @@ def test_success_known_size(self): result1 = _upload.get_next_chunk(stream, chunk_size, total_bytes) result2 = _upload.get_next_chunk(stream, chunk_size, total_bytes) result3 = _upload.get_next_chunk(stream, chunk_size, total_bytes) - assert result0 == (0, b"012", u"bytes 0-2/10") - assert result1 == (3, b"345", u"bytes 3-5/10") - assert result2 == (6, b"678", u"bytes 6-8/10") - assert result3 == (9, b"9", u"bytes 9-9/10") + assert result0 == (0, b"012", "bytes 0-2/10") + assert result1 == (3, b"345", "bytes 3-5/10") + assert result2 == (6, b"678", "bytes 6-8/10") + assert result3 == (9, b"9", "bytes 9-9/10") assert stream.tell() == total_bytes def test_success_unknown_size(self): @@ -1188,8 +1180,8 @@ def test_success_unknown_size(self): # Splits into 4 chunks: abcdef, ghij result0 = _upload.get_next_chunk(stream, chunk_size, None) result1 = _upload.get_next_chunk(stream, chunk_size, None) - assert result0 == (0, b"abcdef", u"bytes 0-5/*") - assert result1 == (chunk_size, b"ghij", u"bytes 6-9/10") + assert result0 == (0, b"abcdef", "bytes 0-5/*") + assert result1 == (chunk_size, b"ghij", "bytes 6-9/10") assert stream.tell() == len(data) # Do the same when the chunk size evenly divides len(data) @@ -1198,22 +1190,22 @@ def test_success_unknown_size(self): # Splits into 2 chunks: `data` and empty string result0 = _upload.get_next_chunk(stream, chunk_size, None) result1 = _upload.get_next_chunk(stream, chunk_size, None) - assert result0 == (0, data, u"bytes 0-9/*") - assert result1 == (len(data), b"", u"bytes */10") + assert result0 == (0, data, "bytes 0-9/*") + assert result1 == (len(data), b"", "bytes */10") assert stream.tell() == len(data) class Test_get_content_range(object): def test_known_size(self): result = _upload.get_content_range(5, 10, 40) - assert result == u"bytes 5-10/40" + assert result == "bytes 5-10/40" def test_unknown_size(self): result = _upload.get_content_range(1000, 10000, None) - assert result == u"bytes 1000-10000/*" + assert result == "bytes 1000-10000/*" -def _make_response(status_code=http_client.OK, headers=None, metadata=None): +def _make_response(status_code=http.client.OK, headers=None, metadata=None): headers = headers or {} return mock.Mock( headers=headers, diff --git a/tests/unit/test_common.py b/tests/unit/test_common.py index 1eb572e4..1ee4b34a 100644 --- a/tests/unit/test_common.py +++ b/tests/unit/test_common.py @@ -21,10 +21,10 @@ class TestInvalidResponse(object): def 
test_constructor(self): response = mock.sentinel.response - error = common.InvalidResponse(response, 1, u"a", [b"m"], True) + error = common.InvalidResponse(response, 1, "a", [b"m"], True) assert error.response is response - assert error.args == (1, u"a", [b"m"], True) + assert error.args == (1, "a", [b"m"], True) class TestRetryStrategy(object): diff --git a/tests_async/system/requests/conftest.py b/tests_async/system/requests/conftest.py index d617311b..f7513d3e 100644 --- a/tests_async/system/requests/conftest.py +++ b/tests_async/system/requests/conftest.py @@ -47,13 +47,13 @@ def _get_authorized_transport(): return tr_requests.AuthorizedSession(credentials) -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") async def authorized_transport(): credentials, project_id = default_async(scopes=(utils.GCS_RW_SCOPE,)) yield _get_authorized_transport() -@pytest.fixture(scope=u"session") +@pytest.fixture(scope="session") async def bucket(): authorized_transport = _get_authorized_transport() await ensure_bucket(authorized_transport) diff --git a/tests_async/system/requests/test_download.py b/tests_async/system/requests/test_download.py index a90b5202..ae9ec215 100644 --- a/tests_async/system/requests/test_download.py +++ b/tests_async/system/requests/test_download.py @@ -15,16 +15,15 @@ import base64 import copy import hashlib +import http.client import io import os +import asyncio from google.auth._default_async import default_async import google.auth.transport._aiohttp_requests as tr_requests -import pytest -from six.moves import http_client - -import asyncio import multidict +import pytest import google._async_resumable_media.requests as resumable_requests from google.resumable_media import _helpers @@ -34,18 +33,18 @@ CURR_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(CURR_DIR, u"..", u"..", u"data") -PLAIN_TEXT = u"text/plain" -IMAGE_JPEG = u"image/jpeg" +DATA_DIR = os.path.join(CURR_DIR, "..", "..", "data") +PLAIN_TEXT = "text/plain" +IMAGE_JPEG = "image/jpeg" ENCRYPTED_ERR = b"The target object is encrypted by a customer-supplied encryption key." -NO_BODY_ERR = u"The content for this response was already consumed" +NO_BODY_ERR = "The content for this response was already consumed" NOT_FOUND_ERR = ( b"No such object: " + utils.BUCKET_NAME.encode("utf-8") + b"/does-not-exist.txt" ) SIMPLE_DOWNLOADS = (resumable_requests.Download, resumable_requests.RawDownload) -@pytest.fixture(scope=u"session") +@pytest.fixture(scope="session") def event_loop(request): """Create an instance of the default event loop for each test session.""" loop = asyncio.get_event_loop_policy().new_event_loop() @@ -70,10 +69,10 @@ class CorruptingAuthorizedSession(tr_requests.AuthorizedSession): constructor. 
""" - EMPTY_MD5 = base64.b64encode(hashlib.md5(b"").digest()).decode(u"utf-8") + EMPTY_MD5 = base64.b64encode(hashlib.md5(b"").digest()).decode("utf-8") crc32c = _helpers._get_crc32c_object() crc32c.update(b"") - EMPTY_CRC32C = base64.b64encode(crc32c.digest()).decode(u"utf-8") + EMPTY_CRC32C = base64.b64encode(crc32c.digest()).decode("utf-8") async def request(self, method, url, data=None, headers=None, **kwargs): """Implementation of Requests' request.""" @@ -82,7 +81,7 @@ async def request(self, method, url, data=None, headers=None, **kwargs): ) temp = multidict.CIMultiDict(response.headers) - temp[_helpers._HASH_HEADER] = u"crc32c={},md5={}".format( + temp[_helpers._HASH_HEADER] = "crc32c={},md5={}".format( self.EMPTY_CRC32C, self.EMPTY_MD5 ) response._headers = temp @@ -96,11 +95,11 @@ def get_path(filename): ALL_FILES = ( { - u"path": get_path(u"image1.jpg"), - u"content_type": IMAGE_JPEG, - u"md5": u"1bsd83IYNug8hd+V1ING3Q==", - u"crc32c": u"YQGPxA==", - u"slices": ( + "path": get_path("image1.jpg"), + "content_type": IMAGE_JPEG, + "md5": "1bsd83IYNug8hd+V1ING3Q==", + "crc32c": "YQGPxA==", + "slices": ( slice(1024, 16386, None), # obj[1024:16386] slice(None, 8192, None), # obj[:8192] slice(-256, None, None), # obj[-256:] @@ -108,11 +107,11 @@ def get_path(filename): ), }, { - u"path": get_path(u"image2.jpg"), - u"content_type": IMAGE_JPEG, - u"md5": u"gdLXJltiYAMP9WZZFEQI1Q==", - u"crc32c": u"sxxEFQ==", - u"slices": ( + "path": get_path("image2.jpg"), + "content_type": IMAGE_JPEG, + "md5": "gdLXJltiYAMP9WZZFEQI1Q==", + "crc32c": "sxxEFQ==", + "slices": ( slice(1024, 16386, None), # obj[1024:16386] slice(None, 8192, None), # obj[:8192] slice(-256, None, None), # obj[-256:] @@ -120,62 +119,62 @@ def get_path(filename): ), }, { - u"path": get_path(u"file.txt"), - u"content_type": PLAIN_TEXT, - u"md5": u"XHSHAr/SpIeZtZbjgQ4nGw==", - u"crc32c": u"MeMHoQ==", - u"slices": (), + "path": get_path("file.txt"), + "content_type": PLAIN_TEXT, + "md5": "XHSHAr/SpIeZtZbjgQ4nGw==", + "crc32c": "MeMHoQ==", + "slices": (), }, { - u"path": get_path(u"gzipped.txt.gz"), - u"uncompressed": get_path(u"gzipped.txt"), - u"content_type": PLAIN_TEXT, - u"md5": u"KHRs/+ZSrc/FuuR4qz/PZQ==", - u"crc32c": u"/LIRNg==", - u"slices": (), - u"metadata": {u"contentEncoding": u"gzip"}, + "path": get_path("gzipped.txt.gz"), + "uncompressed": get_path("gzipped.txt"), + "content_type": PLAIN_TEXT, + "md5": "KHRs/+ZSrc/FuuR4qz/PZQ==", + "crc32c": "/LIRNg==", + "slices": (), + "metadata": {"contentEncoding": "gzip"}, }, ) def get_contents_for_upload(info): - with open(info[u"path"], u"rb") as file_obj: + with open(info["path"], "rb") as file_obj: return file_obj.read() def get_contents(info): - full_path = info.get(u"uncompressed", info[u"path"]) - with open(full_path, u"rb") as file_obj: + full_path = info.get("uncompressed", info["path"]) + with open(full_path, "rb") as file_obj: return file_obj.read() def get_raw_contents(info): - full_path = info[u"path"] - with open(full_path, u"rb") as file_obj: + full_path = info["path"] + with open(full_path, "rb") as file_obj: return file_obj.read() def get_blob_name(info): - full_path = info.get(u"uncompressed", info[u"path"]) + full_path = info.get("uncompressed", info["path"]) return os.path.basename(full_path) async def delete_blob(transport, blob_name): metadata_url = utils.METADATA_URL_TEMPLATE.format(blob_name=blob_name) response = await transport.request("DELETE", metadata_url) - assert response.status == http_client.NO_CONTENT + assert response.status == 
http.client.NO_CONTENT -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") async def secret_file(authorized_transport, bucket): - blob_name = u"super-seekrit.txt" + blob_name = "super-seekrit.txt" data = b"Please do not tell anyone my encrypted seekrit." upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) headers = utils.get_encryption_headers() upload = resumable_requests.SimpleUpload(upload_url, headers=headers) response = await upload.transmit(authorized_transport, data, PLAIN_TEXT) - assert response.status == http_client.OK + assert response.status == http.client.OK yield blob_name, data, headers @@ -183,27 +182,27 @@ async def secret_file(authorized_transport, bucket): # Transport that returns corrupt data, so we can exercise checksum handling. -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") async def corrupting_transport(): credentials, _ = default_async(scopes=(utils.GCS_RW_SCOPE,)) yield CorruptingAuthorizedSession(credentials) -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") async def simple_file(authorized_transport, bucket): - blob_name = u"basic-file.txt" + blob_name = "basic-file.txt" upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) upload = resumable_requests.SimpleUpload(upload_url) data = b"Simple contents" response = await upload.transmit(authorized_transport, data, PLAIN_TEXT) - assert response.status == http_client.OK + assert response.status == http.client.OK yield blob_name, data await delete_blob(authorized_transport, blob_name) -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") async def add_files(authorized_transport, bucket): blob_names = [] for info in ALL_FILES: @@ -211,21 +210,21 @@ async def add_files(authorized_transport, bucket): blob_name = get_blob_name(info) blob_names.append(blob_name) - if u"metadata" in info: + if "metadata" in info: upload = resumable_requests.MultipartUpload(utils.MULTIPART_UPLOAD) - metadata = copy.deepcopy(info[u"metadata"]) - metadata[u"name"] = blob_name + metadata = copy.deepcopy(info["metadata"]) + metadata["name"] = blob_name response = await upload.transmit( - authorized_transport, to_upload, metadata, info[u"content_type"] + authorized_transport, to_upload, metadata, info["content_type"] ) else: upload_url = utils.SIMPLE_UPLOAD_TEMPLATE.format(blob_name=blob_name) upload = resumable_requests.SimpleUpload(upload_url) response = await upload.transmit( - authorized_transport, to_upload, info[u"content_type"] + authorized_transport, to_upload, info["content_type"] ) - assert response.status == http_client.OK + assert response.status == http.client.OK yield @@ -239,11 +238,11 @@ async def check_tombstoned(download, transport): if isinstance(download, SIMPLE_DOWNLOADS): with pytest.raises(ValueError) as exc_info: await download.consume(transport) - assert exc_info.match(u"A download can only be used once.") + assert exc_info.match("A download can only be used once.") else: with pytest.raises(ValueError) as exc_info: await download.consume_next_chunk(transport) - assert exc_info.match(u"Download has finished.") + assert exc_info.match("Download has finished.") async def check_error_response(exc_info, status_code, message): @@ -254,8 +253,8 @@ async def check_error_response(exc_info, status_code, message): assert content.startswith(message) assert len(error.args) == 5 assert error.args[1] == status_code - assert error.args[3] == http_client.OK - assert error.args[4] == http_client.PARTIAL_CONTENT + assert error.args[3] == 
http.client.OK
+    assert error.args[4] == http.client.PARTIAL_CONTENT


 class TestDownload(object):
@@ -288,7 +287,7 @@ async def test_download_full(self, add_files, authorized_transport, checksum):
             # Consume the resource.
             response = await download.consume(authorized_transport)
             response = tr_requests._CombinedResponse(response)
-            assert response.status == http_client.OK
+            assert response.status == http.client.OK
             content = await self._read_response_content(response)
             assert content == actual_contents
             await check_tombstoned(download, authorized_transport)
@@ -301,7 +300,7 @@ async def test_extra_headers(self, authorized_transport, secret_file):
         download = self._make_one(media_url, headers=headers)
         # Consume the resource.
         response = await download.consume(authorized_transport)
-        assert response.status == http_client.OK
+        assert response.status == http.client.OK
         content = await response.content.read()
         assert content == data
         await check_tombstoned(download, authorized_transport)
@@ -313,19 +312,19 @@ async def test_extra_headers(self, authorized_transport, secret_file):
         with pytest.raises(common.InvalidResponse) as exc_info:
             await download_wo.consume(authorized_transport)

-        await check_error_response(exc_info, http_client.BAD_REQUEST, ENCRYPTED_ERR)
+        await check_error_response(exc_info, http.client.BAD_REQUEST, ENCRYPTED_ERR)
         await check_tombstoned(download_wo, authorized_transport)

     @pytest.mark.asyncio
     async def test_non_existent_file(self, authorized_transport, bucket):
-        blob_name = u"does-not-exist.txt"
+        blob_name = "does-not-exist.txt"
         media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name)
         download = self._make_one(media_url)
         # Try to consume the resource and fail.
         with pytest.raises(common.InvalidResponse) as exc_info:
             await download.consume(authorized_transport)

-        await check_error_response(exc_info, http_client.NOT_FOUND, NOT_FOUND_ERR)
+        await check_error_response(exc_info, http.client.NOT_FOUND, NOT_FOUND_ERR)
         await check_tombstoned(download, authorized_transport)

     @pytest.mark.asyncio
@@ -345,7 +344,7 @@ async def test_bad_range(self, simple_file, authorized_transport):

         await check_error_response(
             exc_info,
-            http_client.REQUESTED_RANGE_NOT_SATISFIABLE,
+            http.client.REQUESTED_RANGE_NOT_SATISFIABLE,
             b"Request range not satisfiable",
         )
         await check_tombstoned(download, authorized_transport)
@@ -366,10 +365,10 @@ async def test_download_partial(self, add_files, authorized_transport):
             blob_name = get_blob_name(info)

             media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name)
-            for slice_ in info[u"slices"]:
+            for slice_ in info["slices"]:
                 download = self._download_slice(media_url, slice_)
                 response = await download.consume(authorized_transport)
-                assert response.status == http_client.PARTIAL_CONTENT
+                assert response.status == http.client.PARTIAL_CONTENT
                 content = await response.content.read()
                 assert content == actual_contents[slice_]
                 with pytest.raises(ValueError):
@@ -465,7 +464,7 @@ async def consume_chunks(download, authorized_transport, total_bytes, actual_con
         next_byte = min(start_byte + download.chunk_size, end_byte + 1)
         assert download.bytes_downloaded == next_byte - download.start
         assert download.total_bytes == total_bytes
-        assert response.status == http_client.PARTIAL_CONTENT
+        assert response.status == http.client.PARTIAL_CONTENT
         # NOTE: Due to the consumption of the stream in the response, the
         # response object for async requests will be EOF at this point.
In # sync versions we could compare the content with the range of @@ -497,7 +496,7 @@ async def test_chunked_download_partial(self, add_files, authorized_transport): blob_name = get_blob_name(info) media_url = utils.DOWNLOAD_URL_TEMPLATE.format(blob_name=blob_name) - for slice_ in info[u"slices"]: + for slice_ in info["slices"]: # Manually replace a missing start with 0. start = 0 if slice_.start is None else slice_.start # Chunked downloads don't support a negative index. @@ -569,7 +568,7 @@ async def test_chunked_with_extra_headers(self, authorized_transport, secret_fil await download_wo.consume_next_chunk(authorized_transport) assert stream_wo.tell() == 0 - await check_error_response(exc_info, http_client.BAD_REQUEST, ENCRYPTED_ERR) + await check_error_response(exc_info, http.client.BAD_REQUEST, ENCRYPTED_ERR) assert download_wo.invalid diff --git a/tests_async/system/requests/test_upload.py b/tests_async/system/requests/test_upload.py index 6c1a7258..c943033e 100644 --- a/tests_async/system/requests/test_upload.py +++ b/tests_async/system/requests/test_upload.py @@ -14,31 +14,29 @@ import base64 import hashlib +import http.client import io import os +import urllib.parse +import asyncio import mock import pytest -from six.moves import http_client -from six.moves import urllib_parse - -import asyncio from google.resumable_media import common from google import _async_resumable_media import google._async_resumable_media.requests as resumable_requests - from google.resumable_media import _helpers from tests.system import utils CURR_DIR = os.path.dirname(os.path.realpath(__file__)) -DATA_DIR = os.path.join(CURR_DIR, u"..", u"..", u"data") -ICO_FILE = os.path.realpath(os.path.join(DATA_DIR, u"favicon.ico")) -IMAGE_FILE = os.path.realpath(os.path.join(DATA_DIR, u"image1.jpg")) -ICO_CONTENT_TYPE = u"image/x-icon" -JPEG_CONTENT_TYPE = u"image/jpeg" -BYTES_CONTENT_TYPE = u"application/octet-stream" +DATA_DIR = os.path.join(CURR_DIR, "..", "..", "data") +ICO_FILE = os.path.realpath(os.path.join(DATA_DIR, "favicon.ico")) +IMAGE_FILE = os.path.realpath(os.path.join(DATA_DIR, "image1.jpg")) +ICO_CONTENT_TYPE = "image/x-icon" +JPEG_CONTENT_TYPE = "image/jpeg" +BYTES_CONTENT_TYPE = "application/octet-stream" BAD_CHUNK_SIZE_MSG = ( b"Invalid request. The number of bytes uploaded is required to be equal " b"or greater than 262144, except for the final request (it's recommended " @@ -47,7 +45,7 @@ ) -@pytest.fixture(scope=u"session") +@pytest.fixture(scope="session") def event_loop(request): """Create an instance of the default event loop for each test session.""" loop = asyncio.get_event_loop_policy().new_event_loop() @@ -67,7 +65,7 @@ async def add_cleanup(blob_name, transport): for blob_name, transport in to_delete: metadata_url = utils.METADATA_URL_TEMPLATE.format(blob_name=blob_name) response = await transport.request("DELETE", metadata_url) - assert response.status == http_client.NO_CONTENT + assert response.status == http.client.NO_CONTENT @pytest.fixture @@ -77,7 +75,7 @@ def img_stream(): This is so that an entire test can execute in the context of the context manager without worrying about closing the file. 
""" - with open(IMAGE_FILE, u"rb") as file_obj: + with open(IMAGE_FILE, "rb") as file_obj: yield file_obj @@ -87,10 +85,10 @@ def get_md5(data): def get_upload_id(upload_url): - parse_result = urllib_parse.urlparse(upload_url) - parsed_query = urllib_parse.parse_qs(parse_result.query) + parse_result = urllib.parse.urlparse(upload_url) + parsed_query = urllib.parse.parse_qs(parse_result.query) # NOTE: We are unpacking here, so asserting exactly one match. - (upload_id,) = parsed_query[u"upload_id"] + (upload_id,) = parsed_query["upload_id"] return upload_id @@ -109,23 +107,23 @@ async def check_response( metadata=None, content_type=ICO_CONTENT_TYPE, ): - assert response.status == http_client.OK + assert response.status == http.client.OK json_response = await response.json() - assert json_response[u"bucket"] == utils.BUCKET_NAME - assert json_response[u"contentType"] == content_type + assert json_response["bucket"] == utils.BUCKET_NAME + assert json_response["contentType"] == content_type if actual_contents is not None: - md5_hash = json_response[u"md5Hash"].encode(u"ascii") + md5_hash = json_response["md5Hash"].encode("ascii") assert md5_hash == get_md5(actual_contents) total_bytes = len(actual_contents) - assert json_response[u"metageneration"] == u"1" - assert json_response[u"name"] == blob_name - assert json_response[u"size"] == u"{:d}".format(total_bytes) - assert json_response[u"storageClass"] == u"STANDARD" + assert json_response["metageneration"] == "1" + assert json_response["name"] == blob_name + assert json_response["size"] == "{:d}".format(total_bytes) + assert json_response["storageClass"] == "STANDARD" if metadata is None: - assert u"metadata" not in json_response + assert "metadata" not in json_response else: - assert json_response[u"metadata"] == metadata + assert json_response["metadata"] == metadata async def check_content(blob_name, expected_content, transport, headers=None): @@ -133,7 +131,7 @@ async def check_content(blob_name, expected_content, transport, headers=None): download = resumable_requests.Download(media_url, headers=headers) response = await download.consume(transport) content = await response.content.read() - assert response.status == http_client.OK + assert response.status == http.client.OK assert content == expected_content @@ -153,21 +151,21 @@ async def check_does_not_exist(transport, blob_name): metadata_url = utils.METADATA_URL_TEMPLATE.format(blob_name=blob_name) # Make sure we are creating a **new** object. response = await transport.request("GET", metadata_url) - assert response.status == http_client.NOT_FOUND + assert response.status == http.client.NOT_FOUND async def check_initiate(response, upload, stream, transport, metadata): - assert response.status == http_client.OK + assert response.status == http.client.OK content = await response.content.read() assert content == b"" upload_id = get_upload_id(upload.resumable_url) - assert response.headers[u"x-guploader-uploadid"] == upload_id + assert response.headers["x-guploader-uploadid"] == upload_id assert stream.tell() == 0 # Make sure the upload cannot be re-initiated. 
with pytest.raises(ValueError) as exc_info: await upload.initiate(transport, stream, metadata, JPEG_CONTENT_TYPE) - exc_info.match(u"This upload has already been initiated.") + exc_info.match("This upload has already been initiated.") async def check_bad_chunk(upload, transport): @@ -175,7 +173,7 @@ async def check_bad_chunk(upload, transport): await upload.transmit_next_chunk(transport) error = exc_info.value response = error.response - assert response.status == http_client.BAD_REQUEST + assert response.status == http.client.BAD_REQUEST content = await response.content.read() assert content == BAD_CHUNK_SIZE_MSG @@ -204,7 +202,7 @@ async def transmit_chunks( @pytest.mark.asyncio async def test_simple_upload(authorized_transport, bucket, cleanup): - with open(ICO_FILE, u"rb") as file_obj: + with open(ICO_FILE, "rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -231,7 +229,7 @@ async def test_simple_upload(authorized_transport, bucket, cleanup): @pytest.mark.asyncio async def test_simple_upload_with_headers(authorized_transport, bucket, cleanup): - blob_name = u"some-stuff.bin" + blob_name = "some-stuff.bin" # Make sure to clean up the uploaded blob when we are done. await cleanup(blob_name, authorized_transport) await check_does_not_exist(authorized_transport, blob_name) @@ -254,7 +252,7 @@ async def test_simple_upload_with_headers(authorized_transport, bucket, cleanup) @pytest.mark.asyncio async def test_multipart_upload(authorized_transport, bucket, cleanup): - with open(ICO_FILE, u"rb") as file_obj: + with open(ICO_FILE, "rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -266,7 +264,7 @@ async def test_multipart_upload(authorized_transport, bucket, cleanup): upload_url = utils.MULTIPART_UPLOAD upload = resumable_requests.MultipartUpload(upload_url) # Transmit the resource. - metadata = {u"name": blob_name, u"metadata": {u"color": u"yellow"}} + metadata = {"name": blob_name, "metadata": {"color": "yellow"}} response = await upload.transmit( authorized_transport, actual_contents, metadata, ICO_CONTENT_TYPE ) @@ -274,7 +272,7 @@ async def test_multipart_upload(authorized_transport, bucket, cleanup): response, blob_name, actual_contents=actual_contents, - metadata=metadata[u"metadata"], + metadata=metadata["metadata"], ) # Download the content to make sure it's "working as expected". await check_content(blob_name, actual_contents, authorized_transport) @@ -284,12 +282,12 @@ async def test_multipart_upload(authorized_transport, bucket, cleanup): ) -@pytest.mark.parametrize("checksum", [u"md5", u"crc32c"]) +@pytest.mark.parametrize("checksum", ["md5", "crc32c"]) @pytest.mark.asyncio async def test_multipart_upload_with_bad_checksum( authorized_transport, checksum, bucket ): - with open(ICO_FILE, u"rb") as file_obj: + with open(ICO_FILE, "rb") as file_obj: actual_contents = file_obj.read() blob_name = os.path.basename(ICO_FILE) @@ -299,7 +297,7 @@ async def test_multipart_upload_with_bad_checksum( upload_url = utils.MULTIPART_UPLOAD upload = resumable_requests.MultipartUpload(upload_url, checksum=checksum) # Transmit the resource. 
- metadata = {u"name": blob_name, u"metadata": {u"color": u"yellow"}} + metadata = {"name": blob_name, "metadata": {"color": "yellow"}} fake_checksum_object = _helpers._get_checksum_object(checksum) fake_checksum_object.update(b"bad data") fake_prepared_checksum_digest = _helpers.prepare_checksum_digest( @@ -326,7 +324,7 @@ async def test_multipart_upload_with_bad_checksum( @pytest.mark.asyncio async def test_multipart_upload_with_headers(authorized_transport, bucket, cleanup): - blob_name = u"some-multipart-stuff.bin" + blob_name = "some-multipart-stuff.bin" # Make sure to clean up the uploaded blob when we are done. await cleanup(blob_name, authorized_transport) await check_does_not_exist(authorized_transport, blob_name) @@ -336,7 +334,7 @@ async def test_multipart_upload_with_headers(authorized_transport, bucket, clean headers = utils.get_encryption_headers() upload = resumable_requests.MultipartUpload(upload_url, headers=headers) # Transmit the resource. - metadata = {u"name": blob_name} + metadata = {"name": blob_name} data = b"Other binary contents\x03\x04\x05." response = await upload.transmit( authorized_transport, data, metadata, BYTES_CONTENT_TYPE @@ -365,7 +363,7 @@ async def _resumable_upload_helper( utils.RESUMABLE_UPLOAD, chunk_size, headers=headers, checksum=checksum ) # Initiate the upload. - metadata = {u"name": blob_name, u"metadata": {u"direction": u"north"}} + metadata = {"name": blob_name, "metadata": {"direction": "north"}} response = await upload.initiate( authorized_transport, stream, metadata, JPEG_CONTENT_TYPE ) @@ -373,7 +371,7 @@ async def _resumable_upload_helper( await check_initiate(response, upload, stream, authorized_transport, metadata) # Actually upload the file in chunks. num_chunks = await transmit_chunks( - upload, authorized_transport, blob_name, metadata[u"metadata"] + upload, authorized_transport, blob_name, metadata["metadata"] ) assert num_chunks == get_num_chunks(upload.total_bytes, chunk_size) # Download the content to make sure it's "working as expected". @@ -401,7 +399,7 @@ async def test_resumable_upload_with_headers( ) -@pytest.mark.parametrize("checksum", [u"md5", u"crc32c"]) +@pytest.mark.parametrize("checksum", ["md5", "crc32c"]) @pytest.mark.asyncio async def test_resumable_upload_with_bad_checksum( authorized_transport, img_stream, bucket, cleanup, checksum @@ -441,7 +439,7 @@ async def test_resumable_upload_bad_chunk_size(authorized_transport, img_stream) upload._chunk_size = 1024 assert upload._chunk_size < _async_resumable_media.UPLOAD_CHUNK_SIZE # Initiate the upload. - metadata = {u"name": blob_name} + metadata = {"name": blob_name} response = await upload.initiate( authorized_transport, img_stream, metadata, JPEG_CONTENT_TYPE ) @@ -473,7 +471,7 @@ async def sabotage_and_recover(upload, stream, transport, chunk_size): async def _resumable_upload_recover_helper(authorized_transport, cleanup, headers=None): - blob_name = u"some-bytes.bin" + blob_name = "some-bytes.bin" chunk_size = _async_resumable_media.UPLOAD_CHUNK_SIZE data = b"123" * chunk_size # 3 chunks worth. # Make sure to clean up the uploaded blob when we are done. @@ -484,7 +482,7 @@ async def _resumable_upload_recover_helper(authorized_transport, cleanup, header utils.RESUMABLE_UPLOAD, chunk_size, headers=headers ) # Initiate the upload. 
- metadata = {u"name": blob_name} + metadata = {"name": blob_name} stream = io.BytesIO(data) response = await upload.initiate( authorized_transport, stream, metadata, BYTES_CONTENT_TYPE @@ -535,16 +533,16 @@ class TestResumableUploadUnknownSize(object): def _check_range_sent(response, start, end, total): headers_sent = response.request_info.headers if start is None and end is None: - expected_content_range = u"bytes */{:d}".format(total) + expected_content_range = "bytes */{:d}".format(total) else: # Allow total to be an int or a string "*" - expected_content_range = u"bytes {:d}-{:d}/{}".format(start, end, total) + expected_content_range = "bytes {:d}-{:d}/{}".format(start, end, total) - assert headers_sent[u"content-range"] == expected_content_range + assert headers_sent["content-range"] == expected_content_range @staticmethod def _check_range_received(response, size): - assert response.headers[u"range"] == u"bytes=0-{:d}".format(size - 1) + assert response.headers["range"] == "bytes=0-{:d}".format(size - 1) async def _check_partial(self, upload, response, chunk_size, num_chunks): start_byte = (num_chunks - 1) * chunk_size @@ -556,7 +554,7 @@ async def _check_partial(self, upload, response, chunk_size, num_chunks): content = await response.content.read() assert content == b"" - self._check_range_sent(response, start_byte, end_byte, u"*") + self._check_range_sent(response, start_byte, end_byte, "*") self._check_range_received(response, end_byte + 1) @pytest.mark.asyncio @@ -572,8 +570,8 @@ async def test_smaller_than_chunk_size(self, authorized_transport, bucket, clean # Create the actual upload object. upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD, chunk_size) # Initiate the upload. - metadata = {u"name": blob_name} - with open(ICO_FILE, u"rb") as stream: + metadata = {"name": blob_name} + with open(ICO_FILE, "rb") as stream: response = await upload.initiate( authorized_transport, stream, @@ -600,7 +598,7 @@ async def test_smaller_than_chunk_size(self, authorized_transport, bucket, clean @pytest.mark.asyncio async def test_finish_at_chunk(self, authorized_transport, bucket, cleanup): - blob_name = u"some-clean-stuff.bin" + blob_name = "some-clean-stuff.bin" chunk_size = _async_resumable_media.UPLOAD_CHUNK_SIZE # Make sure to clean up the uploaded blob when we are done. await cleanup(blob_name, authorized_transport) @@ -612,7 +610,7 @@ async def test_finish_at_chunk(self, authorized_transport, bucket, cleanup): # Create the actual upload object. upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD, chunk_size) # Initiate the upload. - metadata = {u"name": blob_name} + metadata = {"name": blob_name} response = await upload.initiate( authorized_transport, stream, @@ -653,7 +651,7 @@ def _add_bytes(stream, data): @pytest.mark.asyncio async def test_interleave_writes(self, authorized_transport, bucket, cleanup): - blob_name = u"some-moar-stuff.bin" + blob_name = "some-moar-stuff.bin" chunk_size = _async_resumable_media.UPLOAD_CHUNK_SIZE # Make sure to clean up the uploaded blob when we are done. await cleanup(blob_name, authorized_transport) @@ -663,7 +661,7 @@ async def test_interleave_writes(self, authorized_transport, bucket, cleanup): # Create the actual upload object. upload = resumable_requests.ResumableUpload(utils.RESUMABLE_UPLOAD, chunk_size) # Initiate the upload. 
- metadata = {u"name": blob_name} + metadata = {"name": blob_name} response = await upload.initiate( authorized_transport, stream, diff --git a/tests_async/system/utils.py b/tests_async/system/utils.py index 25ee5378..620b2c99 100644 --- a/tests_async/system/utils.py +++ b/tests_async/system/utils.py @@ -17,26 +17,26 @@ import time -BUCKET_NAME = u"grpm-systest-{}".format(int(1000 * time.time())) -BUCKET_POST_URL = u"https://www.googleapis.com/storage/v1/b/" -BUCKET_URL = u"https://www.googleapis.com/storage/v1/b/{}".format(BUCKET_NAME) +BUCKET_NAME = "grpm-systest-{}".format(int(1000 * time.time())) +BUCKET_POST_URL = "https://www.googleapis.com/storage/v1/b/" +BUCKET_URL = "https://www.googleapis.com/storage/v1/b/{}".format(BUCKET_NAME) -_DOWNLOAD_BASE = u"https://www.googleapis.com/download/storage/v1/b/{}".format( +_DOWNLOAD_BASE = "https://www.googleapis.com/download/storage/v1/b/{}".format( BUCKET_NAME ) -DOWNLOAD_URL_TEMPLATE = _DOWNLOAD_BASE + u"/o/{blob_name}?alt=media" +DOWNLOAD_URL_TEMPLATE = _DOWNLOAD_BASE + "/o/{blob_name}?alt=media" _UPLOAD_BASE = ( - u"https://www.googleapis.com/upload/storage/v1/b/{}".format(BUCKET_NAME) - + u"/o?uploadType=" + "https://www.googleapis.com/upload/storage/v1/b/{}".format(BUCKET_NAME) + + "/o?uploadType=" ) -SIMPLE_UPLOAD_TEMPLATE = _UPLOAD_BASE + u"media&name={blob_name}" -MULTIPART_UPLOAD = _UPLOAD_BASE + u"multipart" -RESUMABLE_UPLOAD = _UPLOAD_BASE + u"resumable" +SIMPLE_UPLOAD_TEMPLATE = _UPLOAD_BASE + "media&name={blob_name}" +MULTIPART_UPLOAD = _UPLOAD_BASE + "multipart" +RESUMABLE_UPLOAD = _UPLOAD_BASE + "resumable" -METADATA_URL_TEMPLATE = BUCKET_URL + u"/o/{blob_name}" +METADATA_URL_TEMPLATE = BUCKET_URL + "/o/{blob_name}" -GCS_RW_SCOPE = u"https://www.googleapis.com/auth/devstorage.read_write" +GCS_RW_SCOPE = "https://www.googleapis.com/auth/devstorage.read_write" # Generated using random.choice() with all 256 byte choices. ENCRYPTION_KEY = ( b"R\xb8\x1b\x94T\xea_\xa8\x93\xae\xd1\xf6\xfca\x15\x0ekA" @@ -63,7 +63,7 @@ def get_encryption_headers(key=ENCRYPTION_KEY): key_b64 = base64.b64encode(key) return { - u"x-goog-encryption-algorithm": u"AES256", - u"x-goog-encryption-key": key_b64.decode(u"utf-8"), - u"x-goog-encryption-key-sha256": key_hash_b64.decode(u"utf-8"), + "x-goog-encryption-algorithm": "AES256", + "x-goog-encryption-key": key_b64.decode("utf-8"), + "x-goog-encryption-key-sha256": key_hash_b64.decode("utf-8"), } diff --git a/tests_async/unit/requests/test__helpers.py b/tests_async/unit/requests/test__helpers.py index 4e79e4c5..6e412b12 100644 --- a/tests_async/unit/requests/test__helpers.py +++ b/tests_async/unit/requests/test__helpers.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import aiohttp +import http.client import io + +import aiohttp import mock -from six.moves import http_client import pytest from google._async_resumable_media.requests import _request_helpers as _helpers @@ -26,12 +27,12 @@ class TestRequestsMixin(object): def test__get_status_code(self): - status_code = int(http_client.OK) + status_code = int(http.client.OK) response = _make_response(status_code) assert status_code == _helpers.RequestsMixin._get_status_code(response) def test__get_headers(self): - headers = {u"fruit": u"apple"} + headers = {"fruit": "apple"} response = mock.Mock( _headers=headers, headers=headers, spec=["_headers", "headers"] ) @@ -69,11 +70,11 @@ async def test__get_body(self): @pytest.mark.asyncio async def test_http_request(): - transport, response = _make_transport(http_client.OK) - method = u"POST" - url = u"http://test.invalid" + transport, response = _make_transport(http.client.OK) + method = "POST" + url = "http://test.invalid" data = mock.sentinel.data - headers = {u"one": u"fish", u"blue": u"fish"} + headers = {"one": "fish", "blue": "fish"} timeout = mock.sentinel.timeout ret_val = await _helpers.http_request( transport, @@ -100,9 +101,9 @@ async def test_http_request(): @pytest.mark.asyncio async def test_http_request_defaults(): - transport, response = _make_transport(http_client.OK) - method = u"POST" - url = u"http://test.invalid" + transport, response = _make_transport(http.client.OK) + method = "POST" + url = "http://test.invalid" ret_val = await _helpers.http_request(transport, method, url) assert ret_val is response diff --git a/tests_async/unit/requests/test_download.py b/tests_async/unit/requests/test_download.py index b1c85fdd..60c3da5e 100644 --- a/tests_async/unit/requests/test_download.py +++ b/tests_async/unit/requests/test_download.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import http.client import io import aiohttp import mock import pytest -from six.moves import http_client from google.resumable_media import common @@ -55,7 +55,7 @@ async def test__write_to_stream_with_hash_check_success(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - header_value = u"crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" + header_value = "crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -75,8 +75,8 @@ async def test__write_to_stream_with_hash_check_fail(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." 
- bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -88,10 +88,10 @@ async def test__write_to_stream_with_hash_check_fail(self, checksum): error = exc_info.value assert error.response is response assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"fPAJHnnoi/+NadyNxT2c2w==" + if checksum == "md5": + good_checksum = "fPAJHnnoi/+NadyNxT2c2w==" else: - good_checksum = u"qmNCyg==" + good_checksum = "qmNCyg==" msg = download_mod._CHECKSUM_MISMATCH.format( sync_test.EXAMPLE_URL, bad_checksum, @@ -112,8 +112,8 @@ async def test__write_to_stream_with_invalid_checksum_type(self): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -153,20 +153,20 @@ async def _consume_helper( assert ret_val is transport.request.return_value called_kwargs = { - u"data": None, - u"headers": download._headers, - u"timeout": EXPECTED_TIMEOUT if timeout is None else timeout, + "data": None, + "headers": download._headers, + "timeout": EXPECTED_TIMEOUT if timeout is None else timeout, } if chunks: assert stream is not None - called_kwargs[u"stream"] = True + called_kwargs["stream"] = True transport.request.assert_called_once_with( - u"GET", sync_test.EXAMPLE_URL, **called_kwargs + "GET", sync_test.EXAMPLE_URL, **called_kwargs ) - range_bytes = u"bytes={:d}-{:d}".format(0, end) - assert download._headers[u"range"] == range_bytes + range_bytes = "bytes={:d}-{:d}".format(0, end) + assert download._headers["range"] == range_bytes assert download.finished return transport @@ -194,7 +194,7 @@ async def test_consume_with_stream(self, checksum): async def test_consume_with_stream_hash_check_success(self, checksum): stream = io.BytesIO() chunks = (b"up down ", b"charlie ", b"brown") - header_value = u"crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" + header_value = "crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" headers = {_helpers._HASH_HEADER: header_value} await self._consume_helper( stream=stream, chunks=chunks, response_headers=headers, checksum=checksum @@ -211,8 +211,8 @@ async def test_consume_with_stream_hash_check_fail(self, checksum): ) chunks = (b"zero zero", b"niner tango") - bad_checksum = u"anVzdCBub3QgdGhpcyAxLA==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "anVzdCBub3QgdGhpcyAxLA==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} transport = mock.AsyncMock(spec=["request"]) @@ -230,10 +230,10 @@ async def test_consume_with_stream_hash_check_fail(self, checksum): error = exc_info.value assert error.response is transport.request.return_value assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"1A/dxEpys717C6FH7FIWDw==" + if checksum == "md5": + good_checksum = "1A/dxEpys717C6FH7FIWDw==" else: - good_checksum = u"GvNZlg==" + good_checksum = 
"GvNZlg==" msg = download_mod._CHECKSUM_MISMATCH.format( sync_test.EXAMPLE_URL, bad_checksum, @@ -244,7 +244,7 @@ async def test_consume_with_stream_hash_check_fail(self, checksum): # Check mocks. transport.request.assert_called_once_with( - u"GET", + "GET", sync_test.EXAMPLE_URL, data=None, headers={}, @@ -257,9 +257,9 @@ async def test_consume_with_headers(self): headers = {} # Empty headers end = 16383 await self._consume_helper(end=end, headers=headers) - range_bytes = u"bytes={:d}-{:d}".format(0, end) + range_bytes = "bytes={:d}-{:d}".format(0, end) # Make sure the headers have been modified. - assert headers == {u"range": range_bytes} + assert headers == {"range": range_bytes} class TestRawDownload(object): @@ -287,7 +287,7 @@ async def test__write_to_stream_with_hash_check_success(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - header_value = u"crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" + header_value = "crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w==" headers = {_helpers._HASH_HEADER: header_value} response = _mock_raw_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -307,8 +307,8 @@ async def test__write_to_stream_with_hash_check_fail(self, checksum): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." - bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_raw_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -320,10 +320,10 @@ async def test__write_to_stream_with_hash_check_fail(self, checksum): error = exc_info.value assert error.response is response assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"fPAJHnnoi/+NadyNxT2c2w==" + if checksum == "md5": + good_checksum = "fPAJHnnoi/+NadyNxT2c2w==" else: - good_checksum = u"qmNCyg==" + good_checksum = "qmNCyg==" msg = download_mod._CHECKSUM_MISMATCH.format( sync_test.EXAMPLE_URL, bad_checksum, @@ -344,8 +344,8 @@ async def test__write_to_stream_with_invalid_checksum_type(self): chunk1 = b"first chunk, count starting at 0. " chunk2 = b"second chunk, or chunk 1, which is better? " chunk3 = b"ordinals and numerals and stuff." 
- bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "d3JvbmcgbiBtYWRlIHVwIQ==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers) @@ -382,15 +382,15 @@ async def _consume_helper( if chunks: assert stream is not None transport.request.assert_called_once_with( - u"GET", + "GET", sync_test.EXAMPLE_URL, data=None, headers=download._headers, timeout=EXPECTED_TIMEOUT, ) - range_bytes = u"bytes={:d}-{:d}".format(0, end) - assert download._headers[u"range"] == range_bytes + range_bytes = "bytes={:d}-{:d}".format(0, end) + assert download._headers["range"] == range_bytes assert download.finished return transport @@ -413,7 +413,7 @@ async def test_consume_with_stream(self, checksum): async def test_consume_with_stream_hash_check_success(self, checksum): stream = io.BytesIO() chunks = (b"up down ", b"charlie ", b"brown") - header_value = u"crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" + header_value = "crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ==" headers = {_helpers._HASH_HEADER: header_value} await self._consume_helper( @@ -431,8 +431,8 @@ async def test_consume_with_stream_hash_check_fail(self, checksum): ) chunks = (b"zero zero", b"niner tango") - bad_checksum = u"anVzdCBub3QgdGhpcyAxLA==" - header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum) + bad_checksum = "anVzdCBub3QgdGhpcyAxLA==" + header_value = "crc32c={bad},md5={bad}".format(bad=bad_checksum) headers = {_helpers._HASH_HEADER: header_value} transport = mock.AsyncMock(spec=["request"]) mockResponse = _mock_raw_response(chunks=chunks, headers=headers) @@ -449,10 +449,10 @@ async def test_consume_with_stream_hash_check_fail(self, checksum): error = exc_info.value assert error.response is transport.request.return_value assert len(error.args) == 1 - if checksum == u"md5": - good_checksum = u"1A/dxEpys717C6FH7FIWDw==" + if checksum == "md5": + good_checksum = "1A/dxEpys717C6FH7FIWDw==" else: - good_checksum = u"GvNZlg==" + good_checksum = "GvNZlg==" msg = download_mod._CHECKSUM_MISMATCH.format( sync_test.EXAMPLE_URL, bad_checksum, @@ -463,7 +463,7 @@ async def test_consume_with_stream_hash_check_fail(self, checksum): # Check mocks. transport.request.assert_called_once_with( - u"GET", + "GET", sync_test.EXAMPLE_URL, data=None, headers={}, @@ -475,22 +475,22 @@ async def test_consume_with_headers(self): headers = {} # Empty headers end = 16383 await self._consume_helper(end=end, headers=headers) - range_bytes = u"bytes={:d}-{:d}".format(0, end) + range_bytes = "bytes={:d}-{:d}".format(0, end) # Make sure the headers have been modified. 
- assert headers == {u"range": range_bytes} + assert headers == {"range": range_bytes} class TestChunkedDownload(object): @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) + return "bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u"content-length": u"{:d}".format(content_length), - u"content-range": resp_range, + "content-length": "{:d}".format(content_length), + "content-range": resp_range, } def _mock_response( @@ -522,7 +522,7 @@ def _mock_transport(self, start, chunk_size, total_bytes, content=b""): start + chunk_size - 1, total_bytes, content=content, - status_code=int(http_client.OK), + status_code=int(http.client.OK), ) transport.request = mock.AsyncMock(spec=["__call__"], return_value=mockResponse) @@ -547,10 +547,10 @@ async def test_consume_next_chunk(self): # Actually consume the chunk and check the output. ret_val = await download.consume_next_chunk(transport) assert ret_val is transport.request.return_value - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", sync_test.EXAMPLE_URL, data=None, headers=download_headers, @@ -577,10 +577,10 @@ async def test_consume_next_chunk_with_custom_timeout(self): # Actually consume the chunk and check the output. await download.consume_next_chunk(transport, timeout=14.7) - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", sync_test.EXAMPLE_URL, data=None, headers=download_headers, @@ -591,14 +591,14 @@ async def test_consume_next_chunk_with_custom_timeout(self): class TestRawChunkedDownload(object): @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) + return "bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u"content-length": u"{:d}".format(content_length), - u"content-range": resp_range, + "content-length": "{:d}".format(content_length), + "content-range": resp_range, } def _mock_response( @@ -630,7 +630,7 @@ def _mock_transport(self, start, chunk_size, total_bytes, content=b""): start + chunk_size - 1, total_bytes, content=content, - status_code=int(http_client.OK), + status_code=int(http.client.OK), ) transport.request = mock.AsyncMock(spec=["__call__"], return_value=mockResponse) @@ -655,10 +655,10 @@ async def test_consume_next_chunk(self): # Actually consume the chunk and check the output. 
ret_val = await download.consume_next_chunk(transport) assert ret_val is transport.request.return_value - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", sync_test.EXAMPLE_URL, data=None, headers=download_headers, @@ -686,10 +686,10 @@ async def test_consume_next_chunk_with_custom_timeout(self): # Actually consume the chunk and check the output. await download.consume_next_chunk(transport, timeout=14.7) - range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1) - download_headers = {u"range": range_bytes} + range_bytes = "bytes={:d}-{:d}".format(start, start + chunk_size - 1) + download_headers = {"range": range_bytes} transport.request.assert_called_once_with( - u"GET", + "GET", sync_test.EXAMPLE_URL, data=None, headers=download_headers, @@ -712,7 +712,7 @@ def test_non_gzipped(self): assert md5_hash is mock.sentinel.md5_hash def test_gzipped(self): - headers = {u"content-encoding": u"gzip"} + headers = {"content-encoding": "gzip"} response_raw = mock.AsyncMock(headers=headers, spec=["headers", "_decoder"]) md5_hash = download_mod._add_decoder(response_raw, mock.sentinel.md5_hash) @@ -748,7 +748,7 @@ async def __aiter__(self): yield item -def _mock_response(status=http_client.OK, chunks=(), headers=None): +def _mock_response(status=http.client.OK, chunks=(), headers=None): if headers is None: headers = {} @@ -765,14 +765,14 @@ def _mock_response(status=http_client.OK, chunks=(), headers=None): raw=mock_raw, content=stream_content, spec=[ - u"__aenter__", - u"__aexit__", - u"_headers", - u"iter_chunked", - u"status", - u"headers", - u"raw", - u"content", + "__aenter__", + "__aexit__", + "_headers", + "iter_chunked", + "status", + "headers", + "raw", + "content", ], ) # i.e. context manager returns ``self``. @@ -788,7 +788,7 @@ def _mock_response(status=http_client.OK, chunks=(), headers=None): ) -def _mock_raw_response(status_code=http_client.OK, chunks=(), headers=None): +def _mock_raw_response(status_code=http.client.OK, chunks=(), headers=None): if headers is None: headers = {} chunklist = b"".join(chunks) @@ -803,14 +803,14 @@ def _mock_raw_response(status_code=http_client.OK, chunks=(), headers=None): raw=mock_raw, content=stream_content, spec=[ - u"__aenter__", - u"__aexit__", - u"_headers", - u"iter_chunked", - u"status", - u"headers", - u"raw", - u"content", + "__aenter__", + "__aexit__", + "_headers", + "iter_chunked", + "status", + "headers", + "raw", + "content", ], ) # i.e. context manager returns ``self``. 
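# NOTE (illustrative sketch, not part of the patch): the changes in the file above and
# the ones that follow are a mechanical migration -- ``six.moves.http_client`` becomes
# the standard-library ``http.client`` module, ``six.moves.urllib_parse`` becomes
# ``urllib.parse``, and the Python-2 ``u""`` prefixes are dropped. A minimal standalone
# check of why the replacements are drop-in for these tests:
import http.client
import urllib.parse

# http.client re-exports HTTPStatus members, which compare equal to plain ints,
# so expressions like ``int(http.client.OK)`` keep their old values.
assert http.client.OK == 200
assert int(http.client.PARTIAL_CONTENT) == 206

# urllib.parse.parse_qs returns a list of values per key, which is why
# get_upload_id() in the system tests unpacks exactly one element from the query.
query = urllib.parse.urlparse("http://test.invalid?upload_id=AACODBBBxuw9u3AA").query
(upload_id,) = urllib.parse.parse_qs(query)["upload_id"]
assert upload_id == "AACODBBBxuw9u3AA"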
diff --git a/tests_async/unit/requests/test_upload.py b/tests_async/unit/requests/test_upload.py index 75d8cefe..bb7a6679 100644 --- a/tests_async/unit/requests/test_upload.py +++ b/tests_async/unit/requests/test_upload.py @@ -53,9 +53,9 @@ async def test_transmit(self): assert ret_val is transport.request.return_value - upload_headers = {u"content-type": content_type} + upload_headers = {"content-type": content_type} transport.request.assert_called_once_with( - u"POST", + "POST", SIMPLE_URL, data=data, headers=upload_headers, @@ -81,9 +81,9 @@ async def test_transmit_w_custom_timeout(self): assert ret_val is transport.request.return_value - upload_headers = {u"content-type": content_type} + upload_headers = {"content-type": content_type} transport.request.assert_called_once_with( - u"POST", + "POST", SIMPLE_URL, data=data, headers=upload_headers, @@ -95,12 +95,12 @@ async def test_transmit_w_custom_timeout(self): class TestMultipartUpload(object): @mock.patch( - u"google._async_resumable_media._upload.get_boundary", return_value=b"==4==" + "google._async_resumable_media._upload.get_boundary", return_value=b"==4==" ) @pytest.mark.asyncio async def test_transmit(self, mock_get_boundary): data = b"Mock data here and there." - metadata = {u"Hey": u"You", u"Guys": u"90909"} + metadata = {"Hey": "You", "Guys": "90909"} content_type = BASIC_CONTENT upload = upload_mod.MultipartUpload(MULTIPART_URL) @@ -119,7 +119,7 @@ async def test_transmit(self, mock_get_boundary): b"--==4==\r\n" + JSON_TYPE_LINE + b"\r\n" - + json.dumps(metadata).encode(u"utf-8") + + json.dumps(metadata).encode("utf-8") + b"\r\n" + b"--==4==\r\n" b"content-type: text/plain\r\n" @@ -128,9 +128,9 @@ async def test_transmit(self, mock_get_boundary): b"--==4==--" ) multipart_type = b'multipart/related; boundary="==4=="' - upload_headers = {u"content-type": multipart_type} + upload_headers = {"content-type": multipart_type} transport.request.assert_called_once_with( - u"POST", + "POST", MULTIPART_URL, data=expected_payload, headers=upload_headers, @@ -140,12 +140,12 @@ async def test_transmit(self, mock_get_boundary): mock_get_boundary.assert_called_once_with() @mock.patch( - u"google._async_resumable_media._upload.get_boundary", return_value=b"==4==" + "google._async_resumable_media._upload.get_boundary", return_value=b"==4==" ) @pytest.mark.asyncio async def test_transmit_w_custom_timeout(self, mock_get_boundary): data = b"Mock data here and there." 
- metadata = {u"Hey": u"You", u"Guys": u"90909"} + metadata = {"Hey": "You", "Guys": "90909"} content_type = BASIC_CONTENT upload = upload_mod.MultipartUpload(MULTIPART_URL) @@ -160,7 +160,7 @@ async def test_transmit_w_custom_timeout(self, mock_get_boundary): b"--==4==\r\n" + JSON_TYPE_LINE + b"\r\n" - + json.dumps(metadata).encode(u"utf-8") + + json.dumps(metadata).encode("utf-8") + b"\r\n" + b"--==4==\r\n" b"content-type: text/plain\r\n" @@ -169,10 +169,10 @@ async def test_transmit_w_custom_timeout(self, mock_get_boundary): b"--==4==--" ) multipart_type = b'multipart/related; boundary="==4=="' - upload_headers = {u"content-type": multipart_type} + upload_headers = {"content-type": multipart_type} transport.request.assert_called_once_with( - u"POST", + "POST", MULTIPART_URL, data=expected_payload, headers=upload_headers, @@ -188,11 +188,11 @@ async def test_initiate(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) data = b"Knock knock who is there" stream = io.BytesIO(data) - metadata = {u"name": u"got-jokes.txt"} + metadata = {"name": "got-jokes.txt"} transport = mock.AsyncMock(spec=["request"]) - location = (u"http://test.invalid?upload_id=AACODBBBxuw9u3AA",) - response_headers = {u"location": location} + location = ("http://test.invalid?upload_id=AACODBBBxuw9u3AA",) + response_headers = {"location": location} transport.request = mock.AsyncMock( spec=["__call__"], return_value=_make_response(headers=response_headers) ) @@ -216,12 +216,12 @@ async def test_initiate(self): # Make sure the mock was called as expected. json_bytes = b'{"name": "got-jokes.txt"}' expected_headers = { - u"content-type": JSON_TYPE, - u"x-upload-content-type": BASIC_CONTENT, - u"x-upload-content-length": u"{:d}".format(total_bytes), + "content-type": JSON_TYPE, + "x-upload-content-type": BASIC_CONTENT, + "x-upload-content-length": "{:d}".format(total_bytes), } transport.request.assert_called_once_with( - u"POST", + "POST", RESUMABLE_URL, data=json_bytes, headers=expected_headers, @@ -233,11 +233,11 @@ async def test_initiate_w_custom_timeout(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) data = b"Knock knock who is there" stream = io.BytesIO(data) - metadata = {u"name": u"got-jokes.txt"} + metadata = {"name": "got-jokes.txt"} transport = mock.AsyncMock(spec=["request"]) - location = (u"http://test.invalid?upload_id=AACODBBBxuw9u3AA",) - response_headers = {u"location": location} + location = ("http://test.invalid?upload_id=AACODBBBxuw9u3AA",) + response_headers = {"location": location} transport.request = mock.AsyncMock( spec=["__call__"], return_value=_make_response(headers=response_headers) ) @@ -262,12 +262,12 @@ async def test_initiate_w_custom_timeout(self): # Make sure the mock was called as expected. 
json_bytes = b'{"name": "got-jokes.txt"}' expected_headers = { - u"content-type": JSON_TYPE, - u"x-upload-content-type": BASIC_CONTENT, - u"x-upload-content-length": u"{:d}".format(total_bytes), + "content-type": JSON_TYPE, + "x-upload-content-type": BASIC_CONTENT, + "x-upload-content-length": "{:d}".format(total_bytes), } transport.request.assert_called_once_with( - u"POST", + "POST", RESUMABLE_URL, data=json_bytes, headers=expected_headers, @@ -280,7 +280,7 @@ def _upload_in_flight(data, headers=None): upload._stream = io.BytesIO(data) upload._content_type = BASIC_CONTENT upload._total_bytes = len(data) - upload._resumable_url = u"http://test.invalid?upload_id=not-none" + upload._resumable_url = "http://test.invalid?upload_id=not-none" return upload @staticmethod @@ -300,7 +300,7 @@ async def test_transmit_next_chunk(self): assert chunk_size < len(data) upload._chunk_size = chunk_size # Make a fake 308 response. - response_headers = {u"range": u"bytes=0-{:d}".format(chunk_size - 1)} + response_headers = {"range": "bytes=0-{:d}".format(chunk_size - 1)} transport = self._chunk_mock( _async_resumable_media.PERMANENT_REDIRECT, response_headers ) @@ -314,13 +314,13 @@ async def test_transmit_next_chunk(self): assert upload._bytes_uploaded == chunk_size # Make sure the mock was called as expected. payload = data[:chunk_size] - content_range = u"bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) + content_range = "bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) expected_headers = { - u"content-range": content_range, - u"content-type": BASIC_CONTENT, + "content-range": content_range, + "content-type": BASIC_CONTENT, } transport.request.assert_called_once_with( - u"PUT", + "PUT", upload.resumable_url, data=payload, headers=expected_headers, @@ -336,7 +336,7 @@ async def test_transmit_next_chunk_w_custom_timeout(self): assert chunk_size < len(data) upload._chunk_size = chunk_size # Make a fake 308 response. - response_headers = {u"range": u"bytes=0-{:d}".format(chunk_size - 1)} + response_headers = {"range": "bytes=0-{:d}".format(chunk_size - 1)} transport = self._chunk_mock( _async_resumable_media.PERMANENT_REDIRECT, response_headers ) @@ -350,13 +350,13 @@ async def test_transmit_next_chunk_w_custom_timeout(self): assert upload._bytes_uploaded == chunk_size # Make sure the mock was called as expected. payload = data[:chunk_size] - content_range = u"bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) + content_range = "bytes 0-{:d}/{:d}".format(chunk_size - 1, len(data)) expected_headers = { - u"content-range": content_range, - u"content-type": BASIC_CONTENT, + "content-range": content_range, + "content-type": BASIC_CONTENT, } transport.request.assert_called_once_with( - u"PUT", + "PUT", upload.resumable_url, data=payload, headers=expected_headers, @@ -368,10 +368,10 @@ async def test_recover(self): upload = upload_mod.ResumableUpload(RESUMABLE_URL, ONE_MB) upload._invalid = True # Make sure invalid. 
upload._stream = mock.Mock(spec=["seek"]) - upload._resumable_url = u"http://test.invalid?upload_id=big-deal" + upload._resumable_url = "http://test.invalid?upload_id=big-deal" end = 55555 - headers = {u"range": u"bytes=0-{:d}".format(end)} + headers = {"range": "bytes=0-{:d}".format(end)} transport = self._chunk_mock(_async_resumable_media.PERMANENT_REDIRECT, headers) ret_val = await upload.recover(transport) @@ -380,9 +380,9 @@ async def test_recover(self): assert upload.bytes_uploaded == end + 1 assert not upload.invalid upload._stream.seek.assert_called_once_with(end + 1) - expected_headers = {u"content-range": u"bytes */*"} + expected_headers = {"content-range": "bytes */*"} transport.request.assert_called_once_with( - u"PUT", + "PUT", upload.resumable_url, data=None, headers=expected_headers, diff --git a/tests_async/unit/test__download.py b/tests_async/unit/test__download.py index 89d2b487..87e30ce8 100644 --- a/tests_async/unit/test__download.py +++ b/tests_async/unit/test__download.py @@ -12,17 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import http.client import io +import google.auth.transport._aiohttp_requests as aiohttp_requests import mock import pytest -from six.moves import http_client from google._async_resumable_media import _download from google.resumable_media import common from tests.unit import test__download as sync_test -import google.auth.transport._aiohttp_requests as aiohttp_requests EXAMPLE_URL = sync_test.EXAMPLE_URL @@ -41,7 +41,7 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): start = 11 end = 10001 - headers = {u"foof": u"barf"} + headers = {"foof": "barf"} download = _download.DownloadBase( EXAMPLE_URL, stream=mock.sentinel.stream, @@ -74,19 +74,19 @@ def test__get_status_code(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_status_code(None) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__get_headers(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_headers(None) - exc_info.match(u"virtual") + exc_info.match("virtual") def test__get_body(self): with pytest.raises(NotImplementedError) as exc_info: _download.DownloadBase._get_body(None) - exc_info.match(u"virtual") + exc_info.match("virtual") class TestDownload(object): @@ -99,27 +99,27 @@ def test__prepare_request_already_finished(self): def test__prepare_request(self): download1 = _download.Download(EXAMPLE_URL) method1, url1, payload1, headers1 = download1._prepare_request() - assert method1 == u"GET" + assert method1 == "GET" assert url1 == EXAMPLE_URL assert payload1 is None assert headers1 == {} download2 = _download.Download(EXAMPLE_URL, start=53) method2, url2, payload2, headers2 = download2._prepare_request() - assert method2 == u"GET" + assert method2 == "GET" assert url2 == EXAMPLE_URL assert payload2 is None - assert headers2 == {u"range": u"bytes=53-"} + assert headers2 == {"range": "bytes=53-"} def test__prepare_request_with_headers(self): - headers = {u"spoonge": u"borb"} + headers = {"spoonge": "borb"} download = _download.Download(EXAMPLE_URL, start=11, end=111, headers=headers) method, url, payload, new_headers = download._prepare_request() - assert method == u"GET" + assert method == "GET" assert url == EXAMPLE_URL assert payload is None assert new_headers is headers - assert headers == {u"range": u"bytes=11-111", u"spoonge": u"borb"} + assert headers == {"range": "bytes=11-111", "spoonge": "borb"} 
@pytest.mark.asyncio async def test__process_response(self): @@ -128,7 +128,7 @@ async def test__process_response(self): # Make sure **not finished** before. assert not download.finished - response = mock.AsyncMock(status=int(http_client.OK), spec=["status"]) + response = mock.AsyncMock(status=int(http.client.OK), spec=["status"]) ret_val = download._process_response(response) assert ret_val is None # Make sure **finished** after. @@ -141,7 +141,7 @@ async def test__process_response_bad_status(self): # Make sure **not finished** before. assert not download.finished - response = mock.AsyncMock(status=int(http_client.NOT_FOUND), spec=["status"]) + response = mock.AsyncMock(status=int(http.client.NOT_FOUND), spec=["status"]) with pytest.raises(common.InvalidResponse) as exc_info: download._process_response(response) @@ -149,8 +149,8 @@ async def test__process_response_bad_status(self): assert error.response is response assert len(error.args) == 5 assert error.args[1] == response.status - assert error.args[3] == http_client.OK - assert error.args[4] == http_client.PARTIAL_CONTENT + assert error.args[3] == http.client.OK + assert error.args[4] == http.client.PARTIAL_CONTENT # Make sure **finished** even after a failure. assert download.finished @@ -159,7 +159,7 @@ def test_consume(self): with pytest.raises(NotImplementedError) as exc_info: download.consume(None) - exc_info.match(u"virtual") + exc_info.match("virtual") class TestChunkedDownload(object): @@ -244,14 +244,14 @@ def test__get_byte_range_with_total_bytes(self): @staticmethod def _response_content_range(start_byte, end_byte, total_bytes): - return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) + return "bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes) def _response_headers(self, start_byte, end_byte, total_bytes): content_length = end_byte - start_byte + 1 resp_range = self._response_content_range(start_byte, end_byte, total_bytes) return { - u"content-length": u"{:d}".format(content_length), - u"content-range": resp_range, + "content-length": "{:d}".format(content_length), + "content-range": resp_range, } def _mock_response( @@ -273,7 +273,7 @@ def test__prepare_request_already_finished(self): with pytest.raises(ValueError) as exc_info: download._prepare_request() - assert exc_info.match(u"Download has finished.") + assert exc_info.match("Download has finished.") def test__prepare_request_invalid(self): download = _download.ChunkedDownload(EXAMPLE_URL, 64, None) @@ -281,39 +281,39 @@ def test__prepare_request_invalid(self): with pytest.raises(ValueError) as exc_info: download._prepare_request() - assert exc_info.match(u"Download is invalid and cannot be re-used.") + assert exc_info.match("Download is invalid and cannot be re-used.") def test__prepare_request(self): chunk_size = 2048 download1 = _download.ChunkedDownload(EXAMPLE_URL, chunk_size, None) method1, url1, payload1, headers1 = download1._prepare_request() - assert method1 == u"GET" + assert method1 == "GET" assert url1 == EXAMPLE_URL assert payload1 is None - assert headers1 == {u"range": u"bytes=0-2047"} + assert headers1 == {"range": "bytes=0-2047"} download2 = _download.ChunkedDownload( EXAMPLE_URL, chunk_size, None, start=19991 ) download2._total_bytes = 20101 method2, url2, payload2, headers2 = download2._prepare_request() - assert method2 == u"GET" + assert method2 == "GET" assert url2 == EXAMPLE_URL assert payload2 is None - assert headers2 == {u"range": u"bytes=19991-20100"} + assert headers2 == {"range": "bytes=19991-20100"} def 
test__prepare_request_with_headers(self): chunk_size = 2048 - headers = {u"patrizio": u"Starf-ish"} + headers = {"patrizio": "Starf-ish"} download = _download.ChunkedDownload( EXAMPLE_URL, chunk_size, None, headers=headers ) method, url, payload, new_headers = download._prepare_request() - assert method == u"GET" + assert method == "GET" assert url == EXAMPLE_URL assert payload is None assert new_headers is headers - expected = {u"patrizio": u"Starf-ish", u"range": u"bytes=0-2047"} + expected = {"patrizio": "Starf-ish", "range": "bytes=0-2047"} assert headers == expected def test__make_invalid(self): @@ -344,7 +344,7 @@ async def test__process_response(self): already + chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) await download._process_response(response) @@ -377,10 +377,10 @@ async def test__process_response_transfer_encoding(self): already + chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) - response.headers[u"transfer-encoding"] = "chunked" - del response.headers[u"content-length"] + response.headers["transfer-encoding"] = "chunked" + del response.headers["content-length"] await download._process_response(response) # Check internal state after. assert not download.finished @@ -403,7 +403,7 @@ async def test__process_response_bad_status(self): assert download.total_bytes is None # Actually call the method to update. response = self._mock_response( - 0, total_bytes - 1, total_bytes, status_code=int(http_client.NOT_FOUND) + 0, total_bytes - 1, total_bytes, status_code=int(http.client.NOT_FOUND) ) with pytest.raises(common.InvalidResponse) as exc_info: await download._process_response(response) @@ -412,8 +412,8 @@ async def test__process_response_bad_status(self): assert error.response is response assert len(error.args) == 5 assert error.args[1] == response.status - assert error.args[3] == http_client.OK - assert error.args[4] == http_client.PARTIAL_CONTENT + assert error.args[3] == http.client.OK + assert error.args[4] == http.client.PARTIAL_CONTENT # Check internal state after. assert not download.finished assert download.bytes_downloaded == 0 @@ -437,8 +437,8 @@ async def test__process_response_missing_content_length(self): spec=["__call__"], return_value=b"DEADBEEF" ) response = mock.AsyncMock( - headers={u"content-range": u"bytes 0-99/99"}, - status=int(http_client.PARTIAL_CONTENT), + headers={"content-range": "bytes 0-99/99"}, + status=int(http.client.PARTIAL_CONTENT), content=content_stream, spec=["headers", "status", "content"], ) @@ -448,7 +448,7 @@ async def test__process_response_missing_content_length(self): error = exc_info.value assert error.response is response assert len(error.args) == 2 - assert error.args[1] == u"content-length" + assert error.args[1] == "content-length" # Check internal state after. assert not download.finished assert download.bytes_downloaded == 0 @@ -468,15 +468,15 @@ async def test__process_response_bad_content_range(self): # Actually call the method to update. 
data = b"stuff" headers = { - u"content-length": u"{:d}".format(len(data)), - u"content-range": u"kites x-y/58", + "content-length": "{:d}".format(len(data)), + "content-range": "kites x-y/58", } content_stream = mock.AsyncMock(spec=["__call", "read"]) content_stream.read = mock.AsyncMock(spec=["__call__"], return_value=data) response = mock.AsyncMock( content=content_stream, headers=headers, - status=int(http_client.PARTIAL_CONTENT), + status=int(http.client.PARTIAL_CONTENT), spec=["content", "headers", "status"], ) with pytest.raises(common.InvalidResponse) as exc_info: @@ -485,7 +485,7 @@ async def test__process_response_bad_content_range(self): error = exc_info.value assert error.response is response assert len(error.args) == 3 - assert error.args[1] == headers[u"content-range"] + assert error.args[1] == headers["content-range"] # Check internal state after. assert not download.finished assert download.bytes_downloaded == 0 @@ -512,7 +512,7 @@ async def test__process_response_body_wrong_length(self): chunk_size - 1, total_bytes, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) with pytest.raises(common.InvalidResponse) as exc_info: await download._process_response(response) @@ -549,7 +549,7 @@ async def test__process_response_when_finished(self): total_bytes - 1, total_bytes, content=data, - status_code=int(http_client.OK), + status_code=int(http.client.OK), ) await download._process_response(response) # Check internal state after. @@ -582,7 +582,7 @@ async def test__process_response_when_reaching_end(self): end, 8 * chunk_size, content=data, - status_code=int(http_client.PARTIAL_CONTENT), + status_code=int(http.client.PARTIAL_CONTENT), ) await download._process_response(response) # Check internal state after. 
@@ -600,8 +600,8 @@ async def test__process_response_when_content_range_is_zero(self): _fix_up_virtual(download) content_range = _download._ZERO_CONTENT_RANGE_HEADER - headers = {u"content-range": content_range} - status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE + headers = {"content-range": content_range} + status_code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE response = mock.AsyncMock( headers=headers, status=status_code, spec=["headers", "status"] ) @@ -616,7 +616,7 @@ def test_consume_next_chunk(self): with pytest.raises(NotImplementedError) as exc_info: download.consume_next_chunk(None) - exc_info.match(u"virtual") + exc_info.match("virtual") class Test__add_bytes_range(object): @@ -630,35 +630,35 @@ def test_both_vals(self): headers = {} ret_val = _download.add_bytes_range(17, 1997, headers) assert ret_val is None - assert headers == {u"range": u"bytes=17-1997"} + assert headers == {"range": "bytes=17-1997"} def test_end_only(self): headers = {} ret_val = _download.add_bytes_range(None, 909, headers) assert ret_val is None - assert headers == {u"range": u"bytes=0-909"} + assert headers == {"range": "bytes=0-909"} def test_start_only(self): headers = {} ret_val = _download.add_bytes_range(3735928559, None, headers) assert ret_val is None - assert headers == {u"range": u"bytes=3735928559-"} + assert headers == {"range": "bytes=3735928559-"} def test_start_as_offset(self): headers = {} ret_val = _download.add_bytes_range(-123454321, None, headers) assert ret_val is None - assert headers == {u"range": u"bytes=-123454321"} + assert headers == {"range": "bytes=-123454321"} class Test_get_range_info(object): @staticmethod def _make_response(content_range): - headers = {u"content-range": content_range} + headers = {"content-range": content_range} return mock.Mock(headers=headers, spec=["headers"]) def _success_helper(self, **kwargs): - content_range = u"Bytes 7-11/42" + content_range = "Bytes 7-11/42" response = self._make_response(content_range) start_byte, end_byte, total_bytes = _download.get_range_info( response, _get_headers, **kwargs @@ -676,7 +676,7 @@ def test_success_with_callback(self): callback.assert_not_called() def _failure_helper(self, **kwargs): - content_range = u"nope x-6/y" + content_range = "nope x-6/y" response = self._make_response(content_range) with pytest.raises(common.InvalidResponse) as exc_info: _download.get_range_info(response, _get_headers, **kwargs) @@ -702,7 +702,7 @@ def _missing_header_helper(self, **kwargs): error = exc_info.value assert error.response is response assert len(error.args) == 2 - assert error.args[1] == u"content-range" + assert error.args[1] == "content-range" def test_missing_header(self): self._missing_header_helper() @@ -716,22 +716,22 @@ def test_missing_header_with_callback(self): class Test__check_for_zero_content_range(object): @staticmethod def _make_response(content_range, status_code): - headers = {u"content-range": content_range} + headers = {"content-range": content_range} return mock.AsyncMock( headers=headers, status=status_code, spec=["headers", "status_code"] ) def test_status_code_416_and_test_content_range_zero_both(self): content_range = _download._ZERO_CONTENT_RANGE_HEADER - status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE + status_code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE response = self._make_response(content_range, status_code) assert _download._check_for_zero_content_range( response, _get_status_code, _get_headers ) def test_status_code_416_only(self): - content_range = u"bytes 2-5/3" - 
-        status_code = http_client.REQUESTED_RANGE_NOT_SATISFIABLE
+        content_range = "bytes 2-5/3"
+        status_code = http.client.REQUESTED_RANGE_NOT_SATISFIABLE
         response = self._make_response(content_range, status_code)
         assert not _download._check_for_zero_content_range(
             response, _get_status_code, _get_headers
@@ -739,7 +739,7 @@ def test_status_code_416_only(self):

     def test_content_range_zero_only(self):
         content_range = _download._ZERO_CONTENT_RANGE_HEADER
-        status_code = http_client.OK
+        status_code = http.client.OK
         response = self._make_response(content_range, status_code)
         assert not _download._check_for_zero_content_range(
             response, _get_status_code, _get_headers
diff --git a/tests_async/unit/test__helpers.py b/tests_async/unit/test__helpers.py
index 06f2bdf8..f14c0c2b 100644
--- a/tests_async/unit/test__helpers.py
+++ b/tests_async/unit/test__helpers.py
@@ -12,9 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import http.client
+
 import mock
 import pytest
-from six.moves import http_client

 from google._async_resumable_media import _helpers
 from google.resumable_media import common
@@ -27,9 +28,9 @@ def test_do_nothing():

 class Test_header_required(object):
     def _success_helper(self, **kwargs):
-        name = u"some-header"
-        value = u"The Right Hand Side"
-        headers = {name: value, u"other-name": u"other-value"}
+        name = "some-header"
+        value = "The Right Hand Side"
+        headers = {name: value, "other-name": "other-value"}
         response = mock.Mock(
             _headers=headers, headers=headers, spec=["_headers", "headers"]
         )
@@ -46,7 +47,7 @@ def test_success_with_callback(self):

     def _failure_helper(self, **kwargs):
         response = mock.Mock(_headers={}, headers={}, spec=["_headers", "headers"])
-        name = u"any-name"
+        name = "any-name"
         with pytest.raises(common.InvalidResponse) as exc_info:
             _helpers.header_required(response, name, _get_headers, **kwargs)

@@ -70,12 +71,12 @@ def _get_status_code(response):
         return response.status_code

     def test_success(self):
-        status_codes = (http_client.OK, http_client.CREATED)
+        status_codes = (http.client.OK, http.client.CREATED)
         acceptable = (
-            http_client.OK,
-            int(http_client.OK),
-            http_client.CREATED,
-            int(http_client.CREATED),
+            http.client.OK,
+            int(http.client.OK),
+            http.client.CREATED,
+            int(http.client.CREATED),
         )
         for value in acceptable:
             response = _make_response(value)
@@ -85,18 +86,18 @@ def test_success(self):
             assert value == status_code

     def test_success_with_callback(self):
-        status_codes = (http_client.OK,)
-        response = _make_response(http_client.OK)
+        status_codes = (http.client.OK,)
+        response = _make_response(http.client.OK)
         callback = mock.Mock(spec=[])
         status_code = _helpers.require_status_code(
             response, status_codes, self._get_status_code, callback=callback
         )
-        assert status_code == http_client.OK
+        assert status_code == http.client.OK
         callback.assert_not_called()

     def test_failure(self):
-        status_codes = (http_client.CREATED, http_client.NO_CONTENT)
-        response = _make_response(http_client.OK)
+        status_codes = (http.client.CREATED, http.client.NO_CONTENT)
+        response = _make_response(http.client.OK)
         with pytest.raises(common.InvalidResponse) as exc_info:
             _helpers.require_status_code(response, status_codes, self._get_status_code)

@@ -107,8 +108,8 @@ def test_failure(self):
         assert error.args[3:] == status_codes

     def test_failure_with_callback(self):
-        status_codes = (http_client.OK,)
-        response = _make_response(http_client.NOT_FOUND)
+        status_codes = (http.client.OK,)
+        response = _make_response(http.client.NOT_FOUND)
         callback = mock.Mock(spec=[])
         with pytest.raises(common.InvalidResponse) as exc_info:
             _helpers.require_status_code(
@@ -124,7 +125,7 @@ def test_failure_with_callback(self):


 class Test_calculate_retry_wait(object):
-    @mock.patch(u"random.randint", return_value=125)
+    @mock.patch("random.randint", return_value=125)
     def test_past_limit(self, randint_mock):
         base_wait, wait_time = _helpers.calculate_retry_wait(70.0, 64.0)

@@ -132,7 +133,7 @@ def test_past_limit(self, randint_mock):
         assert wait_time == 64.125
         randint_mock.assert_called_once_with(0, 1000)

-    @mock.patch(u"random.randint", return_value=250)
+    @mock.patch("random.randint", return_value=250)
     def test_at_limit(self, randint_mock):
         base_wait, wait_time = _helpers.calculate_retry_wait(50.0, 50.0)

@@ -140,7 +141,7 @@ def test_at_limit(self, randint_mock):
         assert wait_time == 50.25
         randint_mock.assert_called_once_with(0, 1000)

-    @mock.patch(u"random.randint", return_value=875)
+    @mock.patch("random.randint", return_value=875)
     def test_under_limit(self, randint_mock):
         base_wait, wait_time = _helpers.calculate_retry_wait(16.0, 33.0)

@@ -152,7 +153,7 @@ class Test_wait_and_retry(object):
     @pytest.mark.asyncio
     async def test_success_no_retry(self):
-        truthy = http_client.OK
+        truthy = http.client.OK
         assert truthy not in common.RETRYABLE

         response = _make_response(truthy)
@@ -163,17 +164,17 @@ async def test_success_no_retry(self):
         assert ret_val is response
         func.assert_called_once_with()

-    @mock.patch(u"time.sleep")
-    @mock.patch(u"random.randint")
+    @mock.patch("time.sleep")
+    @mock.patch("random.randint")
     @pytest.mark.asyncio
     async def test_success_with_retry(self, randint_mock, sleep_mock):
         randint_mock.side_effect = [125, 625, 375]

         status_codes = (
-            http_client.INTERNAL_SERVER_ERROR,
-            http_client.BAD_GATEWAY,
-            http_client.SERVICE_UNAVAILABLE,
-            http_client.NOT_FOUND,
+            http.client.INTERNAL_SERVER_ERROR,
+            http.client.BAD_GATEWAY,
+            http.client.SERVICE_UNAVAILABLE,
+            http.client.NOT_FOUND,
         )
         responses = [_make_response(status_code) for status_code in status_codes]
         func = mock.AsyncMock(side_effect=responses, spec=[])
@@ -195,13 +196,13 @@ async def test_success_with_retry(self, randint_mock, sleep_mock):
         sleep_mock.assert_any_call(2.625)
         sleep_mock.assert_any_call(4.375)

-    @mock.patch(u"time.sleep")
-    @mock.patch(u"random.randint")
+    @mock.patch("time.sleep")
+    @mock.patch("random.randint")
     @pytest.mark.asyncio
     async def test_success_with_retry_connection_error(self, randint_mock, sleep_mock):
         randint_mock.side_effect = [125, 625, 375]

-        response = _make_response(http_client.NOT_FOUND)
+        response = _make_response(http.client.NOT_FOUND)
         responses = [ConnectionError, ConnectionError, ConnectionError, response]
         func = mock.AsyncMock(side_effect=responses, spec=[])

@@ -221,8 +222,8 @@ async def test_success_with_retry_connection_error(self, randint_mock, sleep_moc
         sleep_mock.assert_any_call(2.625)
         sleep_mock.assert_any_call(4.375)

-    @mock.patch(u"time.sleep")
-    @mock.patch(u"random.randint")
+    @mock.patch("time.sleep")
+    @mock.patch("random.randint")
     @pytest.mark.asyncio
     async def test_retry_exceeded_reraises_connection_error(
         self, randint_mock, sleep_mock
@@ -251,20 +252,20 @@ async def test_retry_exceeded_reraises_connection_error(
         sleep_mock.assert_any_call(32.25)
         sleep_mock.assert_any_call(64.125)

-    @mock.patch(u"time.sleep")
-    @mock.patch(u"random.randint")
+    @mock.patch("time.sleep")
+    @mock.patch("random.randint")
     @pytest.mark.asyncio
     async def test_retry_exceeds_max_cumulative(self, randint_mock, sleep_mock):
         randint_mock.side_effect = [875, 0, 375, 500, 500, 250, 125]

         status_codes = (
-            http_client.SERVICE_UNAVAILABLE,
-            http_client.GATEWAY_TIMEOUT,
+            http.client.SERVICE_UNAVAILABLE,
+            http.client.GATEWAY_TIMEOUT,
             common.TOO_MANY_REQUESTS,
-            http_client.INTERNAL_SERVER_ERROR,
-            http_client.SERVICE_UNAVAILABLE,
-            http_client.BAD_GATEWAY,
-            http_client.GATEWAY_TIMEOUT,
+            http.client.INTERNAL_SERVER_ERROR,
+            http.client.SERVICE_UNAVAILABLE,
+            http.client.BAD_GATEWAY,
+            http.client.GATEWAY_TIMEOUT,
             common.TOO_MANY_REQUESTS,
         )
         responses = [_make_response(status_code) for status_code in status_codes]
diff --git a/tests_async/unit/test__upload.py b/tests_async/unit/test__upload.py
index fc777679..419eb186 100644
--- a/tests_async/unit/test__upload.py
+++ b/tests_async/unit/test__upload.py
@@ -12,19 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import http.client
 import io
 import sys

 import mock
 import pytest
-from six.moves import http_client

 from google import _async_resumable_media
 from google._async_resumable_media import _upload
-
 from google.resumable_media import common
 from google.resumable_media import _helpers as sync_helpers
-
 from tests.unit import test__upload as sync_test
@@ -37,7 +35,7 @@ def test_constructor_defaults(self):
         _check_retry_strategy(upload)

     def test_constructor_explicit(self):
-        headers = {u"spin": u"doctors"}
+        headers = {"spin": "doctors"}
         upload = _upload.UploadBase(sync_test.SIMPLE_URL, headers=headers)
         assert upload.upload_url == sync_test.SIMPLE_URL
         assert upload._headers is headers
@@ -63,7 +61,7 @@ def test__process_response_bad_status(self):
         # Make sure **not finished** before.
         assert not upload.finished

-        status_code = http_client.SERVICE_UNAVAILABLE
+        status_code = http.client.SERVICE_UNAVAILABLE
         response = _make_response(status_code=status_code)
         with pytest.raises(common.InvalidResponse) as exc_info:
             upload._process_response(response)
@@ -72,7 +70,7 @@ def test__process_response_bad_status(self):
         assert error.response is response
         assert len(error.args) == 4
         assert error.args[1] == status_code
-        assert error.args[3] == http_client.OK
+        assert error.args[3] == http.client.OK

         # Make sure **finished** after (even in failure).
         assert upload.finished
@@ -92,19 +90,19 @@ def test__get_status_code(self):
         with pytest.raises(NotImplementedError) as exc_info:
             _upload.UploadBase._get_status_code(None)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")

     def test__get_headers(self):
         with pytest.raises(NotImplementedError) as exc_info:
             _upload.UploadBase._get_headers(None)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")

     def test__get_body(self):
         with pytest.raises(NotImplementedError) as exc_info:
             _upload.UploadBase._get_body(None)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")


 class TestSimpleUpload(object):
@@ -114,39 +112,39 @@ def test__prepare_request_already_finished(self):
         with pytest.raises(ValueError) as exc_info:
             upload._prepare_request(b"", None)

-        exc_info.match(u"An upload can only be used once.")
+        exc_info.match("An upload can only be used once.")

     def test__prepare_request_non_bytes_data(self):
         upload = _upload.SimpleUpload(sync_test.SIMPLE_URL)
         assert not upload.finished
         with pytest.raises(TypeError) as exc_info:
-            upload._prepare_request(u"", None)
+            upload._prepare_request("", None)

-        exc_info.match(u"must be bytes")
+        exc_info.match("must be bytes")

     def test__prepare_request(self):
         upload = _upload.SimpleUpload(sync_test.SIMPLE_URL)
-        content_type = u"image/jpeg"
+        content_type = "image/jpeg"
         data = b"cheetos and eetos"
         method, url, payload, headers = upload._prepare_request(data, content_type)
-        assert method == u"POST"
+        assert method == "POST"
         assert url == sync_test.SIMPLE_URL
         assert payload == data
-        assert headers == {u"content-type": content_type}
+        assert headers == {"content-type": content_type}

     def test__prepare_request_with_headers(self):
-        headers = {u"x-goog-cheetos": u"spicy"}
+        headers = {"x-goog-cheetos": "spicy"}
         upload = _upload.SimpleUpload(sync_test.SIMPLE_URL, headers=headers)
-        content_type = u"image/jpeg"
+        content_type = "image/jpeg"
         data = b"some stuff"
         method, url, payload, new_headers = upload._prepare_request(data, content_type)
-        assert method == u"POST"
+        assert method == "POST"
         assert url == sync_test.SIMPLE_URL
         assert payload == data
         assert new_headers is headers
-        expected = {u"content-type": content_type, u"x-goog-cheetos": u"spicy"}
+        expected = {"content-type": content_type, "x-goog-cheetos": "spicy"}
         assert headers == expected

     def test_transmit(self):
@@ -154,7 +152,7 @@ def test_transmit(self):
         with pytest.raises(NotImplementedError) as exc_info:
             upload.transmit(None, None, None)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")


 class TestMultipartUpload(object):
@@ -167,7 +165,7 @@ def test_constructor_defaults(self):
         _check_retry_strategy(upload)

     def test_constructor_explicit(self):
-        headers = {u"spin": u"doctors"}
+        headers = {"spin": "doctors"}
         upload = _upload.MultipartUpload(
             sync_test.MULTIPART_URL, headers=headers, checksum="md5"
         )
@@ -184,13 +182,13 @@ def test__prepare_request_already_finished(self):
             upload._prepare_request(b"Hi", {}, sync_test.BASIC_CONTENT)

     def test__prepare_request_non_bytes_data(self):
-        data = u"Nope not bytes."
+        data = "Nope not bytes."
         upload = _upload.MultipartUpload(sync_test.MULTIPART_URL)
         with pytest.raises(TypeError):
             upload._prepare_request(data, {}, sync_test.BASIC_CONTENT)

     @mock.patch(
-        u"google._async_resumable_media._upload.get_boundary", return_value=b"==3=="
+        "google._async_resumable_media._upload.get_boundary", return_value=b"==3=="
     )
     def _prepare_request_helper(
         self,
@@ -209,19 +207,19 @@ def _prepare_request_helper(
             # This should be fully overwritten by the calculated checksum, so
             # the output should not change even if this is set.
             if checksum == "md5":
-                metadata = {u"md5Hash": u"ZZZZZZZZZZZZZZZZZZZZZZ=="}
+                metadata = {"md5Hash": "ZZZZZZZZZZZZZZZZZZZZZZ=="}
             else:
-                metadata = {u"crc32c": u"ZZZZZZ=="}
+                metadata = {"crc32c": "ZZZZZZ=="}
         else:
             # To simplify parsing the response, omit other test metadata if a
             # checksum is specified.
-            metadata = {u"Some": u"Stuff"} if not checksum else {}
+            metadata = {"Some": "Stuff"} if not checksum else {}
         content_type = sync_test.BASIC_CONTENT
         method, url, payload, new_headers = upload._prepare_request(
             data, metadata, content_type
         )

-        assert method == u"POST"
+        assert method == "POST"
         assert url == sync_test.MULTIPART_URL

         preamble = b"--==3==\r\n" + sync_test.JSON_TYPE_LINE + b"\r\n"
@@ -253,7 +251,7 @@ def _prepare_request_helper(

     def test__prepare_request(self):
         headers, multipart_type = self._prepare_request_helper()
-        assert headers == {u"content-type": multipart_type}
+        assert headers == {"content-type": multipart_type}

     @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
     def test__prepare_request_with_checksum(self, checksum):
@@ -265,7 +263,7 @@ def test__prepare_request_with_checksum(self, checksum):
             checksum=checksum, expected_checksum=checksums[checksum]
         )
         assert headers == {
-            u"content-type": multipart_type,
+            "content-type": multipart_type,
         }

     @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
@@ -280,17 +278,17 @@ def test__prepare_request_with_checksum_overwrite(self, checksum):
             test_overwrite=True,
         )
         assert headers == {
-            u"content-type": multipart_type,
+            "content-type": multipart_type,
         }

     def test__prepare_request_with_headers(self):
-        headers = {u"best": u"shirt", u"worst": u"hat"}
+        headers = {"best": "shirt", "worst": "hat"}
         new_headers, multipart_type = self._prepare_request_helper(headers=headers)
         assert new_headers is headers
         expected_headers = {
-            u"best": u"shirt",
-            u"content-type": multipart_type,
-            u"worst": u"hat",
+            "best": "shirt",
+            "content-type": multipart_type,
+            "worst": "hat",
         }
         assert expected_headers == headers
@@ -299,7 +297,7 @@ def test_transmit(self):
         with pytest.raises(NotImplementedError) as exc_info:
             upload.transmit(None, None, None, None)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")


 class TestResumableUpload(object):
@@ -357,7 +355,7 @@ def test_resumable_url_property(self):
         assert upload.resumable_url is None

         # Make sure we cannot set it on public @property.
-        new_url = u"http://test.invalid?upload_id=not-none"
+        new_url = "http://test.invalid?upload_id=not-none"
         with pytest.raises(AttributeError):
             upload.resumable_url = new_url

@@ -394,7 +392,7 @@ def test_total_bytes_property(self):

     def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs):
         data = b"some really big big data."
         stream = io.BytesIO(data)
-        metadata = {u"name": u"big-data-file.txt"}
+        metadata = {"name": "big-data-file.txt"}
         upload = _upload.ResumableUpload(
             sync_test.RESUMABLE_URL, sync_test.ONE_MB, headers=upload_headers
@@ -412,14 +410,14 @@ def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs)
         # Make sure the ``upload``-s state was updated.
         assert upload._stream == stream
         assert upload._content_type == sync_test.BASIC_CONTENT
-        if method_kwargs == {u"stream_final": False}:
+        if method_kwargs == {"stream_final": False}:
             assert upload._total_bytes is None
         else:
             assert upload._total_bytes == len(data)
         # Make sure headers are untouched.
         assert headers is not upload._headers
         assert upload._headers == orig_headers
-        assert method == u"POST"
+        assert method == "POST"
         assert url == upload.upload_url
         # Make sure the stream is still at the beginning.
         assert stream.tell() == 0
@@ -429,23 +427,23 @@ def _prepare_initiate_request_helper(self, upload_headers=None, **method_kwargs)

     def test__prepare_initiate_request(self):
         data, headers = self._prepare_initiate_request_helper()
         expected_headers = {
-            u"content-type": sync_test.JSON_TYPE,
-            u"x-upload-content-length": u"{:d}".format(len(data)),
-            u"x-upload-content-type": sync_test.BASIC_CONTENT,
+            "content-type": sync_test.JSON_TYPE,
+            "x-upload-content-length": "{:d}".format(len(data)),
+            "x-upload-content-type": sync_test.BASIC_CONTENT,
         }
         assert headers == expected_headers

     def test__prepare_initiate_request_with_headers(self):
-        headers = {u"caviar": u"beluga", u"top": u"quark"}
+        headers = {"caviar": "beluga", "top": "quark"}
         data, new_headers = self._prepare_initiate_request_helper(
             upload_headers=headers
         )
         expected_headers = {
-            u"caviar": u"beluga",
-            u"content-type": sync_test.JSON_TYPE,
-            u"top": u"quark",
-            u"x-upload-content-length": u"{:d}".format(len(data)),
-            u"x-upload-content-type": sync_test.BASIC_CONTENT,
+            "caviar": "beluga",
+            "content-type": sync_test.JSON_TYPE,
+            "top": "quark",
+            "x-upload-content-length": "{:d}".format(len(data)),
+            "x-upload-content-type": sync_test.BASIC_CONTENT,
         }
         assert new_headers == expected_headers
@@ -454,24 +452,24 @@ def test__prepare_initiate_request_known_size(self):
         data, headers = self._prepare_initiate_request_helper(total_bytes=total_bytes)
         assert len(data) == total_bytes
         expected_headers = {
-            u"content-type": u"application/json; charset=UTF-8",
-            u"x-upload-content-length": u"{:d}".format(total_bytes),
-            u"x-upload-content-type": sync_test.BASIC_CONTENT,
+            "content-type": "application/json; charset=UTF-8",
+            "x-upload-content-length": "{:d}".format(total_bytes),
+            "x-upload-content-type": sync_test.BASIC_CONTENT,
         }
         assert headers == expected_headers

     def test__prepare_initiate_request_unknown_size(self):
         _, headers = self._prepare_initiate_request_helper(stream_final=False)
         expected_headers = {
-            u"content-type": u"application/json; charset=UTF-8",
-            u"x-upload-content-type": sync_test.BASIC_CONTENT,
+            "content-type": "application/json; charset=UTF-8",
+            "x-upload-content-type": sync_test.BASIC_CONTENT,
         }
         assert headers == expected_headers

     def test__prepare_initiate_request_already_initiated(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
         # Fake that the upload has been started.
-        upload._resumable_url = u"http://test.invalid?upload_id=definitely-started"
+        upload._resumable_url = "http://test.invalid?upload_id=definitely-started"

         with pytest.raises(ValueError):
             upload._prepare_initiate_request(io.BytesIO(), {}, sync_test.BASIC_CONTENT)
@@ -506,7 +504,7 @@ def test__process_initiate_response(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
         _fix_up_virtual(upload)

-        headers = {u"location": u"http://test.invalid?upload_id=kmfeij3234"}
+        headers = {"location": "http://test.invalid?upload_id=kmfeij3234"}
         response = _make_response(headers=headers)
         # Check resumable_url before.
         assert upload._resumable_url is None
@@ -514,14 +512,14 @@ def test__process_initiate_response(self):
         ret_val = upload._process_initiate_response(response)
         assert ret_val is None
         # Check resumable_url after.
-        assert upload._resumable_url == headers[u"location"]
+        assert upload._resumable_url == headers["location"]

     def test_initiate(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
         with pytest.raises(NotImplementedError) as exc_info:
             upload.initiate(None, None, {}, sync_test.BASIC_CONTENT)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")

     def test__prepare_request_already_finished(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
@@ -530,7 +528,7 @@ def test__prepare_request_already_finished(self):
         with pytest.raises(ValueError) as exc_info:
             upload._prepare_request()

-        assert exc_info.value.args == (u"Upload has finished.",)
+        assert exc_info.value.args == ("Upload has finished.",)

     def test__prepare_request_invalid(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
@@ -539,8 +537,8 @@ def test__prepare_request_invalid(self):
         with pytest.raises(ValueError) as exc_info:
             upload._prepare_request()

-        assert exc_info.match(u"invalid state")
-        assert exc_info.match(u"recover()")
+        assert exc_info.match("invalid state")
+        assert exc_info.match("recover()")

     def test__prepare_request_not_initiated(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
@@ -550,21 +548,21 @@ def test__prepare_request_not_initiated(self):
         with pytest.raises(ValueError) as exc_info:
             upload._prepare_request()

-        assert exc_info.match(u"upload has not been initiated")
-        assert exc_info.match(u"initiate()")
+        assert exc_info.match("upload has not been initiated")
+        assert exc_info.match("initiate()")

     def test__prepare_request_invalid_stream_state(self):
         stream = io.BytesIO(b"some data here")
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
         upload._stream = stream
-        upload._resumable_url = u"http://test.invalid?upload_id=not-none"
+        upload._resumable_url = "http://test.invalid?upload_id=not-none"

         # Make stream.tell() disagree with bytes_uploaded.
         upload._bytes_uploaded = 5
         assert upload.bytes_uploaded != stream.tell()

         with pytest.raises(ValueError) as exc_info:
             upload._prepare_request()

-        assert exc_info.match(u"Bytes stream is in unexpected state.")
+        assert exc_info.match("Bytes stream is in unexpected state.")

     @staticmethod
     def _upload_in_flight(data, headers=None, checksum=None):
@@ -577,7 +575,7 @@ def _upload_in_flight(data, headers=None, checksum=None):
         upload._stream = io.BytesIO(data)
         upload._content_type = sync_test.BASIC_CONTENT
         upload._total_bytes = len(data)
-        upload._resumable_url = u"http://test.invalid?upload_id=not-none"
+        upload._resumable_url = "http://test.invalid?upload_id=not-none"
         return upload

     def _prepare_request_helper(self, headers=None):
@@ -585,7 +583,7 @@ def _prepare_request_helper(self, headers=None):
         upload = self._upload_in_flight(data, headers=headers)
         method, url, payload, new_headers = upload._prepare_request()
         # Check the response values.
-        assert method == u"PUT"
+        assert method == "PUT"
         assert url == upload.resumable_url
         assert payload == data
         # Make sure headers are **NOT** updated
@@ -596,22 +594,22 @@ def test__prepare_request_success(self):
         headers = self._prepare_request_helper()
         expected_headers = {
-            u"content-range": u"bytes 0-32/33",
-            u"content-type": sync_test.BASIC_CONTENT,
+            "content-range": "bytes 0-32/33",
+            "content-type": sync_test.BASIC_CONTENT,
         }
         assert headers == expected_headers

     def test__prepare_request_success_with_headers(self):
-        headers = {u"cannot": u"touch this"}
+        headers = {"cannot": "touch this"}
         new_headers = self._prepare_request_helper(headers)
         assert new_headers is not headers
         expected_headers = {
-            u"content-range": u"bytes 0-32/33",
-            u"content-type": sync_test.BASIC_CONTENT,
+            "content-range": "bytes 0-32/33",
+            "content-type": sync_test.BASIC_CONTENT,
         }
         assert new_headers == expected_headers
         # Make sure the ``_headers`` are not incorporated.
-        assert u"cannot" not in new_headers
+        assert "cannot" not in new_headers

     @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
     def test__prepare_request_with_checksum(self, checksum):
@@ -714,7 +712,7 @@ async def test__process_response_bad_status(self):
         # Make sure the upload is valid before the failure.
         assert not upload.invalid

-        response = _make_response(status_code=http_client.NOT_FOUND)
+        response = _make_response(status_code=http.client.NOT_FOUND)
         with pytest.raises(common.InvalidResponse) as exc_info:
             await upload._process_response(response, None)
@@ -722,7 +720,7 @@ async def test__process_response_bad_status(self):
         assert error.response is response
         assert len(error.args) == 5
         assert error.args[1] == response.status_code
-        assert error.args[3] == http_client.OK
+        assert error.args[3] == http.client.OK
         assert error.args[4] == _async_resumable_media.PERMANENT_REDIRECT
         # Make sure the upload is invalid after the failure.
         assert upload.invalid
@@ -740,11 +738,11 @@ async def test__process_response_success(self):
         # Set the response body.
         bytes_sent = 158
         total_bytes = upload._bytes_uploaded + bytes_sent
-        response_body = u'{{"size": "{:d}"}}'.format(total_bytes)
-        response_body = response_body.encode(u"utf-8")
+        response_body = '{{"size": "{:d}"}}'.format(total_bytes)
+        response_body = response_body.encode("utf-8")
         response = mock.Mock(
             content=response_body,
-            status_code=http_client.OK,
+            status_code=http.client.OK,
             spec=["content", "status_code"],
         )
         ret_val = await upload._process_response(response, bytes_sent)
@@ -770,7 +768,7 @@ async def test__process_response_partial_no_range(self):
         error = exc_info.value
         assert error.response is response
         assert len(error.args) == 2
-        assert error.args[1] == u"range"
+        assert error.args[1] == "range"

     @pytest.mark.asyncio
     async def test__process_response_partial_bad_range(self):
@@ -779,7 +777,7 @@ async def test__process_response_partial_bad_range(self):
         # Make sure the upload is valid before the failure.
         assert not upload.invalid

-        headers = {u"range": u"nights 1-81"}
+        headers = {"range": "nights 1-81"}
         response = _make_response(
             status_code=_async_resumable_media.PERMANENT_REDIRECT, headers=headers
         )
@@ -790,7 +788,7 @@ async def test__process_response_partial_bad_range(self):
         error = exc_info.value
         assert error.response is response
         assert len(error.args) == 3
-        assert error.args[1] == headers[u"range"]
+        assert error.args[1] == headers["range"]
         # Make sure the upload is invalid after the failure.
         assert upload.invalid

@@ -801,7 +799,7 @@ async def test__process_response_partial(self):
         # Check status before.
         assert upload._bytes_uploaded == 0

-        headers = {u"range": u"bytes=0-171"}
+        headers = {"range": "bytes=0-171"}
         response = _make_response(
             status_code=_async_resumable_media.PERMANENT_REDIRECT, headers=headers
         )
@@ -924,7 +922,7 @@ async def test__validate_checksum_mismatch(self, checksum):
         error = exc_info.value
         assert error.response is response
         message = error.args[0]
-        correct_checksums = {"crc32c": u"Qg8thA==", "md5": u"GRvfKbqr5klAOwLkxgIf8w=="}
+        correct_checksums = {"crc32c": "Qg8thA==", "md5": "GRvfKbqr5klAOwLkxgIf8w=="}
         metadata_key = sync_helpers._get_metadata_key(checksum)
         assert message == _upload._UPLOAD_CHECKSUM_MISMATCH_MESSAGE.format(
             checksum.upper(), correct_checksums[checksum], metadata[metadata_key]
@@ -935,7 +933,7 @@ def test_transmit_next_chunk(self):
         with pytest.raises(NotImplementedError) as exc_info:
             upload.transmit_next_chunk(None)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")

     def test__prepare_recover_request_not_invalid(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
@@ -949,29 +947,29 @@ def test__prepare_recover_request(self):
         upload._invalid = True

         method, url, payload, headers = upload._prepare_recover_request()
-        assert method == u"PUT"
+        assert method == "PUT"
         assert url == upload.resumable_url
         assert payload is None
-        assert headers == {u"content-range": u"bytes */*"}
+        assert headers == {"content-range": "bytes */*"}
         # Make sure headers are untouched.
         assert upload._headers == {}

     def test__prepare_recover_request_with_headers(self):
-        headers = {u"lake": u"ocean"}
+        headers = {"lake": "ocean"}
         upload = _upload.ResumableUpload(
             sync_test.RESUMABLE_URL, sync_test.ONE_MB, headers=headers
         )
         upload._invalid = True

         method, url, payload, new_headers = upload._prepare_recover_request()
-        assert method == u"PUT"
+        assert method == "PUT"
         assert url == upload.resumable_url
         assert payload is None
-        assert new_headers == {u"content-range": u"bytes */*"}
+        assert new_headers == {"content-range": "bytes */*"}
         # Make sure the ``_headers`` are not incorporated.
-        assert u"lake" not in new_headers
+        assert "lake" not in new_headers
         # Make sure headers are untouched.
-        assert upload._headers == {u"lake": u"ocean"}
+        assert upload._headers == {"lake": "ocean"}

     def test__process_recover_response_bad_status(self):
         upload = _upload.ResumableUpload(sync_test.RESUMABLE_URL, sync_test.ONE_MB)
@@ -979,7 +977,7 @@ def test__process_recover_response_bad_status(self):

         upload._invalid = True

-        response = _make_response(status_code=http_client.BAD_REQUEST)
+        response = _make_response(status_code=http.client.BAD_REQUEST)
         with pytest.raises(common.InvalidResponse) as exc_info:
             upload._process_recover_response(response)
@@ -1016,7 +1014,7 @@ def test__process_recover_response_bad_range(self):
         upload._stream = mock.Mock(spec=["seek"])
         upload._bytes_uploaded = mock.sentinel.not_zero

-        headers = {u"range": u"bites=9-11"}
+        headers = {"range": "bites=9-11"}
         response = _make_response(
             status_code=_async_resumable_media.PERMANENT_REDIRECT, headers=headers
         )
@@ -1026,7 +1024,7 @@ def test__process_recover_response_bad_range(self):
         error = exc_info.value
         assert error.response is response
         assert len(error.args) == 3
-        assert error.args[1] == headers[u"range"]
+        assert error.args[1] == headers["range"]
         # Check the state of ``upload`` after (untouched).
         assert upload.bytes_uploaded is mock.sentinel.not_zero
         assert upload.invalid

@@ -1042,7 +1040,7 @@ def test__process_recover_response_with_range(self):
         assert upload.bytes_uploaded != 0

         end = 11
-        headers = {u"range": u"bytes=0-{:d}".format(end)}
+        headers = {"range": "bytes=0-{:d}".format(end)}
         response = _make_response(
             status_code=_async_resumable_media.PERMANENT_REDIRECT, headers=headers
         )
@@ -1058,10 +1056,10 @@ def test_recover(self):
         with pytest.raises(NotImplementedError) as exc_info:
             upload.recover(None)

-        exc_info.match(u"virtual")
+        exc_info.match("virtual")


-@mock.patch(u"random.randrange", return_value=1234567890123456789)
+@mock.patch("random.randrange", return_value=1234567890123456789)
 def test_get_boundary(mock_rand):
     result = _upload.get_boundary()
     assert result == b"===============1234567890123456789=="
@@ -1070,12 +1068,12 @@ def test_get_boundary(mock_rand):

 class Test_construct_multipart_request(object):
     @mock.patch(
-        u"google._async_resumable_media._upload.get_boundary", return_value=b"==1=="
+        "google._async_resumable_media._upload.get_boundary", return_value=b"==1=="
     )
     def test_binary(self, mock_get_boundary):
         data = b"By nary day tuh"
-        metadata = {u"name": u"hi-file.bin"}
-        content_type = u"application/octet-stream"
+        metadata = {"name": "hi-file.bin"}
+        content_type = "application/octet-stream"
         payload, multipart_boundary = _upload.construct_multipart_request(
             data, metadata, content_type
         )
@@ -1094,13 +1092,13 @@ def test_binary(self, mock_get_boundary):
         mock_get_boundary.assert_called_once_with()

     @mock.patch(
-        u"google._async_resumable_media._upload.get_boundary", return_value=b"==2=="
+        "google._async_resumable_media._upload.get_boundary", return_value=b"==2=="
     )
     def test_unicode(self, mock_get_boundary):
-        data_unicode = u"\N{snowman}"
+        data_unicode = "\N{snowman}"
         # construct_multipart_request( ASSUMES callers pass bytes.
-        data = data_unicode.encode(u"utf-8")
-        metadata = {u"name": u"snowman.txt"}
+        data = data_unicode.encode("utf-8")
+        metadata = {"name": "snowman.txt"}
         content_type = sync_test.BASIC_CONTENT
         payload, multipart_boundary = _upload.construct_multipart_request(
             data, metadata, content_type
@@ -1145,7 +1143,7 @@ def test_exhausted_known_size(self):
         with pytest.raises(ValueError) as exc_info:
             _upload.get_next_chunk(stream, 1, len(data))

-        exc_info.match(u"Stream is already exhausted. There is no content remaining.")
+        exc_info.match("Stream is already exhausted. There is no content remaining.")

     def test_exhausted_known_size_zero(self):
         stream = io.BytesIO(b"")
@@ -1156,7 +1154,7 @@ def test_exhausted_known_size_zero_nonempty(self):
         stream = io.BytesIO(b"not empty WAT!")
         with pytest.raises(ValueError) as exc_info:
             _upload.get_next_chunk(stream, 1, 0)
-        exc_info.match(u"Stream specified as empty, but produced non-empty content.")
+        exc_info.match("Stream specified as empty, but produced non-empty content.")

     def test_success_known_size_lt_stream_size(self):
         data = b"0123456789"
@@ -1169,9 +1167,9 @@ def test_success_known_size_lt_stream_size(self):
         result1 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
         result2 = _upload.get_next_chunk(stream, chunk_size, total_bytes)

-        assert result0 == (0, b"012", u"bytes 0-2/8")
-        assert result1 == (3, b"345", u"bytes 3-5/8")
-        assert result2 == (6, b"67", u"bytes 6-7/8")
+        assert result0 == (0, b"012", "bytes 0-2/8")
+        assert result1 == (3, b"345", "bytes 3-5/8")
+        assert result2 == (6, b"67", "bytes 6-7/8")

     def test_success_known_size(self):
         data = b"0123456789"
@@ -1183,10 +1181,10 @@ def test_success_known_size(self):
         result1 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
         result2 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
         result3 = _upload.get_next_chunk(stream, chunk_size, total_bytes)
-        assert result0 == (0, b"012", u"bytes 0-2/10")
-        assert result1 == (3, b"345", u"bytes 3-5/10")
-        assert result2 == (6, b"678", u"bytes 6-8/10")
-        assert result3 == (9, b"9", u"bytes 9-9/10")
+        assert result0 == (0, b"012", "bytes 0-2/10")
+        assert result1 == (3, b"345", "bytes 3-5/10")
+        assert result2 == (6, b"678", "bytes 6-8/10")
+        assert result3 == (9, b"9", "bytes 9-9/10")
         assert stream.tell() == total_bytes

     def test_success_unknown_size(self):
@@ -1196,8 +1194,8 @@ def test_success_unknown_size(self):
         # Splits into 4 chunks: abcdef, ghij
         result0 = _upload.get_next_chunk(stream, chunk_size, None)
         result1 = _upload.get_next_chunk(stream, chunk_size, None)
-        assert result0 == (0, b"abcdef", u"bytes 0-5/*")
-        assert result1 == (chunk_size, b"ghij", u"bytes 6-9/10")
+        assert result0 == (0, b"abcdef", "bytes 0-5/*")
+        assert result1 == (chunk_size, b"ghij", "bytes 6-9/10")
         assert stream.tell() == len(data)

         # Do the same when the chunk size evenly divides len(data)
@@ -1206,22 +1204,22 @@ def test_success_unknown_size(self):
         # Splits into 2 chunks: `data` and empty string
         result0 = _upload.get_next_chunk(stream, chunk_size, None)
         result1 = _upload.get_next_chunk(stream, chunk_size, None)
-        assert result0 == (0, data, u"bytes 0-9/*")
-        assert result1 == (len(data), b"", u"bytes */10")
+        assert result0 == (0, data, "bytes 0-9/*")
+        assert result1 == (len(data), b"", "bytes */10")
         assert stream.tell() == len(data)


 class Test_get_content_range(object):
     def test_known_size(self):
         result = _upload.get_content_range(5, 10, 40)
-        assert result == u"bytes 5-10/40"
+        assert result == "bytes 5-10/40"

     def test_unknown_size(self):
         result = _upload.get_content_range(1000, 10000, None)
-        assert result == u"bytes 1000-10000/*"
+        assert result == "bytes 1000-10000/*"


-def _make_response(status_code=http_client.OK, headers=None):
+def _make_response(status_code=http.client.OK, headers=None):
     headers = headers or {}
     response = mock.AsyncMock(