fix: UploadSession.commit returns None when retry limit was reached #696

Merged
merged 10 commits on Feb 11, 2022
10 changes: 6 additions & 4 deletions boxsdk/object/upload_session.py
@@ -99,7 +99,7 @@ def commit(
             parts: Iterable[Optional[dict]] = None,
             file_attributes: dict = None,
             etag: Optional[str] = None
-    ) -> 'File':
+    ) -> Optional['File']:
         """
         Commit a multiput upload.

@@ -112,7 +112,7 @@ def commit(
         :param etag:
             If specified, instruct the Box API to commit the upload only if the current version's etag matches.
         :returns:
-            The newly-uploaded file object.
+            The newly-uploaded file object, or None if the commit was not processed.
         """
         body = {}
         if file_attributes is not None:
@@ -131,8 +131,10 @@
             self.get_url('commit'),
             headers=headers,
             data=json.dumps(body),
-        ).json()
-        entry = response['entries'][0]
+        )
+        if response.status_code == 202:
+            return None
+        entry = response.json()['entries'][0]
         return self.translator.translate(
             session=self._session,
             response_object=entry,
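For illustration only (not part of this diff), a minimal sketch of what the new commit() contract means for a direct caller; upload_session, file_bytes, and parts are assumed to exist from earlier session setup:

    import hashlib

    sha1 = hashlib.sha1()
    sha1.update(file_bytes)  # assumption: the complete content that was uploaded in parts
    uploaded_file = upload_session.commit(content_sha1=sha1.digest(), parts=parts)
    if uploaded_file is None:
        # Box kept answering 202 (parts still processing) until the SDK's retry
        # limit was reached, so the commit was not processed. Retry later, for
        # example via ChunkedUploader.resume().
        pass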
15 changes: 9 additions & 6 deletions boxsdk/util/chunked_uploader.py
@@ -1,5 +1,5 @@
 import hashlib
-from typing import TYPE_CHECKING, IO
+from typing import TYPE_CHECKING, IO, Optional
 from boxsdk.exception import BoxException

 if TYPE_CHECKING:
@@ -31,25 +31,28 @@ def __init__(self, upload_session: 'UploadSession', content_stream: IO, file_siz
         self._inflight_part = None
         self._is_aborted = False

-    def start(self) -> 'File':
+    def start(self) -> Optional['File']:
         """
-        Starts the process of chunk uploading a file.
+        Starts the process of chunk uploading a file. Returns the uploaded file, or None
+        if the commit was not processed; call ChunkedUploader.resume to retry committing the upload.

         :returns:
-            An uploaded :class:`File`
+            An uploaded :class:`File`, or None if the commit was not processed.
         """
         if self._is_aborted:
             raise BoxException('The upload has been previously aborted. Please retry upload with a new upload session.')
         self._upload()
         content_sha1 = self._sha1.digest()
         return self._upload_session.commit(content_sha1=content_sha1, parts=self._part_array)

-    def resume(self) -> 'File':
+    def resume(self) -> Optional['File']:
         """
-        Resumes the process of chunk uploading a file from where upload failed.
+        Resumes the process of chunk uploading a file from where the upload failed.
+        Returns the uploaded file, or None if the commit was not processed; call resume again to retry.

         :returns:
-            An uploaded :class:`File`
+            An uploaded :class:`File`, or None if the commit was not processed.
         """
         if self._is_aborted:
             raise BoxException('The upload has been previously aborted. Please retry upload with a new upload session.')
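A hedged usage sketch of the resulting ChunkedUploader flow; client, folder_id, and file_path are placeholders, and get_chunked_uploader is the SDK's documented helper for files on disk:

    chunked_uploader = client.folder(folder_id).get_chunked_uploader(file_path)
    uploaded_file = chunked_uploader.start()
    if uploaded_file is None:
        # The parts were uploaded but the commit was not processed before the
        # retry limit was reached; resume() re-fetches the uploaded parts and
        # retries the commit.
        uploaded_file = chunked_uploader.resume()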
60 changes: 60 additions & 0 deletions test/unit/object/test_chunked_upload.py
@@ -288,3 +288,63 @@ def test_abort_with_resume(mock_upload_session):
         pass
     mock_upload_session.abort.assert_called_once_with()
     assert is_aborted is True
+
+
+def test_resume_after_commit_failed(test_file, mock_upload_session):
+    # given
+    file_size = 7
+    part_bytes = b'abcdefg'
+    stream = io.BytesIO(part_bytes)
+    first_part = {
+        'part_id': 'CFEB4BA9',
+        'offset': 0,
+        'size': 2,
+        'sha1': '2iNhTgJGmg18e9G9q1ycR0sZBNw=',
+    }
+    second_part = {
+        'part_id': '4DBB872D',
+        'offset': 2,
+        'size': 2,
+        'sha1': 'A0d4GYoEXB7YC+JxzdApt2h09vw=',
+    }
+    third_part = {
+        'part_id': '6F2D3486',
+        'offset': 4,
+        'size': 2,
+        'sha1': '+CIFFHGVe3u+u4qwiP6b1tFPQmE=',
+    }
+    fourth_part = {
+        'part_id': '4DBC872D',
+        'offset': 6,
+        'size': 1,
+        'sha1': 'VP0XESCfscB4EJI3QTLGbnniJBs=',
+    }
+    parts = [first_part, second_part, third_part, fourth_part]
+    mock_iterator = MagicMock(LimitOffsetBasedDictCollection)
+    mock_iterator.__iter__.return_value = [first_part, second_part, third_part, fourth_part]
+    mock_upload_session.get_parts.return_value = mock_iterator
+    mock_upload_session.commit.side_effect = [None, test_file]
+    chunked_uploader = ChunkedUploader(mock_upload_session, stream, file_size)
+    mock_upload_session.upload_part_bytes.side_effect = [
+        first_part,
+        second_part,
+        third_part,
+        fourth_part
+    ]
+
+    # when
+    uploaded_file = chunked_uploader.start()
+
+    # then
+    assert uploaded_file is None
+
+    # when
+    uploaded_file = chunked_uploader.resume()
+
+    # then
+    expected_call = call(
+        content_sha1=b'/\xb5\xe14\x19\xfc\x89$he\xe7\xa3$\xf4v\xecbN\x87@',
+        parts=parts
+    )
+    mock_upload_session.commit.assert_has_calls([expected_call, expected_call])
+    assert uploaded_file is test_file
38 changes: 38 additions & 0 deletions test/unit/object/test_upload_session.py
@@ -179,6 +179,44 @@ def test_commit_with_missing_params(test_upload_session, mock_box_session):
     assert created_file.type == file_type


+def test_commit_returns_none_when_202_is_returned(test_upload_session, mock_box_session):
+    expected_url = f'{API.UPLOAD_URL}/files/upload_sessions/{test_upload_session.object_id}/commit'
+    sha1 = hashlib.sha1()
+    sha1.update(b'fake_file_data')
+    file_etag = '7'
+    file_attributes = {'description': 'This is a test description.'}
+    parts = [
+        {
+            'part_id': 'ABCDEF123',
+            'offset': 0,
+            'size': 8,
+            'sha1': 'fake_sha1',
+        },
+        {
+            'part_id': 'ABCDEF456',
+            'offset': 8,
+            'size': 8,
+            'sha1': 'fake_sha1',
+        },
+    ]
+    expected_data = {
+        'attributes': file_attributes,
+        'parts': parts,
+    }
+    expected_headers = {
+        'Content-Type': 'application/json',
+        'Digest': f'SHA={base64.b64encode(sha1.digest()).decode("utf-8")}',
+        'If-Match': '7',
+    }
+    mock_box_session.post.return_value.status_code = 202
+    mock_box_session.post.return_value.json.return_value = b''
+
+    created_file = test_upload_session.commit(content_sha1=sha1.digest(), parts=parts, file_attributes=file_attributes, etag=file_etag)
+
+    mock_box_session.post.assert_called_once_with(expected_url, data=json.dumps(expected_data), headers=expected_headers)
+    assert created_file is None
+
+
 def test_get_chunked_uploader_for_stream(test_upload_session):
     file_size = 197520
     part_bytes = b'abcdefgh'