Skip to content

Commit

Permalink
Merge 837dbc2 into a1d7f62
Browse files Browse the repository at this point in the history
  • Loading branch information
carycheng committed Oct 18, 2018
2 parents a1d7f62 + 837dbc2 commit d28b99e
Show file tree
Hide file tree
Showing 13 changed files with 596 additions and 3 deletions.
15 changes: 15 additions & 0 deletions boxsdk/client/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,21 @@ def file(self, file_id):
"""
return self.translator.translate('file')(session=self._session, object_id=file_id)

def upload_session(self, session_id):
    """
    Initialize a :class:`UploadSession` object, whose Box ID is session_id.

    This makes no API call; it constructs a local object that points at an
    existing chunked-upload session.

    :param session_id:
        The Box ID of the :class:`UploadSession` object.
    :type session_id:
        `unicode`
    :return:
        A :class:`UploadSession` object with the given session ID.
    :rtype:
        :class:`UploadSession`
    """
    return self.translator.get('upload_session')(session=self._session, object_id=session_id)

def comment(self, comment_id):
"""
Initialize a :class:`Comment` object, whose Box ID is comment_id.
Expand Down
1 change: 1 addition & 0 deletions boxsdk/object/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
'task',
'task_assignment',
'user',
'upload_session',
'webhook',
'watermark',
'web_link',
Expand Down
34 changes: 33 additions & 1 deletion boxsdk/object/file.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
# coding: utf-8

from __future__ import unicode_literals
from __future__ import unicode_literals, absolute_import

import json

from boxsdk.config import API
from .item import Item
from ..util.api_call_decorator import api_call
from ..pagination.marker_based_object_collection import MarkerBasedObjectCollection
Expand Down Expand Up @@ -36,6 +37,37 @@ def preflight_check(self, size, name=None):
file_id=self._object_id,
)

def create_upload_session(self, file_size, file_name=None):
    """
    Create a new chunked upload session for uploading a new version of the file.

    :param file_size:
        The size of the file in bytes that will be uploaded.
    :type file_size:
        `int`
    :param file_name:
        The new name of the file version that will be uploaded, or None to keep the existing name.
    :type file_name:
        `unicode` or None
    :returns:
        A :class:`ChunkedUploadSession` object.
    :rtype:
        :class:`ChunkedUploadSession`
    """
    body_params = {
        'file_id': self.object_id,
        'file_size': file_size,
    }
    if file_name is not None:
        body_params['file_name'] = file_name
    # Chunked-upload endpoints live on the upload host, not the API host.
    url = self.get_url('upload_sessions').replace(API.BASE_API_URL, API.UPLOAD_URL)
    response = self._session.post(url, data=json.dumps(body_params)).json()
    return self.translator.translate(response['type'])(
        session=self.session,
        object_id=response['id'],
        response_object=response,
    )

def _get_accelerator_upload_url_for_update(self):
"""
Get Accelerator upload url for updating the file.
Expand Down
31 changes: 31 additions & 0 deletions boxsdk/object/folder.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import os
from six import text_type

from boxsdk.config import API
from boxsdk.object.group import Group
from boxsdk.object.item import Item
from boxsdk.object.user import User
Expand Down Expand Up @@ -112,6 +113,36 @@ def preflight_check(self, size, name):
parent_id=self._object_id,
)

def create_upload_session(self, file_size, file_name):
    """
    Create a new chunked upload session for uploading a new file into this folder.

    :param file_size:
        The size of the file in bytes that will be uploaded.
    :type file_size:
        `int`
    :param file_name:
        The name of the file that will be uploaded.
    :type file_name:
        `unicode`
    :returns:
        A :class:`ChunkedUploadSession` object.
    :rtype:
        :class:`ChunkedUploadSession`
    """
    # Session creation goes straight to the upload host; there is no
    # folder-scoped URL for this endpoint, so it is built from the base.
    url = '{0}/files/upload_sessions'.format(API.UPLOAD_URL)
    body_params = {
        'folder_id': self.object_id,
        'file_size': file_size,
        'file_name': file_name,
    }
    # NOTE(review): relies on a module-level ``import json`` in folder.py,
    # which is not visible in this diff — confirm it is present.
    response = self._session.post(url, data=json.dumps(body_params)).json()
    return self.translator.translate(response['type'])(
        session=self.session,
        object_id=response['id'],
        response_object=response,
    )

def _get_accelerator_upload_url_fow_new_uploads(self):
"""
Get Accelerator upload url for uploading new files.
Expand Down
163 changes: 163 additions & 0 deletions boxsdk/object/upload_session.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,163 @@
# coding: utf-8
from __future__ import unicode_literals, absolute_import

import base64
import hashlib
import json

from .base_object import BaseObject
from ..config import API
from ..pagination.chunked_upload_part_limit_offset_based_object_collection import ChunkedUploadPartLimitOffsetBasedObjectCollection


class UploadSession(BaseObject):
    """Represents a Box chunked-upload session for a single file."""

    _item_type = 'upload_session'
    _parent_item_type = 'file'

    def get_url(self, *args):
        """
        Base class override. Endpoint is a little different - it's /files/upload_sessions.

        :rtype:
            `unicode`
        """
        # Upload-session URLs are served from the upload host, so swap the
        # API base URL for the upload base URL after building the path.
        return self.session.get_url(
            '{0}s/{1}s'.format(self._parent_item_type, self._item_type),
            self._object_id,
            *args
        ).replace(API.BASE_API_URL, API.UPLOAD_URL)

    def get_parts(self, limit=None, offset=None, fields=None):
        """
        Get a list of parts uploaded so far.

        :param limit:
            The maximum number of items to return per page. If not specified, then will use the server-side default.
        :type limit:
            `int` or None
        :param offset:
            The index at which to start returning items.
        :type offset:
            `int` or None
        :param fields:
            Fields to include on the returned items.
        :type fields:
            `Iterable` of `unicode`
        :returns:
            An iterator of the parts uploaded so far (each part is a `dict`).
        :rtype:
            :class:`ChunkedUploadPartLimitOffsetBasedObjectCollection`
        """
        return ChunkedUploadPartLimitOffsetBasedObjectCollection(
            session=self.session,
            url=self.get_url('parts'),
            limit=limit,
            fields=fields,
            offset=offset,
            return_full_pages=False,
        )

    def upload_part(self, part_bytes, offset, total_size, part_content_sha1=None):
        """
        Upload a part of a file.

        :param part_bytes:
            Part bytes
        :type part_bytes:
            `bytes`
        :param offset:
            Offset, in number of bytes, of the part compared to the beginning of the file.
        :type offset:
            `int`
        :param total_size:
            The size of the file that this part belongs to.
        :type total_size:
            `int`
        :param part_content_sha1:
            Binary SHA-1 digest of the part's content. If not specified, this will be calculated.
            Must be raw digest bytes (not a hex string), since it is base64-encoded below.
        :type part_content_sha1:
            `bytes`
        :returns:
            The uploaded part.
        :rtype:
            `dict`
        """
        if part_content_sha1 is None:
            sha1 = hashlib.sha1()
            sha1.update(part_bytes)
            part_content_sha1 = sha1.digest()

        # The last part may be shorter than part_size; clamp the range end to the file size.
        # part_size comes from the API response object. pylint:disable=no-member
        range_end = min(offset + self.part_size - 1, total_size - 1)  # pylint:disable=no-member

        return self._session.put(
            self.get_url(),
            headers={
                'Content-Type': 'application/octet-stream',
                'Digest': 'SHA={0}'.format(base64.b64encode(part_content_sha1).decode('utf-8')),
                'Content-Range': 'bytes {0}-{1}/{2}'.format(offset, range_end, total_size),
            },
            data=part_bytes
        )

    def commit(self, content_sha1, parts=None, file_attributes=None, etag=None):
        """
        Commit a multiput upload.

        :param content_sha1:
            Binary SHA-1 digest of the file contents that were uploaded.
        :type content_sha1:
            `bytes`
        :param parts:
            List of parts that were uploaded. If None, the part list is fetched from the API.
        :type parts:
            `Iterable` of `dict` or None
        :param file_attributes:
            A `dict` of attributes to set on the uploaded file.
        :type file_attributes:
            `dict`
        :param etag:
            If specified, instruct the Box API to create the file only if the current version's etag matches.
        :type etag:
            `unicode` or None
        :returns:
            A :class:`File` object.
        :rtype:
            :class:`File`
        """
        body = {}
        if file_attributes is not None:
            body['attributes'] = file_attributes
        if parts is None:
            # Fetch the server-side part list when the caller didn't track it.
            body['parts'] = list(self.get_parts())
        else:
            body['parts'] = parts
        headers = {
            'Content-Type': 'application/json',
            'Digest': 'SHA={0}'.format(base64.b64encode(content_sha1).decode('utf-8')),
        }
        if etag is not None:
            headers['If-Match'] = etag
        response = self._session.post(
            self.get_url('commit'),
            headers=headers,
            data=json.dumps(body),
        ).json()
        entry = response['entries'][0]
        return self.translator.translate(entry['type'])(
            session=self.session,
            object_id=entry['id'],
            response_object=entry,
        )

    def abort(self):
        """
        Abort an upload session, cancelling the upload and removing any parts that have already been uploaded.

        :returns:
            A boolean indicating success of the upload abort.
        :rtype:
            `bool`
        """
        response = self._session.delete(self.get_url())
        return response.ok
4 changes: 3 additions & 1 deletion boxsdk/pagination/box_object_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ class BoxObjectCollection(collections.Iterator, object):
will be used to retrieve the next page of Box objects. This pointer can be used when requesting new
BoxObjectCollection instances that start off from a particular page, instead of from the very beginning.
"""
_page_constructor = Page

def __init__(
self,
session,
Expand Down Expand Up @@ -101,7 +103,7 @@ def _items_generator(self):

self._update_pointer_to_next_page(response_object)
self._has_retrieved_all_items = not self._has_more_pages(response_object)
page = Page(self._session, response_object)
page = self._page_constructor(self._session, response_object)

if self._return_full_pages:
yield page
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# coding: utf-8

from __future__ import unicode_literals, absolute_import

from .chunked_upload_part_page import ChunkedUploadPartPage
from .limit_offset_based_object_collection import LimitOffsetBasedObjectCollection


class ChunkedUploadPartLimitOffsetBasedObjectCollection(LimitOffsetBasedObjectCollection):
    """
    Limit/offset-paged collection of chunked-upload parts.

    Overrides the page class so entries come back as raw `dict`s
    (see :class:`ChunkedUploadPartPage`) instead of translated API objects.
    """
    _page_constructor = ChunkedUploadPartPage
12 changes: 12 additions & 0 deletions boxsdk/pagination/chunked_upload_part_page.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# coding: utf-8

from __future__ import unicode_literals, absolute_import

from .page import Page


class ChunkedUploadPartPage(Page):
    """
    A page of chunked-upload parts.

    Unlike the base :class:`Page`, indexing returns the raw `dict` entry from
    the response object rather than a translated API object.
    """

    def __getitem__(self, key):
        entries = self._response_object[self._item_entries_key_name]
        return entries[key]
Loading

0 comments on commit d28b99e

Please sign in to comment.