
Commit

Merge pull request #1516 from noirbizarre/uploads-tuning
Uploads tuning
noirbizarre committed Mar 20, 2018
2 parents 21c9d45 + 3b79bea commit a6c5279
Showing 4 changed files with 48 additions and 12 deletions.
CHANGELOG.md (2 changes: 1 addition & 1 deletion)
@@ -2,7 +2,7 @@

## Current (in progress)

- Nothing yet
- Fixes on upload: prevent double upload and bad chunks upload [#1516](https://github.com/opendatateam/udata/pull/1516)

## 1.3.2 (2018-03-20)

js/mixins/uploader.js (19 changes: 9 additions & 10 deletions)
@@ -50,6 +50,15 @@ export default {
};
},
ready() {
this.$dnd = new qq.DragAndDrop({
dropZoneElements: [this.$el],
classes: {
dropActive: this.$options.dropActive || 'drop-active'
},
callbacks: {
processingDroppedFilesComplete: this.on_dropped_files_complete
}
});
this._build_uploader();
},

@@ -123,16 +132,6 @@ export default {
messages: messages,
validation: {allowedExtensions: allowedExtensions.items}
});

this.$dnd = new qq.DragAndDrop({
dropZoneElements: [this.$el],
classes: {
dropActive: this.$options.dropActive || 'drop-active'
},
callbacks: {
processingDroppedFilesComplete: this.on_dropped_files_complete
}
});
},

/**
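In this file, the qq.DragAndDrop setup moves out of _build_uploader() and into the ready() hook, so the drop zone on this.$el is registered once when the component is ready rather than every time the uploader is rebuilt; re-registering the drop zone on each rebuild is the likely source of the double uploads mentioned in the changelog entry above.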
udata/core/storages/api.py (12 changes: 11 additions & 1 deletion)
@@ -83,7 +83,17 @@ def chunk_filename(uuid, part):
return os.path.join(str(uuid), str(part))


def get_file_size(file):
file.seek(0, os.SEEK_END)
size = file.tell()
file.seek(0)
return size


def save_chunk(file, args):
# Check file size
if get_file_size(file) != args['chunksize']:
raise UploadProgress(ok=False, error='Chunk size mismatch')
filename = chunk_filename(args['uuid'], args['partindex'])
chunks.save(file, filename=filename)
meta_filename = chunk_filename(args['uuid'], META)
@@ -93,6 +103,7 @@ def save_chunk(file, args):
'totalparts': args['totalparts'],
'lastchunk': datetime.now(),
}), overwrite=True)
raise UploadProgress()


def combine_chunks(storage, args, prefix=None):
@@ -123,7 +134,6 @@ def handle_upload(storage, prefix=None):
if is_chunk:
if uploaded_file:
save_chunk(uploaded_file, args)
raise UploadProgress()
else:
filename = combine_chunks(storage, args, prefix=prefix)
elif not uploaded_file:
udata/tests/test_storages.py (27 changes: 27 additions & 0 deletions)
@@ -155,6 +155,33 @@ def test_chunked_upload(self, client):
assert storages.tmp.read(response.json['filename']) == 'aaaa'
assert list(storages.chunks.list_files()) == []

def test_chunked_upload_bad_chunk(self, client):
client.login()
url = url_for('storage.upload', name='tmp')
uuid = str(uuid4())
parts = 4

response = client.post(url, {
'file': (StringIO(b'a'), 'blob'),
'uuid': uuid,
'filename': 'test.txt',
'partindex': 0,
'partbyteoffset': 0,
'totalfilesize': parts,
'totalparts': parts,
'chunksize': 10, # Does not match
})

assert400(response)
assert not response.json['success']
assert 'filename' not in response.json
assert 'url' not in response.json
assert 'size' not in response.json
assert 'sha1' not in response.json
assert 'url' not in response.json

assert list(storages.chunks.list_files()) == []

def test_upload_resource_bad_request(self, client):
client.login()
response = client.post(
