diff --git a/baselayer b/baselayer
index dfdcda6..7ed580f 160000
--- a/baselayer
+++ b/baselayer
@@ -1 +1 @@
-Subproject commit dfdcda66601f0f78d6d5f1b201b5278c71fc29dd
+Subproject commit 7ed580fce863f69fc0095e843aefbd3caa7ab003
diff --git a/cesium_app/handlers/dataset.py b/cesium_app/handlers/dataset.py
index 2620d8b..207be29 100644
--- a/cesium_app/handlers/dataset.py
+++ b/cesium_app/handlers/dataset.py
@@ -1,5 +1,6 @@
 from baselayer.app.handlers.base import BaseHandler
 from baselayer.app.custom_exceptions import AccessError
+from baselayer.app.access import auth_or_token
 from ..models import DBSession, Project, Dataset, DatasetFile
 from .. import util
 
@@ -9,39 +10,52 @@
 import os
 from os.path import join as pjoin
 import uuid
-
-import tornado.web
+import base64
+import tarfile
 
 
 class DatasetHandler(BaseHandler):
-    @tornado.web.authenticated
+    @auth_or_token
     def post(self):
-        if not 'tarFile' in self.request.files:
+        data = self.get_json()
+        if not 'tarFile' in data:
             return self.error('No tar file uploaded')
 
-        zipfile = self.request.files['tarFile'][0]
+        zipfile = data['tarFile']
+        tarball_content_type_str = 'data:application/gzip;base64,'
+
+        if not zipfile['body'].startswith(tarball_content_type_str):
+            return self.error('Invalid tar file - please ensure file is gzip '
+                              'format.')
 
-        if zipfile.filename == '':
+        if zipfile['name'] == '':
             return self.error('Empty tar file uploaded')
 
-        dataset_name = self.get_argument('datasetName')
-        project_id = self.get_argument('projectID')
+        dataset_name = data['datasetName']
+        project_id = data['projectID']
 
         zipfile_name = (str(uuid.uuid4()) + "_" +
-                        util.secure_filename(zipfile.filename))
+                        util.secure_filename(zipfile['name']))
         zipfile_path = pjoin(self.cfg['paths:upload_folder'], zipfile_name)
 
         with open(zipfile_path, 'wb') as f:
-            f.write(zipfile['body'])
+            f.write(base64.b64decode(
+                zipfile['body'].replace(tarball_content_type_str, '')))
+
+        try:
+            tarfile.open(zipfile_path)
+        except tarfile.ReadError:
+            os.remove(zipfile_path)
+            return self.error('Invalid tar file - please ensure file is gzip '
+                              'format.')
 
         # Header file is optional for unlabled data w/o metafeatures
-        if 'headerFile' in self.request.files:
-            headerfile = self.request.files['headerFile'][0]
+        if 'headerFile' in data:
+            headerfile = data['headerFile']
             headerfile_name = (str(uuid.uuid4()) + "_" +
-                               util.secure_filename(headerfile.filename))
+                               util.secure_filename(headerfile['name']))
             headerfile_path = pjoin(self.cfg['paths:upload_folder'],
                                     headerfile_name)
 
-            with open(headerfile_path, 'wb') as f:
+            with open(headerfile_path, 'w') as f:
                 f.write(headerfile['body'])
 
         else:
@@ -67,7 +81,7 @@ def post(self):
 
         return self.success(d, 'cesium/FETCH_DATASETS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     def get(self, dataset_id=None):
         if dataset_id is not None:
             dataset = Dataset.get_if_owned_by(dataset_id, self.current_user)
@@ -79,7 +93,7 @@ def get(self, dataset_id=None):
 
         return self.success(dataset_info)
 
-    @tornado.web.authenticated
+    @auth_or_token
     def delete(self, dataset_id):
         d = Dataset.get_if_owned_by(dataset_id, self.current_user)
         DBSession().delete(d)
diff --git a/cesium_app/handlers/feature.py b/cesium_app/handlers/feature.py
index cc9d12e..0a94737 100644
--- a/cesium_app/handlers/feature.py
+++ b/cesium_app/handlers/feature.py
@@ -1,11 +1,11 @@
 import tornado.ioloop
-import tornado.web
 
 from cesium import featurize, time_series
 from cesium.features import dask_feature_graph
 
 from baselayer.app.handlers.base import BaseHandler
 from baselayer.app.custom_exceptions import AccessError
+from baselayer.app.access import auth_or_token
 from ..models import DBSession, Dataset, Featureset, Project
 
 from os.path import join as pjoin
@@ -14,7 +14,7 @@
 
 
 class FeatureHandler(BaseHandler):
-    @tornado.web.authenticated
+    @auth_or_token
     def get(self, featureset_id=None):
         if featureset_id is not None:
             featureset_info = Featureset.get_if_owned_by(featureset_id,
@@ -25,7 +25,7 @@ def get(self, featureset_id=None):
 
         self.success(featureset_info)
 
-    @tornado.web.authenticated
+    @auth_or_token
     async def _await_featurization(self, future, fset):
         """Note: we cannot use self.error / self.success here. There is
         no longer an active, open request by the time this happens!
@@ -53,7 +53,7 @@ async def _await_featurization(self, future, fset):
 
         self.action('cesium/FETCH_FEATURESETS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     async def post(self):
         data = self.get_json()
         featureset_name = data.get('featuresetName', '')
@@ -104,14 +104,14 @@ async def post(self):
 
         self.success(fset, 'cesium/FETCH_FEATURESETS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     def delete(self, featureset_id):
         f = Featureset.get_if_owned_by(featureset_id, self.current_user)
         DBSession().delete(f)
         DBSession().commit()
         self.success(action='cesium/FETCH_FEATURESETS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     def put(self, featureset_id):
         f = Featureset.get_if_owned_by(featureset_id, self.current_user)
         self.error("Functionality for this endpoint is not yet implemented.")
diff --git a/cesium_app/handlers/model.py b/cesium_app/handlers/model.py
index fe6dc65..5aa4fe1 100644
--- a/cesium_app/handlers/model.py
+++ b/cesium_app/handlers/model.py
@@ -1,5 +1,6 @@
 from baselayer.app.handlers.base import BaseHandler
 from baselayer.app.custom_exceptions import AccessError
+from baselayer.app.access import auth_or_token
 from ..models import DBSession, Project, Model, Featureset
 from ..ext.sklearn_models import (
     model_descriptions as sklearn_model_descriptions,
@@ -16,7 +17,7 @@
 import joblib
 
 import tornado.ioloop
-import tornado.web
+
 
 def _build_model_compute_statistics(fset_path, model_type, model_params,
                                     params_to_optimize, model_path):
@@ -70,7 +71,7 @@ def _build_model_compute_statistics(fset_path, model_type, model_params,
 
 
 class ModelHandler(BaseHandler):
-    @tornado.web.authenticated
+    @auth_or_token
     def get(self, model_id=None):
         if model_id is not None:
             model_info = Model.get_if_owned_by(model_id, self.current_user)
@@ -80,7 +81,7 @@ def get(self, model_id=None):
 
         return self.success(model_info)
 
-    @tornado.web.authenticated
+    @auth_or_token
     async def _await_model_statistics(self, model_stats_future, model):
         try:
             score, best_params = await model_stats_future
@@ -105,7 +106,7 @@ async def _await_model_statistics(self, model_stats_future, model):
 
         self.action('cesium/FETCH_MODELS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     async def post(self):
         data = self.get_json()
 
@@ -152,7 +153,7 @@ async def post(self):
         return self.success(data={'message': "Model training begun."},
                             action='cesium/FETCH_MODELS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     def delete(self, model_id):
         m = Model.get_if_owned_by(model_id, self.current_user)
         DBSession().delete(m)
diff --git a/cesium_app/handlers/plot_features.py b/cesium_app/handlers/plot_features.py
index 1ceeda2..1f04ab9 100644
--- a/cesium_app/handlers/plot_features.py
+++ b/cesium_app/handlers/plot_features.py
@@ -2,8 +2,6 @@
 from .. import plot
 from ..models import Featureset
 
-import tornado.web
-
 
 class PlotFeaturesHandler(BaseHandler):
     def get(self, featureset_id):
diff --git a/cesium_app/handlers/prediction.py b/cesium_app/handlers/prediction.py
index 42d195c..406f102 100644
--- a/cesium_app/handlers/prediction.py
+++ b/cesium_app/handlers/prediction.py
@@ -1,10 +1,10 @@
 from baselayer.app.handlers.base import BaseHandler
 from baselayer.app.custom_exceptions import AccessError
+from baselayer.app.access import auth_or_token
 from ..models import DBSession, Prediction, Dataset, Model, Project
 from .. import util
 
 import tornado.gen
-from tornado.web import RequestHandler
 from tornado.escape import json_decode
 
 from cesium import featurize, time_series
@@ -51,7 +51,7 @@ async def _await_prediction(self, future, prediction):
 
         self.action('cesium/FETCH_PREDICTIONS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     async def post(self):
         data = self.get_json()
 
@@ -118,7 +118,7 @@ async def post(self):
         return self.success(prediction.display_info(),
                             'cesium/FETCH_PREDICTIONS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     def get(self, prediction_id=None, action=None):
         if action == 'download':
             pred_path = Prediction.get_if_owned_by(prediction_id,
@@ -149,7 +149,7 @@ def get(self, prediction_id=None, action=None):
 
         return self.success(prediction_info)
 
-    @tornado.web.authenticated
+    @auth_or_token
     def delete(self, prediction_id):
         prediction = Prediction.get_if_owned_by(prediction_id,
                                                 self.current_user)
@@ -159,7 +159,7 @@ def delete(self, prediction_id):
 
 
 class PredictRawDataHandler(BaseHandler):
-    @tornado.web.authenticated
+    @auth_or_token
     def post(self):
         ts_data = json_decode(self.get_argument('ts_data'))
         model_id = json_decode(self.get_argument('modelID'))
diff --git a/cesium_app/handlers/project.py b/cesium_app/handlers/project.py
index 9e321fd..cfbe9be 100644
--- a/cesium_app/handlers/project.py
+++ b/cesium_app/handlers/project.py
@@ -1,11 +1,11 @@
 from baselayer.app.handlers.base import BaseHandler
 from baselayer.app.custom_exceptions import AccessError
+from baselayer.app.access import auth_or_token
 from ..models import DBSession, Project
-import tornado.web
 
 
 class ProjectHandler(BaseHandler):
-    @tornado.web.authenticated
+    @auth_or_token
     def get(self, project_id=None):
         if project_id is not None:
             proj_info = Project.get_if_owned_by(project_id, self.current_user)
@@ -14,7 +14,7 @@ def get(self, project_id=None):
 
         return self.success(proj_info)
 
-    @tornado.web.authenticated
+    @auth_or_token
     def post(self):
         data = self.get_json()
 
@@ -26,7 +26,7 @@ def post(self):
 
         return self.success({"id": p.id}, 'cesium/FETCH_PROJECTS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     def put(self, project_id):
         # This ensures that the user has access to the project they
         # want to modify
@@ -39,7 +39,7 @@ def put(self, project_id):
 
         return self.success(action='cesium/FETCH_PROJECTS')
 
-    @tornado.web.authenticated
+    @auth_or_token
     def delete(self, project_id):
         p = Project.get_if_owned_by(project_id, self.current_user)
         DBSession().delete(p)
diff --git a/cesium_app/models.py b/cesium_app/models.py
index d2c8346..f538d17 100644
--- a/cesium_app/models.py
+++ b/cesium_app/models.py
@@ -4,7 +4,8 @@
 import sqlalchemy as sa
 from sqlalchemy.orm import relationship
 
-from baselayer.app.models import (init_db, join_model, Base, DBSession, User)
+from baselayer.app.models import (init_db, join_model, Base, DBSession, User,
+                                  Token)
 
 from cesium import featurize
 
diff --git a/static/js/actions.js b/static/js/actions.js
index 19c6b2b..4483524 100644
--- a/static/js/actions.js
+++ b/static/js/actions.js
@@ -186,14 +186,18 @@ export function deleteProject(id) {
 }
 
 
 export function uploadDataset(form) {
-  const formData = new FormData();
-  for (const key in form) {
-    if (form[key] && objectType(form[key][0]) === 'File') {
-      formData.append(key, form[key][0]);
-    } else {
-      formData.append(key, form[key]);
-    }
+  function fileReaderPromise(form, fileName, binary = false){
+    return new Promise(resolve => {
+      var filereader = new FileReader();
+      if (binary) {
+        filereader.readAsDataURL(form[fileName][0]);
+      } else {
+        filereader.readAsText(form[fileName][0]);
+      }
+      filereader.onloadend = () => resolve({ body: filereader.result,
+                                             name: form[fileName][0].name });
+    });
   }
 
   return dispatch =>
@@ -201,11 +205,21 @@ export function uploadDataset(form) {
       dispatch,
       UPLOAD_DATASET,
 
-      fetch('/dataset', {
-        credentials: 'same-origin',
-        method: 'POST',
-        body: formData
-      })
+      Promise.all([fileReaderPromise(form, 'headerFile'),
+                   fileReaderPromise(form, 'tarFile', true)])
+        .then(([headerData, tarData]) => {
+          form['headerFile'] = headerData;
+          form['tarFile'] = tarData;
+
+          return fetch('/dataset', {
+            credentials: 'same-origin',
+            method: 'POST',
+            body: JSON.stringify(form),
+            headers: new Headers({
+              'Content-Type': 'application/json'
+            })
+          })
+        })
        .then(response => response.json())
        .then((json) => {
          if (json.status == 'success') {
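
Note (not part of the patch): with this change the dataset tarball travels as a base64 data URL inside a JSON body instead of multipart form data; DatasetHandler.post strips the data:application/gzip;base64, prefix, decodes the payload to disk, and rejects anything tarfile cannot read. A minimal standalone sketch of that decode-and-validate flow under the same assumptions; the helper name decode_tarball and its arguments are illustrative, not identifiers from this diff:

    import base64
    import os
    import tarfile

    # Same prefix string that DatasetHandler.post checks for above.
    TARBALL_PREFIX = 'data:application/gzip;base64,'


    def decode_tarball(data_url, out_path):
        """Illustrative helper: write a base64 data-URL tarball to disk and
        verify it is a readable tar archive, mirroring the handler above."""
        if not data_url.startswith(TARBALL_PREFIX):
            raise ValueError('expected a gzip data URL')
        with open(out_path, 'wb') as f:
            # Drop the data-URL prefix, then decode the base64 payload.
            f.write(base64.b64decode(data_url[len(TARBALL_PREFIX):]))
        try:
            # tarfile.open auto-detects compression and raises ReadError
            # for a corrupt or non-tar payload.
            tarfile.open(out_path)
        except tarfile.ReadError:
            os.remove(out_path)  # discard invalid uploads, as the handler does
            raise

On the client side, fileReaderPromise in actions.js produces exactly this kind of data URL via FileReader.readAsDataURL before the JSON POST, which is why the handler validates both the prefix and the decoded archive.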