Skip to content

Commit

Permalink
Merge a180c46 into ea00518
Browse files Browse the repository at this point in the history
  • Loading branch information
talavis committed Apr 23, 2019
2 parents ea00518 + a180c46 commit 369f4c6
Show file tree
Hide file tree
Showing 5 changed files with 65 additions and 33 deletions.
17 changes: 9 additions & 8 deletions backend/application.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def build_dataset_structure(dataset_version, user=None, dataset=None):

if user:
r['is_admin'] = user.is_admin(dataset)
if user.has_access(dataset):
if user.has_access(dataset, dataset_version.version):
r['authorization_level'] = 'has_access'
elif user.has_requested_access(dataset):
r['authorization_level'] = 'has_requested_access'
Expand Down Expand Up @@ -218,10 +218,10 @@ def get(self, dataset):


class GenerateTemporaryLink(handlers.AuthorizedHandler):
def post(self, dataset, version=None):
dataset, version = utils.parse_dataset(dataset, version)
def post(self, dataset, ds_version=None):
dataset, ds_version = utils.parse_dataset(dataset, ds_version)
user = self.current_user
dataset_version = db.get_dataset_version(dataset, version)
dataset_version = db.get_dataset_version(dataset, ds_version)
if dataset_version is None:
self.send_error(status_code=404)
return
Expand All @@ -248,9 +248,9 @@ def post(self, dataset, version=None):


class DatasetFiles(handlers.AuthorizedHandler):
def get(self, dataset, version=None):
dataset, version = utils.parse_dataset(dataset, version)
dataset_version = db.get_dataset_version(dataset, version)
def get(self, dataset, ds_version=None):
dataset, ds_version = utils.parse_dataset(dataset, ds_version)
dataset_version = db.get_dataset_version(dataset, ds_version)
if dataset_version is None:
self.send_error(status_code=404)
return
Expand All @@ -264,14 +264,15 @@ def get(self, dataset, version=None):

self.finish({'files': ret})


def format_bytes(nbytes):
    """
    Format a byte count as a human-readable size string.

    Args:
        nbytes (int): number of bytes (non-negative)
    Returns:
        str: e.g. "1.5 Mb"
    """
    postfixes = ['b', 'Kb', 'Mb', 'Gb', 'Tb', 'Pb', 'Eb']
    if nbytes <= 0:
        # math.log is undefined for 0 (the original raised ValueError here);
        # report zero bytes directly.
        return "0 b"
    # Clamp the exponent so sizes beyond the largest postfix (>= 1000**7)
    # still format instead of raising IndexError.
    exponent = min(math.floor(math.log(nbytes) / math.log(1000)),
                   len(postfixes) - 1)
    return "{} {}".format(round(nbytes / 1000**exponent, 2), postfixes[exponent])


class Collection(handlers.UnsafeHandler):
def get(self, dataset):
def get(self, dataset, ds_version=None):
dataset, _ = utils.parse_dataset(dataset)
dataset = db.get_dataset(dataset)

Expand Down
35 changes: 29 additions & 6 deletions backend/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,7 @@ class Meta:
class Dataset(BaseModel):
"""
A dataset is part of a study, and usually include a certain population.
Most studies only have a single dataset, but multiple are allowed.
"""
class Meta:
Expand Down Expand Up @@ -213,6 +214,9 @@ class Meta:
data_contact_link = CharField(null=True)
num_variants = IntegerField(null=True)
coverage_levels = ArrayField(IntegerField, null=True)
portal_avail = BooleanField(null=True)
file_access = EnumField(null=False, choices=['None', 'Controlled', 'Registered', 'Public'])
beacon_access = EnumField(null=False, choices=['None', 'Controlled', 'Registered', 'Public'])


class DatasetFile(BaseModel):
Expand Down Expand Up @@ -334,16 +338,35 @@ def is_admin(self, dataset):
DatasetAccess.is_admin
).count()

def has_access(self, dataset):
return DatasetAccessCurrent.select().where(
DatasetAccessCurrent.dataset == dataset,
DatasetAccessCurrent.user == self,
).count()
def has_access(self, dataset, ds_version=None):
    """
    Check whether the user may access the files of a dataset version.

    Args:
        dataset: peewee Dataset object
        ds_version (str): the dataset version (latest if None)
    Returns:
        bool: True if access is allowed
    """
    version = get_dataset_version(dataset.short_name, ds_version)
    if not version:
        return False

    access_level = version.file_access
    if access_level == 'None':
        # Files are not available to anyone for this version.
        return False
    if access_level in ('Registered', 'Public'):
        return True

    # 'Controlled': access must have been explicitly granted to this user.
    granted = (DatasetAccessCurrent
               .select()
               .where(DatasetAccessCurrent.dataset == dataset,
                      DatasetAccessCurrent.user == self)
               .count())
    return granted > 0

def has_requested_access(self, dataset):
    """
    Check whether the user has a pending access request for a dataset.

    Args:
        dataset: peewee Dataset object
    Returns:
        int: number of pending requests (0, i.e. falsy, when none exist)
    """
    # The pasted diff duplicated the `user == self` condition line without a
    # separating comma, which is a syntax error; a single condition is kept.
    return DatasetAccessPending.select().where(
        DatasetAccessPending.dataset == dataset,
        DatasetAccessPending.user == self
    ).count()


Expand Down
30 changes: 19 additions & 11 deletions backend/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,12 +130,15 @@ def prepare(self):
return

kwargs = self.path_kwargs
if not kwargs['dataset']:
if not 'dataset' in kwargs:
logging.debug("No dataset: Send error 403")
self.send_error(status_code=403)
if not self.current_user.has_access( db.get_dataset(kwargs['dataset']) ):
return
ds_version = kwargs['ds_version'] if 'ds_version' in kwargs else None
if not self.current_user.has_access(db.get_dataset(kwargs['dataset']), ds_version):
logging.debug("No user access: Send error 403")
self.send_error(status_code=403)
return
logging.debug("User is authorized")


Expand All @@ -150,9 +153,11 @@ def prepare(self):
if not kwargs['dataset']:
logging.debug("No dataset: Send error 403")
self.send_error(status_code=403)
return
if not self.current_user.is_admin( db.get_dataset(kwargs['dataset']) ):
logging.debug("No user admin: Send error 403")
self.send_error(status_code=403)
return


class SafeStaticFileHandler(tornado.web.StaticFileHandler, SafeHandler):
Expand All @@ -179,20 +184,23 @@ def initialize(self, path):
path = "/" + path
self.root = path

def get(self, dataset, file, user=None):
def get(self, dataset, file, ds_version=None, user=None):
logging.debug("Want to download dataset {} ({})".format(dataset, file))

if not user:
user = self.current_user

dbfile = (db.DatasetFile
.select()
.where(db.DatasetFile.name == file)
.get())
db.UserDownloadLog.create(
user = user,
dataset_file = dbfile
)
try:
dbfile = (db.DatasetFile.select()
.join(db.DatasetVersion)
.where((db.DatasetFile.name == file) &
(db.DatasetVersion.version == ds_version))
.get())
except db.DatasetFile.DoesNotExist:
self.send_error(status_code=403)
return

db.UserDownloadLog.create(user = user, dataset_file = dbfile)

abspath = os.path.abspath(os.path.join(self.root, file))
self.set_header("X-Accel-Redirect", abspath)
Expand Down
8 changes: 4 additions & 4 deletions backend/route.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def __init__(self, settings):
{"path": "static/img/"}),
(r"/release/(?P<dataset>[^\/]+)/(?P<hash_value>[^\/]+)/(?P<file>[^\/]+)",handlers.TemporaryStaticNginxFileHandler,
{"path": "/release-files/"}),
(r"/release/(?P<dataset>[^\/]+)/(?P<file>[^\/]+)", handlers.AuthorizedStaticNginxFileHandler,
(r"/release/(?P<dataset>[^\/]+)/(?:versions/(?P<ds_version>[^/]+)/)?(?P<file>[^\/]+)", handlers.AuthorizedStaticNginxFileHandler,
{"path": "/release-files/"}),
## Authentication
(r"/logout", auth.ElixirLogoutHandler),
Expand All @@ -65,11 +65,11 @@ def __init__(self, settings):
(r"/api/dataset/(?P<dataset>[^\/]+)", application.GetDataset),
(r"/api/dataset/(?P<dataset>[^\/]+)/log/(?P<event>[^\/]+)/(?P<target>[^\/]+)", application.LogEvent),
(r"/api/dataset/(?P<dataset>[^\/]+)/logo", application.ServeLogo),
(r"/api/dataset/(?P<dataset>[^\/]+)/files", application.DatasetFiles),
(r"/api/dataset/(?P<dataset>[^\/]+)/collection", application.Collection),
(r"/api/dataset/(?P<dataset>[^\/]+)/(?:versions/(?P<ds_version>[^/]+)/)?files", application.DatasetFiles),
(r"/api/dataset/(?P<dataset>[^\/]+)/(?:versions/(?P<ds_version>[^/]+)/)?collection", application.Collection),
(r"/api/dataset/(?P<dataset>[^\/]+)/users_current", application.DatasetUsersCurrent),
(r"/api/dataset/(?P<dataset>[^\/]+)/users_pending", application.DatasetUsersPending),
(r"/api/dataset/(?P<dataset>[^\/]+)/temporary_link", application.GenerateTemporaryLink),
(r"/api/dataset/(?P<dataset>[^\/]+)/(?:versions/(?P<ds_version>[^/]+)/)?temporary_link", application.GenerateTemporaryLink),
(r"/api/dataset/(?P<dataset>[^\/]+)/users/[^\/]+/request", application.RequestAccess),
(r"/api/dataset/(?P<dataset>[^\/]+)/users/(?P<email>[^\/]+)/approve", application.ApproveUser),
(r"/api/dataset/(?P<dataset>[^\/]+)/users/(?P<email>[^\/]+)/revoke", application.RevokeUser),
Expand Down
8 changes: 4 additions & 4 deletions test/data/load_dummy_data.sql
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,10 @@ INSERT INTO data.sample_sets (id, dataset, "collection", sample_size, phenotype)
(1000003, 1000002, 1000003, 20, 'SamplePheno2 Coll2');

INSERT INTO data.dataset_versions (id, dataset, reference_set, dataset_version, dataset_description, terms, var_call_ref, available_from, ref_doi, data_contact_name, data_contact_link, num_variants, coverage_levels, portal_avail, file_access, beacon_access)
VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', 'CallRef11', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10], TRUE, 'Registered', 'Public'),
(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', 'CallRef21', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10], TRUE, 'Registered', 'Public'),
(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', 'CallRef22', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10], TRUE, 'Registered', 'Public'),
(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', 'CallRef23', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10], TRUE, 'Registered', 'Public');
VALUES (1000001, 1000001, 1000001, 'Version 1-1', 'Dataset 1-1, description', 'Dataset 1-1, terms', 'CallRef11', '2017-01-01', 'datset11DOI', 'Gunnar Green', 'gunnar.green@example.com', 10, ARRAY[1,5,10], TRUE, 'Controlled', 'Public'),
(1000002, 1000002, 1000001, 'Version 2-1', 'Dataset 2-1, description', 'Dataset 2-1, terms', 'CallRef21', '2017-02-01', 'datset21DOI', NULL, NULL, 100, ARRAY[1,5,10], TRUE, 'Controlled', 'Public'),
(1000003, 1000002, 1000002, 'Version 2-2', 'Dataset 2-2, description', 'Dataset 2-2, terms', 'CallRef22', '2017-02-02', 'datset22DOI', 'Strummer project', 'https://example.com/strummer', 1000, ARRAY[1,5,10], TRUE, 'Controlled', 'Public'),
(1000004, 1000002, 1000002, 'InvVer 2-3', 'Dataset 2-3, description', 'Dataset 2-3, terms', 'CallRef23', '2030-02-03', 'datset23DOI', 'Drummer project', 'https://example.com/drummer', 10000, ARRAY[1,5,10], TRUE, 'Controlled', 'Public');

INSERT INTO data.dataset_files(id, dataset_version, basename, uri, file_size)
VALUES (1000001, 1000001, 'File11-1', '/release/file111.txt', 100),
Expand Down

0 comments on commit 369f4c6

Please sign in to comment.