Commit 24032e4

Merge pull request #563 from NBISweden/develop
Fix bugs with file dl for new accounts and temporary links
viklund committed May 20, 2019
2 parents 2446814 + 80a7d65 commit 24032e4
Showing 10 changed files with 52 additions and 44 deletions.
1 change: 1 addition & 0 deletions backend/application.py
@@ -414,6 +414,7 @@ def post(self, dataset, event, target):
         user = self.current_user

         if event == 'consent':
+            user.save()
             dv = (db.DatasetVersion
                   .select()
                   .join(db.Dataset)
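The added `user.save()` matters for brand-new accounts: an object that has never been saved has no row (and no primary key) in the database, so later queries that reference that user find nothing. A minimal peewee sketch of the idea, using hypothetical `User` and `Consent` models rather than this repository's actual schema:

from peewee import SqliteDatabase, Model, CharField, ForeignKeyField

db = SqliteDatabase(':memory:')

class User(Model):
    name = CharField()
    class Meta:
        database = db

class Consent(Model):
    user = ForeignKeyField(User)
    class Meta:
        database = db

db.create_tables([User, Consent])

user = User(name='new-account')   # exists only in memory so far
print(user.id)                    # None -- no row to reference yet
user.save()                       # now the row and primary key exist
Consent.create(user=user)         # safe: the foreign key resolves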
10 changes: 5 additions & 5 deletions backend/handlers.py
@@ -226,15 +226,15 @@ class AuthorizedStaticNginxFileHandler(AuthorizedHandler, BaseStaticNginxFileHandler):


 class TemporaryStaticNginxFileHandler(BaseStaticNginxFileHandler):
-    def get(self, dataset, hash_value, file):
+    def get(self, dataset, ds_version, hash_value, file):
         logging.debug("Want to download hash {} ({})".format(hash_value, file))
         linkhash = (db.Linkhash
                     .select()
                     .join(db.DatasetVersion)
                     .join(db.DatasetFile)
-                    .where(db.Linkhash.hash == hash_value,
-                           db.Linkhash.expires_on > datetime.datetime.now(),
-                           db.DatasetFile.name == file))
+                    .where(db.Linkhash.hash == hash_value,
+                           db.Linkhash.expires_on > datetime.datetime.now(),
+                           db.DatasetFile.name == file))
         if linkhash.count() > 0:
             logging.debug("Linkhash valid")
             # Get temporary user from hash_value
@@ -243,7 +243,7 @@ def get(self, dataset, hash_value, file):
                     .join(db.Linkhash)
                     .where(db.Linkhash.hash == hash_value)
                     ).get()
-            super().get(dataset, file, user)
+            super().get(dataset, file, ds_version, user)
         else:
             logging.debug("Linkhash invalid")
             self.send_error(status_code=403)
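The temporary-link handler above accepts a hash only while its `expires_on` timestamp lies in the future. A self-contained sketch of that expiry pattern with a hypothetical `Linkhash` model (not this repository's schema):

import datetime
from peewee import SqliteDatabase, Model, CharField, DateTimeField

db = SqliteDatabase(':memory:')

class Linkhash(Model):
    hash = CharField()
    expires_on = DateTimeField()
    class Meta:
        database = db

db.create_tables([Linkhash])
now = datetime.datetime.now()
Linkhash.create(hash='fresh', expires_on=now + datetime.timedelta(hours=3))
Linkhash.create(hash='stale', expires_on=now - datetime.timedelta(hours=3))

# Multiple expressions in where() are ANDed, as in the handler above.
valid = Linkhash.select().where(Linkhash.hash == 'fresh',
                                Linkhash.expires_on > datetime.datetime.now())
print(valid.count())  # 1 -- the same query for 'stale' would give 0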
1 change: 1 addition & 0 deletions backend/modules/browser/browser_handlers.py
@@ -142,6 +142,7 @@ def get(self, dataset:str, gene:str, ds_version:str=None):
         gene = lookups.get_gene(dataset, gene_id, ds_version)
         if not gene:
             self.send_error(status_code=404, reason='Gene not found')
+            return
         ret['gene'] = gene

         # Add exons from transcript
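The bare `return` added here is needed because Tornado's `send_error()` finishes the HTTP response but does not stop the method; without the early return, execution falls through to `ret['gene'] = gene` even though the lookup failed. A minimal sketch of the same pattern with a hypothetical handler:

import tornado.web

class GeneHandler(tornado.web.RequestHandler):
    def get(self, gene_id):
        gene = None  # stand-in for a failed lookup
        if not gene:
            self.send_error(status_code=404, reason='Gene not found')
            return  # without this, the next line would still run
        self.write({'gene': gene['name']})

app = tornado.web.Application([(r'/gene/([^/]+)', GeneHandler)])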
2 changes: 2 additions & 0 deletions backend/modules/browser/lookups.py
@@ -507,6 +507,8 @@ def get_variants_in_gene(dataset:str, gene_id:str, ds_version:str=None):
     if not dataset_version:
         return None
     gene = get_gene(dataset, gene_id, ds_version)
+    if not gene:
+        return None

     variants = [variant for variant in db.Variant.select()
                                                  .join(db.VariantGenes)
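With this guard, `get_variants_in_gene()` distinguishes an unknown gene or dataset version (None) from a known gene that simply has no variants (an empty list). A hypothetical caller sketch, assuming `lookups` is imported as elsewhere in the browser module and using made-up dataset and gene IDs:

variants = lookups.get_variants_in_gene('example_dataset', 'ENSG00000000001')
if variants is None:
    print('unknown gene or dataset version')
elif not variants:
    print('gene found, but it has no variants in this version')
else:
    print('{} variants found'.format(len(variants)))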
59 changes: 31 additions & 28 deletions backend/modules/browser/utils.py
@@ -217,7 +217,9 @@ def get_coverage_pos(dataset:str, datatype:str, item:str, ds_version:str=None):
     else:
         if datatype == 'gene':
             gene = lookups.get_gene(dataset, item)
-            transcript = lookups.get_transcript(dataset, gene['canonical_transcript'], ds_version)
+            if gene:
+                transcript = lookups.get_transcript(dataset, gene['canonical_transcript'], ds_version)
+            else: transcript = None
         elif datatype == 'transcript':
             transcript = lookups.get_transcript(dataset, item, ds_version)
         if transcript:
@@ -361,33 +363,34 @@ def get_variant_list(dataset:str, datatype:str, item:str, ds_version:str=None):
         return None
     refgene = transcript['gene_id']

-    for variant in variants:
-        if datatype in ('gene', 'transcript'):
-            anno = None
-            if datatype == 'transcript':
-                anno = [ann for ann in variant['vep_annotations'] if ann['Feature'] == item]
-                if not anno:
-                    anno = [ann for ann in variant['vep_annotations'] if ann['Gene'] == refgene]
-            else:
-                anno = [ann for ann in variant['vep_annotations'] if ann['Gene'] == item]
-            if anno:
-                variant['vep_annotations'] = anno
-
-    add_consequence_to_variants(variants)
-
-    for variant in variants:
-        remove_extraneous_information(variant)
-
-    # Format output
-    def format_variant(variant):
-        variant['major_consequence'] = (variant['major_consequence'].replace('_variant','')
-                                                                     .replace('_prime_', '\'')
-                                                                     .replace('_', ' '))
-
-        # This is so an array values turns into a comma separated string instead
-        return {k: ", ".join(v) if isinstance(v,list) else v for k, v in variant.items()}
-
-    variants = list(map(format_variant, variants))
+    if variants:
+        for variant in variants:
+            if datatype in ('gene', 'transcript'):
+                anno = None
+                if datatype == 'transcript':
+                    anno = [ann for ann in variant['vep_annotations'] if ann['Feature'] == item]
+                    if not anno:
+                        anno = [ann for ann in variant['vep_annotations'] if ann['Gene'] == refgene]
+                else:
+                    anno = [ann for ann in variant['vep_annotations'] if ann['Gene'] == item]
+                if anno:
+                    variant['vep_annotations'] = anno
+
+        add_consequence_to_variants(variants)
+
+        for variant in variants:
+            remove_extraneous_information(variant)
+
+        # Format output
+        def format_variant(variant):
+            variant['major_consequence'] = (variant['major_consequence'].replace('_variant','')
+                                                                         .replace('_prime_', '\'')
+                                                                         .replace('_', ' '))
+
+            # This is so an array values turns into a comma separated string instead
+            return {k: ", ".join(v) if isinstance(v,list) else v for k, v in variant.items()}
+
+        variants = list(map(format_variant, variants))

     return {'variants': variants, 'headers': headers}

2 changes: 1 addition & 1 deletion backend/route.py
@@ -41,7 +41,7 @@ def __init__(self, settings):
              {"path": "static/"}),
             (r'/(favicon.ico)', tornado.web.StaticFileHandler,
              {"path": "static/img/"}),
-            (r"/release/(?P<dataset>[^\/]+)/(?P<hash_value>[^\/]+)/(?P<file>[^\/]+)", handlers.TemporaryStaticNginxFileHandler,
+            (r"/release/(?P<dataset>[^\/]+)/versions/(?P<ds_version>[^/]+)/(?P<hash_value>[^\/]+)/(?P<file>[^\/]+)", handlers.TemporaryStaticNginxFileHandler,
              {"path": "/release-files/"}),
             (r"/release/(?P<dataset>[^\/]+)/versions/(?P<ds_version>[^/]+)/(?P<file>[^\/]+)", handlers.AuthorizedStaticNginxFileHandler,
              {"path": "/release-files/"}),
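The `versions/(?P<ds_version>[^/]+)/` segment added to the temporary-link route is also why `TemporaryStaticNginxFileHandler.get()` gained a `ds_version` parameter: Tornado passes named regex groups to `get()` as keyword arguments. A minimal, self-contained sketch with a hypothetical handler and made-up example values:

import tornado.ioloop
import tornado.web

class FileHandler(tornado.web.RequestHandler):
    def get(self, dataset, ds_version, hash_value, file):
        # Each named group in the URL pattern arrives as a keyword argument.
        self.write('{}/{}/{}/{}\n'.format(dataset, ds_version, hash_value, file))

def make_app():
    return tornado.web.Application([
        (r'/release/(?P<dataset>[^/]+)/versions/(?P<ds_version>[^/]+)'
         r'/(?P<hash_value>[^/]+)/(?P<file>[^/]+)', FileHandler),
    ])

if __name__ == '__main__':
    make_app().listen(8888)
    # GET /release/example_dataset/versions/20190520/abc123/file.txt
    # -> get(dataset='example_dataset', ds_version='20190520',
    #        hash_value='abc123', file='file.txt')
    tornado.ioloop.IOLoop.current().start()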
9 changes: 0 additions & 9 deletions docs/source/backend.beacon.rst

This file was deleted.

1 change: 0 additions & 1 deletion docs/source/backend.rst
@@ -5,7 +5,6 @@ Backend

    backend.application
    backend.auth
-   backend.beacon
    backend.db
    backend.handlers
    backend.route
10 changes: 10 additions & 0 deletions docs/source/docs.merge-accounts.md
@@ -0,0 +1,10 @@
+Merge accounts for Elixir AAI
+=============================
+
+It is possible to maintain the same dataset permissions across several logins (e.g. you are a dataset admin with your institutional account and want to be able to log in with your ORCID account and still be an admin).
+
+To merge the accounts:
+
+1. Log in to the Perun Identity consolidator at https://perun.elixir-czech.cz/fed/gui with the account that has admin access.
+2. Go to the authentication tab and click `identity consolidator >>`.
+3. Log in with your second account.
1 change: 1 addition & 0 deletions docs/source/docs.rst
@@ -6,3 +6,4 @@ Documentation

    docs.dev-setup.md
    docs.import-data.md
+   docs.merge-accounts.md
