Merge 70d75fa into 67d42e5
talavis committed Jun 18, 2019
2 parents 67d42e5 + 70d75fa commit 1b770df
Showing 9 changed files with 350 additions and 197 deletions.
77 changes: 47 additions & 30 deletions backend/modules/browser/browser_handlers.py
@@ -5,6 +5,7 @@
 import db
 import handlers
 
+from . import error
 from . import lookups
 from . import utils
 
@@ -24,7 +25,7 @@ def get(self, dataset:str, query:str, ds_version:str=None):
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         ret = {}
 
-        results = lookups.get_autocomplete(dataset, query, ds_version)
+        results = lookups.autocomplete(dataset, query, ds_version)
         ret = {'values': sorted(list(set(results)))[:20]}
 
         self.finish(ret)
@@ -87,12 +88,13 @@ def get(self, dataset:str, datatype:str, item:str, ds_version:str=None):
             ds_version (str): dataset version
         """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
-        ret = utils.get_coverage(dataset, datatype, item, ds_version)
-        if 'bad_region' in ret:
-            self.send_error(status_code=400, reason="Unable to parse the region")
+        try:
+            ret = utils.get_coverage(dataset, datatype, item, ds_version)
+        except error.NotFoundError as err:
+            self.send_error(status_code=404, reason=str(err))
             return
-        if 'region_too_large' in ret:
-            self.send_error(status_code=400, reason="The region is too large")
+        except (error.ParsingError, error.MalformedRequest) as err:
+            self.send_error(status_code=400, reason=str(err))
             return
         self.finish(ret)
 
@@ -114,7 +116,7 @@ def get(self, dataset:str, datatype:str, item:str, ds_version:str=None):
         try:
             ret = utils.get_coverage_pos(dataset, datatype, item, ds_version)
         except ValueError:
-            logging.error('GetCoveragePos: unable to parse region ({})'.format(region))
+            logging.error('GetCoveragePos: unable to parse region ({})'.format(item))
             self.send_error(status_code=400, reason='Unable to parse region')
             return
 
@@ -139,7 +141,15 @@ def get(self, dataset:str, gene:str, ds_version:str=None):
         ret = {'gene':{'gene_id': gene_id}}
 
         # Gene
-        gene = lookups.get_gene(dataset, gene_id, ds_version)
+        try:
+            gene = lookups.get_gene(dataset, gene_id, ds_version)
+        except error.NotFoundError as err:
+            self.send_error(status_code=404, reason=str(err))
+            return
+        except (error.ParsingError, error.MalformedRequest) as err:
+            self.send_error(status_code=400, reason=str(err))
+            return
+
         if not gene:
             self.send_error(status_code=404, reason='Gene not found')
             return
@@ -181,9 +191,9 @@ def get(self, dataset:str, region:str, ds_version:str=None):
 
         try:
             chrom, start, stop = utils.parse_region(region)
-        except ValueError:
-            logging.error('GetRegion: unable to parse region ({})'.format(region))
-            self.send_error(status_code=400, reason='Unable to parse region')
+        except error.ParsingError as err:
+            self.send_error(status_code=400, reason=str(err))
+            logging.warning('GetRegion: unable to parse region ({})'.format(region))
             return
 
         ret = {'region':{'chrom': chrom,
@@ -193,7 +203,7 @@ def get(self, dataset:str, region:str, ds_version:str=None):
               }
 
         if utils.is_region_too_large(start, stop):
-            self.send_error(status_code=400, reason="The region is too large")
+            self.send_error(status_code=400, reason='Region too large')
             return
 
         genes_in_region = lookups.get_genes_in_region(dataset, chrom, start, stop, ds_version)
@@ -229,10 +239,12 @@ def get(self, dataset:str, transcript:str, ds_version:str=None):
               }
 
         # Add transcript information
-        transcript = lookups.get_transcript(dataset, transcript_id, ds_version)
-        if not transcript:
-            self.send_error(status_code=404, reason='Transcript not found')
+        try:
+            transcript = lookups.get_transcript(dataset, transcript_id, ds_version)
+        except error.NotFoundError as err:
+            self.send_error(status_code=404, reason=str(err))
             return
+
         ret['transcript']['id'] = transcript['transcript_id']
         ret['transcript']['number_of_CDS'] = len([t for t in transcript['exons'] if t['feature_type'] == 'CDS'])
 
@@ -270,18 +282,21 @@ def get(self, dataset:str, variant:str, ds_version:str=None):
         ret = {'variant':{}}
         # Variant
         v = variant.split('-')
+        if len(v) != 4:
+            logging.error('GetVariant: unable to parse variant ({})'.format(variant))
+            self.send_error(status_code=400, reason=f'Unable to parse variant {variant}')
         try:
             v[1] = int(v[1])
         except ValueError:
-            logging.error('GetVariant: unable to parse variant ({})'.format(variant))
-            self.send_error(status_code=400, reason="Unable to parse variant")
+            logging.error('GetVariant: position not an integer ({})'.format(variant))
+            self.send_error(status_code=400, reason=f'Position is not an integer in variant {variant}')
             return
         orig_variant = variant
-        variant = lookups.get_variant(dataset, v[1], v[0], v[2], v[3], ds_version)
-
-        if not variant:
+        try:
+            variant = lookups.get_variant(dataset, v[1], v[0], v[2], v[3], ds_version)
+        except error.NotFoundError as err:
             logging.error('Variant not found ({})'.format(orig_variant))
-            self.send_error(status_code=404, reason='Variant not found')
+            self.send_error(status_code=404, reason=str(err))
             return
 
         # Just get the information we need
@@ -320,7 +335,6 @@ def get(self, dataset:str, variant:str, ds_version:str=None):
                                           'canonical': annotation['CANONICAL'],
                                           'modification': annotation['HGVSp'].split(":")[1] if ':' in annotation['HGVSp'] else None}]
 
-
         # Dataset frequencies.
         # This is reported per variable in the database data, with dataset
         # information inside the variables, so here we reorder to make the
@@ -332,9 +346,11 @@ def get(self, dataset:str, variant:str, ds_version:str=None):
         dsvs = [dsv for dsv in dsvs if dsv.reference_set == curr_dsv.reference_set]
         dsv_groups = [(curr_dsv, variant)]
         for dsv in dsvs:
-            hit = lookups.get_variant(dsv.dataset.short_name, v[1], v[0], v[2], v[3], dsv.version)
-            if hit:
-                dsv_groups.append((dsv, hit))
+            try:
+                hit = lookups.get_variant(dsv.dataset.short_name, v[1], v[0], v[2], v[3], dsv.version)
+            except error.NotFoundError:
+                continue
+            dsv_groups.append((dsv, hit))
 
         frequencies = {'headers':[['Dataset','pop'],
                                   ['Allele Count','acs'],
@@ -379,12 +395,13 @@ def get(self, dataset:str, datatype:str, item:str, ds_version:str=None):
             item (str): item to query
         """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
-        ret = utils.get_variant_list(dataset, datatype, item, ds_version)
-        if not ret:
-            self.send_error(status_code=500, reason='Unable to retrieve variants')
+        try:
+            ret = utils.get_variant_list(dataset, datatype, item, ds_version)
+        except error.NotFoundError as err:
+            self.send_error(status_code=404, reason=str(err))
             return
-        if 'region_too_large' in ret:
-            self.send_error(status_code=400, reason="The region is too large")
+        except (error.ParsingError, error.MalformedRequest) as err:
+            self.send_error(status_code=400, reason=str(err))
             return
 
         # inconvenient way of doing humpBack-conversion
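Every handler touched above maps the browser module's exceptions to HTTP status codes the same way: error.NotFoundError becomes a 404, while error.ParsingError and error.MalformedRequest become a 400. A minimal sketch of that mapping as a reusable decorator follows; the decorator itself is hypothetical and not part of this commit, only the status-code mapping is taken from the diff.

# Hypothetical helper, not part of this commit: wraps a handler method so
# that the browser module's exceptions are translated to the same status
# codes used in browser_handlers.py above.
import functools

from . import error


def translate_browser_errors(method):
    """Turn browser exceptions into send_error() calls with matching codes."""
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        try:
            return method(self, *args, **kwargs)
        except error.NotFoundError as err:
            self.send_error(status_code=404, reason=str(err))    # nothing found -> 404
        except (error.ParsingError, error.MalformedRequest) as err:
            self.send_error(status_code=400, reason=str(err))    # bad request -> 400
    return wrapper

With such a decorator, a handler's get() could call lookups directly and let the wrapper send the error response instead of repeating the try/except block in every method.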
11 changes: 11 additions & 0 deletions backend/modules/browser/error.py
@@ -0,0 +1,11 @@
+class NotFoundError(Exception):
+    """The query returned nothing from the database."""
+    pass
+
+class ParsingError(Exception):
+    """Failed to parse the request."""
+    pass
+
+class MalformedRequest(Exception):
+    """Bad request (e.g. too large region)."""
+    pass
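These exceptions are raised by the lookup and utility functions and caught in the handlers shown earlier. A rough sketch of how a lookup might use them follows; both functions below are illustrative stand-ins, since the real lookups code is not part of this diff.

# Illustrative sketch only; neither function below exists in this commit.
from . import error


def _fetch_gene_row(dataset, gene_id, ds_version):
    """Stand-in for a real database query; returns None when nothing matches."""
    return None


def get_gene_sketch(dataset, gene_id, ds_version=None):
    """Raise the browser errors that the handlers above translate to 400/404."""
    if not gene_id:
        raise error.MalformedRequest('No gene id given')
    row = _fetch_gene_row(dataset, gene_id, ds_version)
    if row is None:
        raise error.NotFoundError('Gene {} not found in dataset {}'.format(gene_id, dataset))
    return row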
