Merge pull request #595 from NBISweden/testing/extended
Increase test coverage
talavis committed Aug 22, 2019
2 parents 61cc6c6 + 811c070 commit 8172f2e
Showing 5 changed files with 192 additions and 34 deletions.
44 changes: 19 additions & 25 deletions backend/application.py
@@ -102,33 +102,27 @@ def get(self):
         if dataset:
             dataset_schema = {'@type': "Dataset"}
 
-            try:
-                dataset_version = db.get_dataset_version(dataset, version)
-                if dataset_version is None:
-                    self.send_error(status_code=404)
+            dataset_version = db.get_dataset_version(dataset, version)
+            if dataset_version is None:
+                self.send_error(status_code=404)
+                return
 
-            if dataset_version.available_from > datetime.now():
-                # If it's not available yet, only return if user is admin.
-                if not (self.current_user and
-                        self.current_user.is_admin(dataset_version.dataset)):
-                    self.send_error(status_code=403)
-                    return
+            if dataset_version.available_from > datetime.now():
+                # If it's not available yet, only return if user is admin.
+                if not (self.current_user and
+                        self.current_user.is_admin(dataset_version.dataset)):
+                    self.send_error(status_code=403)
+                    return
 
-                base_url = "%s://%s" % (self.request.protocol, self.request.host)
-                dataset_schema['url'] = base_url + "/dataset/" + dataset_version.dataset.short_name
-                dataset_schema['@id'] = dataset_schema['url']
-                dataset_schema['name'] = dataset_version.dataset.short_name
-                dataset_schema['description'] = dataset_version.description
-                dataset_schema['identifier'] = dataset_schema['name']
-                dataset_schema['citation'] = dataset_version.ref_doi
-
-                base["dataset"] = dataset_schema
-
-            except db.DatasetVersion.DoesNotExist as err:
-                logging.error(f"Dataset version does not exist: {err}")
-            except db.DatasetVersionCurrent.DoesNotExist as err:
-                logging.error(f"Dataset does not exist: {err}")
+            base_url = "%s://%s" % (self.request.protocol, self.request.host)
+            dataset_schema['url'] = base_url + "/dataset/" + dataset_version.dataset.short_name
+            dataset_schema['@id'] = dataset_schema['url']
+            dataset_schema['name'] = dataset_version.dataset.short_name
+            dataset_schema['description'] = dataset_version.description
+            dataset_schema['identifier'] = dataset_schema['name']
+            dataset_schema['citation'] = dataset_version.ref_doi
+
+            base["dataset"] = dataset_schema
 
         if beacon:
             base = {"@context": "http://schema.org",
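
The rewritten handler relies on db.get_dataset_version() returning None for an unknown dataset or version instead of raising, which is what makes the dropped try/except redundant. A minimal sketch of such a helper, assuming peewee-style models named after the identifiers visible in the removed except clauses (the project's actual helper lives in its db module and may differ):

    # Hypothetical sketch (inside the db module); model and field names are
    # assumptions based on the identifiers visible in this diff.
    def get_dataset_version(dataset: str, version: str = None):
        """Return the matching dataset version, or None if it does not exist."""
        try:
            if version:
                return (DatasetVersion.select()
                        .join(Dataset)
                        .where((DatasetVersion.version == version)
                               & (Dataset.short_name == dataset))
                        .get())
            return (DatasetVersionCurrent.select()
                    .join(Dataset)
                    .where(Dataset.short_name == dataset)
                    .get())
        except (DatasetVersion.DoesNotExist, DatasetVersionCurrent.DoesNotExist):
            # signal "not found" with None, as the rewritten handler expects
            return None
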
21 changes: 12 additions & 9 deletions backend/modules/browser/browser_handlers.py
@@ -21,6 +21,7 @@ def get(self, dataset: str, query: str, ds_version: str = None):
             dataset (str): dataset short name
             query (str): query
             ds_version (str): dataset version
+        """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         ret = {}
@@ -47,6 +48,7 @@ def get(self, dataset: str, datatype: str, item: str,  # pylint: disable=too-man
             item (str): query item
             ds_version (str): dataset version
             filter_type (str): type of filter to apply
+        """
         # ctrl.filterVariantsBy~ctrl.filterIncludeNonPass
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
@@ -89,6 +91,7 @@ def get(self, dataset: str, datatype: str, item: str, ds_version: str = None):
             datatype (str): type of data
             item (str): query item
             ds_version (str): dataset version
+        """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         try:
@@ -114,6 +117,7 @@ def get(self, dataset: str, datatype: str, item: str, ds_version: str = None):
             datatype (str): type of data
             item (str): query item
             ds_version (str): dataset version
+        """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         try:
@@ -139,6 +143,7 @@ def get(self, dataset: str, gene: str, ds_version: str = None):
             dataset (str): short name of the dataset
             gene (str): the gene id
             ds_version (str): dataset version
+        """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         gene_id = gene
@@ -151,13 +156,7 @@ def get(self, dataset: str, gene: str, ds_version: str = None):
         except error.NotFoundError as err:
             self.send_error(status_code=404, reason=str(err))
             return
-        except (error.ParsingError, error.MalformedRequest) as err:
-            self.send_error(status_code=400, reason=str(err))
-            return
 
-        if not gene:
-            self.send_error(status_code=404, reason='Gene not found')
-            return
         ret['gene'] = gene
 
         # Add exons from transcript
@@ -193,6 +192,7 @@ def get(self, dataset: str, region: str, ds_version: str = None):
             dataset (str): short name of the dataset
             region (str): the region in the format chr-startpos-endpos
             ds_version (str): dataset version
+        """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
 
@@ -232,9 +232,6 @@ def get(self, dataset: str, transcript: str, ds_version: str = None):
             dataset (str): short name of the dataset
             transcript (str): the transcript id
-
-        Returns:
-            dict: transcript (transcript and exons), gene (gene information)
         """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         transcript_id = transcript
@@ -282,6 +279,7 @@ def get(self, dataset: str, variant: str, ds_version: str = None):
         Args:
             dataset (str): short name of the dataset
             variant (str): variant in the format chrom-pos-ref-alt
+        """
         # pylint: disable=too-many-locals,too-many-branches,too-many-statements
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
@@ -355,6 +353,9 @@ def get(self, dataset: str, variant: str, ds_version: str = None):
         curr_dsv = db.get_dataset_version(dataset, ds_version)
         dsvs = [db.get_dataset_version(dset.short_name) for dset in db.Dataset.select()
                 if dset.short_name != dataset]
+        # if the only available version is not released yet
+        dsvs = list(filter(lambda dsv: dsv, dsvs))
+        logging.error(dsvs)
         dsvs = [dsv for dsv in dsvs if dsv.reference_set == curr_dsv.reference_set]
         dsv_groups = [(curr_dsv, variant)]
         for dsv in dsvs:
@@ -410,6 +411,7 @@ def get(self, dataset: str, datatype: str, item: str, ds_version: str = None):
             dataset (str): short name of the dataset
             datatype (str): gene, region, or transcript
             item (str): item to query
+        """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         try:
@@ -440,6 +442,7 @@ def get(self, dataset: str, query: str, ds_version: str = None):
         Args:
             dataset (str): short name of the dataset
             query (str): search query
+        """
         dataset, ds_version = utils.parse_dataset(dataset, ds_version)
         ret = {"dataset": dataset, "value": None, "type": None}
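
The three lines added to the variant handler guard against db.get_dataset_version() returning None when a dataset's only version is not yet released, so the later dsv.reference_set access is never attempted on None. A standalone illustration of the filter, with hypothetical values:

    # Hypothetical values: one dataset resolved, the other had no released version.
    dsvs = ['dsv_swegen2', None]
    dsvs = list(filter(lambda dsv: dsv, dsvs))  # drops falsy entries -> ['dsv_swegen2']
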
20 changes: 20 additions & 0 deletions backend/modules/browser/tests/test_browser_handlers.py
@@ -81,6 +81,22 @@ def test_get_coverage_pos():
     assert cov_pos['stop'] == 100101
     assert cov_pos['chrom'] == '22'
 
+    data_type = 'region'
+    data_item = '22-100001-200101'
+    response = requests.get('{}/api/dataset/{}/browser/coverage_pos/{}/{}'.format(BASE_URL, dataset, data_type, data_item))
+    assert response.status_code == 400
+
+    data_type = 'region'
+    data_item = '22-1-11-101'
+    response = requests.get('{}/api/dataset/{}/browser/coverage_pos/{}/{}'.format(BASE_URL, dataset, data_type, data_item))
+    assert response.status_code == 400
+
+    dataset = 'SweGen'
+    data_type = 'transcript'
+    data_item = 'BAD_TRANSCRIPT'
+    response = requests.get('{}/api/dataset/{}/browser/coverage_pos/{}/{}'.format(BASE_URL, dataset, data_type, data_item))
+    assert response.status_code == 404
+
 
 def test_get_gene():
     """
@@ -197,6 +213,10 @@ def test_get_variant():
     response = requests.get('{}/api/dataset/{}/browser/variant/{}'.format(BASE_URL, dataset, variant_id))
     assert response.status_code == 400
 
+    variant_id = '1-2-3-4-5-6'
+    response = requests.get('{}/api/dataset/{}/browser/variant/{}'.format(BASE_URL, dataset, variant_id))
+    assert response.status_code == 400
+
 
 def test_get_variants():
     """
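
The new checks repeat a single request-and-assert pattern. Assuming the suite runs under pytest (suggested by the bare assert style), the same cases could be expressed with parametrization; a sketch, not part of this commit:

    import pytest
    import requests

    BASE_URL = 'http://localhost:4000'

    @pytest.mark.parametrize('dataset, data_type, data_item, expected_status',
                             [('SweGen', 'region', '22-100001-200101', 400),
                              ('SweGen', 'region', '22-1-11-101', 400),
                              ('SweGen', 'transcript', 'BAD_TRANSCRIPT', 404)])
    def test_get_coverage_pos_errors(dataset, data_type, data_item, expected_status):
        # mirrors the error cases added to test_get_coverage_pos above
        response = requests.get('{}/api/dataset/{}/browser/coverage_pos/{}/{}'
                                .format(BASE_URL, dataset, data_type, data_item))
        assert response.status_code == expected_status
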
138 changes: 138 additions & 0 deletions backend/tests/test_application.py
@@ -0,0 +1,138 @@
"""
Test the browser handlers
"""
import json

import requests

BASE_URL = "http://localhost:4000"

def test_get_schema():
"""
Test GetSchema.get()
"""
response = requests.get(f'{BASE_URL}/api/schema')
data = json.loads(response.text)
expected = {'@context': 'http://schema.org/',
'@type': 'DataCatalog',
'name': 'SweFreq',
'alternateName': ['The Swedish Frequency resource for genomics']}
assert len(data) == 10
for value in expected:
assert data[value] == expected[value]

ds_name = 'SweGen'
response = requests.get(f'{BASE_URL}/api/schema?url={BASE_URL}/dataset/{ds_name}/browser')
data = json.loads(response.text)
expected = {"@type": "Dataset",
"url": f"{BASE_URL}/dataset/{ds_name}",
"@id": f"{BASE_URL}/dataset/{ds_name}",
"name": f"{ds_name}",
"description": "desc",
"identifier": f"{ds_name}",
"citation": "doi"}
assert data['dataset'] == expected

response = requests.get(f'{BASE_URL}/api/schema?url={BASE_URL}/dataset/{ds_name}/version/123456/browser')
assert not response.text
assert response.status_code == 404

response = requests.get(f'{BASE_URL}/api/schema?url={BASE_URL}/dataset/bad_ds_name/browser')
assert not response.text
assert response.status_code == 404

ds_name = 'SweGen2'
response = requests.get(f'{BASE_URL}/api/schema?url={BASE_URL}/dataset/{ds_name}/version/UNRELEASED/browser')
assert not response.text
assert response.status_code == 403

response = requests.get(f'{BASE_URL}/api/schema?url={BASE_URL}/dataset/{ds_name}/beacon')
data = json.loads(response.text)
expected = {'@id': 'https://swefreq.nbis.se/api/beacon-elixir/',
'@type': 'Beacon',
'dct:conformsTo': 'https://bioschemas.org/specifications/drafts/Beacon/',
'name': 'Swefreq Beacon',
'provider': {'@type': 'Organization',
'name': 'National Bioinformatics Infrastructure Sweden',
'alternateName': ['NBIS', 'ELIXIR Sweden'],
'logo': 'http://nbis.se/assets/img/logos/nbislogo-green.svg',
'url': 'https://nbis.se/'},
'supportedRefs': ['GRCh37'],
'description': 'Beacon API Web Server based on the GA4GH Beacon API',
'aggregator': False,
'url': 'https://swefreq.nbis.se/api/beacon-elixir/'}
for value in expected:
assert data[value] == expected[value]


def test_get_countrylist():
"""
Test CountryList.get()
"""
response = requests.get(f'{BASE_URL}/api/countries')
data = json.loads(response.text)

assert len(data['countries']) == 240


def test_get_dataset():
"""
Test GetDataset.get()
"""
ds_name = 'SweGen'
response = requests.get(f'{BASE_URL}/api/dataset/{ds_name}')
data = json.loads(response.text)
expected = {"study": 1,
"shortName": "SweGen",
"fullName": "SweGen",
"version": {"version": "20180409",
"description": "desc",
"terms": "terms",
"availableFrom": "2001-01-04",
"refDoi": "doi",
"dataContactName": "place",
"dataContactLink": "email",
"numVariants": None,
"coverageLevels": [1, 5, 10, 15, 20, 25, 30, 50, 100],
"portalAvail": True,
"fileAccess": "REGISTERED",
"beaconAccess": "PUBLIC",
"dataset": 1,
"referenceSet": 1,
"varCallRef": None},
"future": False}
for value in expected:
assert data[value] == expected[value]
assert len(data) == 14

ds_name = 'SweGen2'
response = requests.get(f'{BASE_URL}/api/dataset/{ds_name}')
data = json.loads(response.text)
expected = {"study": 1,
"shortName": "SweGen2",
"fullName": "SweGen2",
"version": {"version": "20190409",
"description": "desc",
"terms": "terms",
"availableFrom": "2001-01-05",
"refDoi": "doi",
"dataContactName": "place",
"dataContactLink": "email",
"numVariants": None,
"coverageLevels": [1, 5, 10, 15, 20, 25, 30, 50, 100],
"portalAvail": True,
"fileAccess": "REGISTERED",
"beaconAccess": "PUBLIC",
"dataset": 2,
"referenceSet": 1,
"varCallRef":None},
"future": False}
for value in expected:
assert data[value] == expected[value]
assert len(data) == 14

ds_name = 'Unrel'
response = requests.get(f'{BASE_URL}/api/dataset/{ds_name}')
assert not response.text
assert response.status_code == 404
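
These are integration tests: they assume a backend already listening on localhost:4000 with the test data loaded (see BASE_URL above). A hypothetical conftest.py guard that skips them cleanly when no server is running, not part of this commit:

    import pytest
    import requests

    @pytest.fixture(autouse=True, scope='session')
    def require_backend():
        # skip the whole session's tests if nothing answers on the test port
        try:
            requests.get('http://localhost:4000', timeout=2)
        except requests.exceptions.ConnectionError:
            pytest.skip('backend not running on localhost:4000')
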

3 changes: 3 additions & 0 deletions test/data/browser_test_data.sql
@@ -20,6 +20,7 @@ COPY data.studies (id, pi_name, pi_email, contact_name, contact_email, title, st
 COPY data.datasets (id, study, short_name, full_name, browser_uri, beacon_uri, beacon_description, avg_seq_depth, seq_type, seq_tech, seq_center, dataset_size) FROM stdin;
 1	1	SweGen	SweGen	url	\N	\N	0	type	method	place	0
 2	1	SweGen2	SweGen2	url	\N	\N	0	type	method	place	0
+3	1	Unrel	Unreleased dataset	url	\N	\N	0	type	method	place	0
 \.
 
 COPY data.reference_sets (id, reference_build, reference_name, ensembl_version, gencode_version, dbnsfp_version) FROM stdin;
@@ -32,6 +33,8 @@ COPY data.dataset_versions (id, dataset, reference_set, dataset_version, dataset
 3	1	1	20171025	desc	terms	2001-01-03 00:00:00	doi	place	email	\N	{1,5,10,15,20,25,30,50,100}	TRUE	REGISTERED	PUBLIC
 4	1	1	20180409	desc	terms	2001-01-04 00:00:00	doi	place	email	\N	{1,5,10,15,20,25,30,50,100}	TRUE	REGISTERED	PUBLIC
 5	2	1	20190409	desc	terms	2001-01-05 00:00:00	doi	place	email	\N	{1,5,10,15,20,25,30,50,100}	TRUE	REGISTERED	PUBLIC
+6	2	1	UNRELEASED	desc	terms	9999-12-31 00:00:00	doi	place	email	\N	{1,5,10,15,20,25,30,50,100}	TRUE	REGISTERED	PUBLIC
+7	3	1	UNRELEASED	desc	terms	9999-12-31 00:00:00	doi	place	email	\N	{1,5,10,15,20,25,30,50,100}	TRUE	REGISTERED	PUBLIC
 \.
 
 COPY data.coverage (id, dataset_version, chrom, pos, mean, median, coverage) FROM stdin;
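
The added UNRELEASED rows use available_from = 9999-12-31, so the availability check in backend/application.py (dataset_version.available_from > datetime.now()) holds for any realistic clock; that is what the new 403 and 404 assertions in the tests above depend on. A one-line sanity check of the comparison:

    from datetime import datetime
    assert datetime(9999, 12, 31) > datetime.now()  # unreleased until 9999 -> admin-only access
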
