diff --git a/.coveragerc b/.coveragerc
index 7e254e0b..0fb74614 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -14,6 +14,7 @@ omit =
 [report]
 # Regexes for lines to exclude from consideration
 exclude_lines =
+    pragma: no cover
     # Don't complain about missing debug-only code:
     def __repr__
     if self\.debug
diff --git a/Pipfile b/Pipfile
deleted file mode 100644
index d3fda1c9..00000000
--- a/Pipfile
+++ /dev/null
@@ -1,18 +0,0 @@
-[[source]]
-url = "https://pypi.org/simple"
-verify_ssl = true
-name = "pypi"
-
-[dev-packages]
-
-[packages]
-aiohttp = "*"
-asyncpg = "*"
-cryptography = "*"
-jsonschema = "==3.0.0"
-"cyvcf2" = "*"
-PyJWT = "*"
-Cython = "*"
-
-[requires]
-python_version = "3.6"
diff --git a/beacon_api/__init__.py b/beacon_api/__init__.py
index 2d06d62e..f92eecc2 100644
--- a/beacon_api/__init__.py
+++ b/beacon_api/__init__.py
@@ -13,7 +13,7 @@
 __license__ = CONFIG_INFO.license
 __copyright__ = CONFIG_INFO.copyright
 __docs_url__ = CONFIG_INFO.docs_url
-__handover_drs__ = CONFIG_INFO.handover_drs
+__handover_drs__ = CONFIG_INFO.handover_drs.rstrip("/")
 __handover_datasets__ = CONFIG_INFO.handover_datasets
 __handover_beacon__ = CONFIG_INFO.handover_beacon
 __handover_base__ = CONFIG_INFO.handover_base
diff --git a/beacon_api/app.py b/beacon_api/app.py
index 7b97801d..cac8d399 100644
--- a/beacon_api/app.py
+++ b/beacon_api/app.py
@@ -82,7 +82,8 @@ async def initialize(app):
 
 async def destroy(app):
     """Upon server close, close the DB connection pool."""
-    await app['pool'].close()
+    # will defer this to asyncpg
+    await app['pool'].close()  # pragma: no cover
 
 
 def set_cors(server):
diff --git a/beacon_api/extensions/handover.py b/beacon_api/extensions/handover.py
index 5f25f33c..245caa8d 100644
--- a/beacon_api/extensions/handover.py
+++ b/beacon_api/extensions/handover.py
@@ -22,7 +22,7 @@ def make_handover(paths, datasetIds, chr='', start=0, end=0, ref='', alt='', var
     for dataset in set(datasetIds):
         handovers.append({"handoverType": {"id": "CUSTOM", "label": label},
                           "description": desc,
-                          "url": __handover_drs__ + path.format(dataset=dataset, chr=chr, start=start,
-                                                                end=end, ref=ref, alt=alt)})
+                          "url": __handover_drs__ + "/" + path.format(dataset=dataset, chr=chr, start=start,
+                                                                      end=end, ref=ref, alt=alt)})
 
     return handovers
diff --git a/beacon_api/utils/db_load.py b/beacon_api/utils/db_load.py
index 8ccac2af..679cdfa6 100644
--- a/beacon_api/utils/db_load.py
+++ b/beacon_api/utils/db_load.py
@@ -222,11 +222,11 @@ async def load_metadata(self, vcf, metafile, datafile):
         LOG.info(f'Calculate number of samples from {datafile}')
         len_samples = len(vcf.samples)
         LOG.info(f'Parse metadata from {metafile}')
-        with open(metafile, 'r') as metafile:
+        with open(metafile, 'r') as meta_file:
             # read metadata from given JSON file
             # TO DO: parse metadata directly from datafile if possible
-            LOG.info(metafile)
-            metadata = json.load(metafile)
+            LOG.info(meta_file)
+            metadata = json.load(meta_file)
             LOG.info(metadata)
             LOG.info('Metadata has been parsed')
         try:
@@ -255,7 +255,8 @@ async def load_metadata(self, vcf, metafile, datafile):
             LOG.error(f'AN ERROR OCCURRED WHILE ATTEMPTING TO INSERT METADATA -> {e}')
         except Exception as e:
             LOG.error(f'AN ERROR OCCURRED WHILE ATTEMPTING TO PARSE METADATA -> {e}')
-        return metadata['datasetId']
+        else:
+            return metadata['datasetId']
 
     def _chunks(self, iterable, size):
         """Chunk records.
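Taken together, the rstrip("/") added in beacon_api/__init__.py and the explicit "/" join added in make_handover mean the configured handover drs base produces the same URLs whether or not it carries a trailing slash (which is why tests/test.ini further down can switch to drs=https://examplebrowser.org/). A minimal standalone sketch of that slash handling, using plain arguments in place of CONFIG_INFO and __handover_drs__:

# Illustrative sketch only: mirrors the slash handling above with plain
# arguments instead of CONFIG_INFO / __handover_drs__.
def join_handover_url(base: str, path: str) -> str:
    base = base.rstrip("/")    # normalisation done once, as in beacon_api/__init__.py
    return base + "/" + path   # explicit separator, as in make_handover()


# Both spellings of the configured base yield the same handover URL.
assert join_handover_url("https://examplebrowser.org", "dataset/test/browser") == \
    "https://examplebrowser.org/dataset/test/browser"
assert join_handover_url("https://examplebrowser.org/", "dataset/test/browser") == \
    "https://examplebrowser.org/dataset/test/browser"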
diff --git a/beacon_api/utils/validate.py b/beacon_api/utils/validate.py
index 151694fe..817eaedb 100644
--- a/beacon_api/utils/validate.py
+++ b/beacon_api/utils/validate.py
@@ -54,7 +54,8 @@ def set_defaults(validator, properties, instance, schema):
         for error in validate_properties(
             validator, properties, instance, schema,
         ):
-            yield error
+            # Difficult to unit test
+            yield error  # pragma: no cover
 
     return validators.extend(
         validator_class, {"properties": set_defaults},
@@ -76,8 +77,6 @@ def wrapper(func):
     @wraps(func)
     async def wrapped(*args):
         request = args[-1]
-        if not isinstance(request, web.Request):
-            raise BeaconBadRequest(request, request.host, "invalid request", "This does not seem a valid HTTP Request.")
         try:
             _, obj = await parse_request_object(request)
         except Exception:
@@ -121,7 +120,21 @@ def token_scheme_check(token, scheme, obj, host):
         raise BeaconUnauthorised(obj, host, "invalid_token", 'Invalid token scheme, Bearer required.')
 
     if token is None:
-        raise BeaconUnauthorised(obj, host, "invalid_token", 'Token cannot be empty.')
+        # Might never happen
+        raise BeaconUnauthorised(obj, host, "invalid_token", 'Token cannot be empty.')  # pragma: no cover
+
+
+def verify_aud_claim():
+    """Verify audience claim."""
+    aud = []
+    verify_aud = OAUTH2_CONFIG.verify_aud  # Option to skip verification of `aud` claim
+    if verify_aud:
+        aud = os.environ.get('JWT_AUD', OAUTH2_CONFIG.audience)  # List of intended audiences of token
+        # if verify_aud is set to True, we expect that a desired aud is then supplied.
+        # However, if verify_aud=True and no aud is supplied, we use aud=[None] which will fail for
+        # all tokens as a security measure. If aud=[], all tokens will pass (as is the default value).
+        aud = aud.split(',') if aud is not None else [None]
+    return verify_aud, aud
 
 
 def token_auth():
@@ -132,8 +145,6 @@ def token_auth():
     """
    @web.middleware
     async def token_middleware(request, handler):
-        if not isinstance(request, web.Request):
-            raise BeaconBadRequest(request, request.host, "invalid request", "This does not seem a valid HTTP Request.")
         if request.path in ['/query'] and 'Authorization' in request.headers:
             _, obj = await parse_request_object(request)
             try:
@@ -147,14 +158,7 @@ async def token_middleware(request, handler):
 
             # Token decoding parameters
             key = await get_key()  # JWK used to decode token with
-            aud = []
-            verify_aud = OAUTH2_CONFIG.verify_aud  # Option to skip verification of `aud` claim
-            if verify_aud:
-                aud = os.environ.get('JWT_AUD', OAUTH2_CONFIG.audience)  # List of intended audiences of token
-                # if verify_aud is set to True, we expect that a desired aud is then supplied.
-                # However, if verify_aud=True and no aud is supplied, we use aud=[None] which will fail for
-                # all tokens as a security measure. If aud=[], all tokens will pass (as is the default value).
-                aud = aud.split(',') if aud is not None else [None]
+            verify_aud, aud = verify_aud_claim()
             # Prepare JWTClaims validation
             # can be populated with claims that are required to be present in the payload of the token
             claims_options = {
@@ -195,14 +199,15 @@ async def token_middleware(request, handler):
                                     # currently if a token is valid that means request is authenticated
                                     "authenticated": True}
                 return await handler(request)
+            # Testing the exceptions is done in integration tests
             except MissingClaimError as e:
-                raise BeaconUnauthorised(obj, request.host, "invalid_token", f'Missing claim(s): {e}')
+                raise BeaconUnauthorised(obj, request.host, "invalid_token", f'Missing claim(s): {e}')  # pragma: no cover
             except ExpiredTokenError as e:
-                raise BeaconUnauthorised(obj, request.host, "invalid_token", f'Expired signature: {e}')
+                raise BeaconUnauthorised(obj, request.host, "invalid_token", f'Expired signature: {e}')  # pragma: no cover
             except InvalidClaimError as e:
-                raise BeaconForbidden(obj, request.host, f'Token info not corresponding with claim: {e}')
+                raise BeaconForbidden(obj, request.host, f'Token info not corresponding with claim: {e}')  # pragma: no cover
             except InvalidTokenError as e:
-                raise BeaconUnauthorised(obj, request.host, "invalid_token", f'Invalid authorization token: {e}')
+                raise BeaconUnauthorised(obj, request.host, "invalid_token", f'Invalid authorization token: {e}')  # pragma: no cover
         else:
             request["token"] = {"bona_fide_status": False,
                                 "permissions": None,
diff --git a/docs/deploy.rst b/docs/deploy.rst
index 07f284cd..e8858988 100644
--- a/docs/deploy.rst
+++ b/docs/deploy.rst
@@ -76,7 +76,7 @@ For use with Kubernetes we provide ``YAML`` configuration.
         role: beacon
     spec:
       containers:
-        - image: cscfi/beacon
+        - image: cscfi/beacon-python
           imagePullPolicy: Always
           name: beacon
           ports:
@@ -88,8 +88,9 @@ For use with Kubernetes we provide ``YAML`` configuration.
             name: data
       volumes:
         - name: data
-          persistentVolumeClaim:
-            claimName: beaconpy
+          # change below with preferred volume class
+          hostPath:
+            path: /local/disk/path
 ---
 apiVersion: v1
 kind: Service
diff --git a/docs/permissions.rst b/docs/permissions.rst
index ce23362c..bb70bc75 100644
--- a/docs/permissions.rst
+++ b/docs/permissions.rst
@@ -61,7 +61,7 @@ The permissions are then passed in :meth:`beacon_api.utils.validate` as illustra
 
 .. literalinclude:: /../beacon_api/utils/validate.py
    :language: python
    :dedent: 16
-   :lines: 175-188
+   :lines: 179-192
 
 If there is no claim for GA4GH permissions as illustrated above, they will not be added to ``controlled_datasets``.
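The extracted verify_aud_claim() helper returns a (verify_aud, aud) pair: with verification disabled the audience list stays empty, and with verification enabled the audience is read from JWT_AUD or the configured default, falling back to [None] so that validation fails closed when no audience is supplied. A standalone sketch of that decision table, with plain arguments standing in for OAUTH2_CONFIG and the environment:

# Sketch only: same branching as verify_aud_claim(), with plain arguments
# instead of OAUTH2_CONFIG and os.environ.
from typing import List, Optional, Tuple


def resolve_aud(verify_aud: bool, audience: Optional[str]) -> Tuple[bool, List[Optional[str]]]:
    aud: List[Optional[str]] = []
    if verify_aud:
        # With verification on, a missing audience becomes [None], so every token fails.
        aud = audience.split(',') if audience is not None else [None]
    return verify_aud, aud


assert resolve_aud(False, "aud1,aud2") == (False, [])              # aud claim not checked
assert resolve_aud(True, "aud1,aud2") == (True, ["aud1", "aud2"])  # explicit audience list
assert resolve_aud(True, None) == (True, [None])                   # fail closed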
diff --git a/requirements.txt b/requirements.txt
index 7ea21431..1b167433 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
 aiohttp
 aiohttp_cors
 asyncpg
-jsonschema==3.0.2
+jsonschema
 Cython
 numpy
 cyvcf2==0.10.1; python_version < '3.7'
diff --git a/setup.py b/setup.py
index 5987b3b9..ab7b6dae 100644
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@
         'Programming Language :: Python :: 3.7',
     ],
     install_requires=['aiohttp', 'asyncpg', 'authlib',
-                      'jsonschema==3.0.2', 'gunicorn'],
+                      'jsonschema', 'gunicorn'],
     extras_require={
         'test': ['coverage', 'pytest', 'pytest-cov',
                  'coveralls', 'testfixtures', 'tox',
diff --git a/tests/test.ini b/tests/test.ini
index bc5d48ad..060c87d6 100644
--- a/tests/test.ini
+++ b/tests/test.ini
@@ -50,7 +50,7 @@ environment=test
 
 [handover_info]
 # The base url for all handovers
-drs=https://examplebrowser.org
+drs=https://examplebrowser.org/
 
 # Make the handovers 1- or 0-based
 handover_base = 1
diff --git a/tests/test_basic.py b/tests/test_basic.py
index 0606d4df..f8709227 100644
--- a/tests/test_basic.py
+++ b/tests/test_basic.py
@@ -3,9 +3,11 @@
 from beacon_api.utils.db_load import parse_arguments, init_beacon_db, main
 from beacon_api.conf.config import init_db_pool
 from beacon_api.api.query import access_resolution
+from beacon_api.utils.validate import token_scheme_check, verify_aud_claim
 from beacon_api.permissions.ga4gh import get_ga4gh_controlled, get_ga4gh_bona_fide
 from .test_app import PARAMS
 from testfixtures import TempDirectory
+from test.support import EnvironmentVarGuard
 
 
 def mock_token(bona_fide, permissions, auth):
@@ -105,6 +107,22 @@ def test_main_db(self, mock_init):
             main()
             mock_init.assert_called()
 
+    def test_aud_claim(self):
+        """Test aud claim function."""
+        env = EnvironmentVarGuard()
+        env.set('JWT_AUD', "aud1,aud2")
+        result = verify_aud_claim()
+        # Because it is false we expect it not to be parsed
+        expected = (False, [])
+        self.assertEqual(result, expected)
+        env.unset('JWT_AUD')
+
+    def test_token_scheme_check_bad(self):
+        """Test token scheme no token."""
+        # This might never happen, yet lets prepare for it
+        with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized):
+            token_scheme_check("", 'https', {}, 'localhost')
+
     def test_access_resolution_base(self):
         """Test assumptions for access resolution.
 
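The new exception tests in the files below all rely on the same asynctest pattern: a CoroutineMock stands in for the asyncpg pool, and whatever object is assigned to pool.acquire().__aenter__.return_value is what the code under test receives from "async with pool.acquire() as connection". A minimal self-contained sketch of that pattern; count_rows is a hypothetical coroutine, not part of beacon-python:

import asynctest


async def count_rows(pool):
    """Hypothetical query helper, used only to demonstrate the mock."""
    async with pool.acquire() as connection:
        return await connection.fetchval('SELECT count(*) FROM beacon_data_table')


class PoolMockExample(asynctest.TestCase):
    """Show how the pool mock controls what the context manager yields."""

    async def test_count_rows(self):
        connection = asynctest.CoroutineMock()
        connection.fetchval = asynctest.CoroutineMock(return_value=42)
        pool = asynctest.CoroutineMock()
        # The object assigned here is what `async with pool.acquire()` yields.
        pool.acquire().__aenter__.return_value = connection
        self.assertEqual(await count_rows(pool), 42)


if __name__ == '__main__':
    asynctest.main()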
diff --git a/tests/test_data_query.py b/tests/test_data_query.py
index 2f6317a6..587f0e90 100644
--- a/tests/test_data_query.py
+++ b/tests/test_data_query.py
@@ -1,4 +1,5 @@
 import asynctest
+import aiohttp
 from unittest import mock
 from beacon_api.utils.data_query import filter_exists, transform_record
 from beacon_api.utils.data_query import transform_misses, transform_metadata, find_datasets, add_handover
@@ -6,7 +7,7 @@
 from beacon_api.extensions.handover import make_handover
 from datetime import datetime
 from beacon_api.utils.data_query import handle_wildcard
-from .test_db_load import Connection
+from .test_db_load import Connection, ConnectionException
 
 
 class Record:
@@ -154,6 +155,13 @@ async def test_datasets_access_call_public(self):
         # in Connection() class
         self.assertEqual(result, (['mock:public:id'], [], []))
 
+    async def test_datasets_access_call_exception(self):
+        """Test db call of getting public datasets access with exception."""
+        pool = asynctest.CoroutineMock()
+        pool.acquire().__aenter__.return_value = ConnectionException()
+        with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError):
+            await fetch_datasets_access(pool, None)
+
     async def test_datasets_access_call_registered(self):
         """Test db call of getting registered datasets access."""
         pool = asynctest.CoroutineMock()
@@ -195,10 +203,21 @@ async def test_fetch_dataset_metadata_call(self):
         # in Connection() class
         self.assertEqual(result, [])
 
+    async def test_fetch_dataset_metadata_call_exception(self):
+        """Test db call of getting datasets metadata with exception."""
+        pool = asynctest.CoroutineMock()
+        pool.acquire().__aenter__.return_value = ConnectionException()
+        with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError):
+            await fetch_dataset_metadata(pool, None, None)
+
     async def test_fetch_filtered_dataset_call(self):
         """Test db call for retrieving main data."""
         pool = asynctest.CoroutineMock()
-        pool.acquire().__aenter__.return_value = Connection()
+        db_response = {"referenceBases": '', "alternateBases": '', "variantType": "",
+                       "referenceName": 'Chr38',
+                       "frequency": 0, "callCount": 0, "sampleCount": 0, "variantCount": 0,
+                       "start": 0, "end": 0, "accessType": "PUBLIC", "datasetId": "test"}
+        pool.acquire().__aenter__.return_value = Connection(accessData=[db_response])
         assembly_id = 'GRCh38'
         position = (10, 20, None, None, None, None)
         chromosome = 1
@@ -208,10 +227,45 @@ async def test_fetch_filtered_dataset_call(self):
         # for now it can return empty dataset
         # in order to get a response we will have to mock it
         # in Connection() class
-        self.assertEqual(result, [])
+        expected = {'referenceName': 'Chr38', 'callCount': 0, 'sampleCount': 0, 'variantCount': 0, 'datasetId': 'test',
+                    'referenceBases': '', 'alternateBases': '', 'variantType': '', 'start': 0, 'end': 0, 'frequency': 0,
+                    'info': {'accessType': 'PUBLIC'},
+                    'datasetHandover': [{'handoverType': {'id': 'CUSTOM', 'label': 'Variants'},
+                                         'description': 'browse the variants matched by the query',
+                                         'url': 'https://examplebrowser.org/dataset/test/browser/variant/Chr38-1--'},
+                                        {'handoverType': {'id': 'CUSTOM', 'label': 'Region'},
+                                         'description': 'browse data of the region matched by the query',
+                                         'url': 'https://examplebrowser.org/dataset/test/browser/region/Chr38-1-1'},
+                                        {'handoverType': {'id': 'CUSTOM', 'label': 'Data'},
+                                         'description': 'retrieve information of the datasets',
+                                         'url': 'https://examplebrowser.org/dataset/test/browser'}]}
+
+        self.assertEqual(result, [expected])
+
+    async def test_fetch_filtered_dataset_call_misses(self):
+        """Test db call for retrieving miss data."""
+        pool = asynctest.CoroutineMock()
+        pool.acquire().__aenter__.return_value = Connection()  # db_response is []
+        assembly_id = 'GRCh38'
+        position = (10, 20, None, None, None, None)
+        chromosome = 1
+        reference = 'A'
+        alternate = ('DUP', None)
         result_miss = await fetch_filtered_dataset(pool, assembly_id, position, chromosome, reference, alternate, None, None, True)
         self.assertEqual(result_miss, [])
 
+    async def test_fetch_filtered_dataset_call_exception(self):
+        """Test db call of retrieving main data with exception."""
+        assembly_id = 'GRCh38'
+        position = (10, 20, None, None, None, None)
+        chromosome = 1
+        reference = 'A'
+        alternate = ('DUP', None)
+        pool = asynctest.CoroutineMock()
+        pool.acquire().__aenter__.return_value = ConnectionException()
+        with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError):
+            await fetch_filtered_dataset(pool, assembly_id, position, chromosome, reference, alternate, None, None, False)
+
     def test_handle_wildcard(self):
         """Test PostgreSQL wildcard handling."""
         sequence1 = 'ATCG'
diff --git a/tests/test_db_load.py b/tests/test_db_load.py
index f69467da..8fc78045 100644
--- a/tests/test_db_load.py
+++ b/tests/test_db_load.py
@@ -11,7 +11,7 @@ class Variant:
     Mock this for Variant calculations.
     """
 
-    def __init__(self, ALT, REF, INF, call_rate, var_type, num_called):
+    def __init__(self, ALT, REF, INF, call_rate, var_type, num_called, is_sv=False):
         """Initialize class."""
         self.INFO = INF
         self.ALT = ALT
@@ -19,7 +19,7 @@ def __init__(self, ALT, REF, INF, call_rate, var_type, num_called):
         self.call_rate = call_rate
         self.var_type = var_type
         self.num_called = num_called
-        self.is_sv = False
+        self.is_sv = is_sv
 
 
 class INFO:
@@ -28,12 +28,13 @@ class INFO:
     Mock this for storing VCF info.
     """
 
-    def __init__(self, AC, VT, AN):
+    def __init__(self, AC, VT, AN, AF, SVTYPE=None):
         """Initialize class."""
         self.AC = AC
         self.VT = VT
         self.AN = AN
-        self.AF = None
+        self.AF = AF
+        self.SVTYPE = SVTYPE
 
     def get(self, key):
         """Inside `__getitem__` method."""
@@ -119,6 +120,30 @@ def transaction(self, *args, **kwargs):
         return Transaction(*args, **kwargs)
 
 
+class ConnectionException:
+    """Class Connection with Exception.
+
+    Mock this from asyncpg.
+    """
+
+    def __init__(self):
+        """Initialize class."""
+        pass
+
+    def transaction(self, *args, **kwargs):
+        """Mimic transaction."""
+        return Transaction(*args, **kwargs)
+
+    async def execute(self, query, *args):
+        """Mimic execute."""
+        return Exception
+
+    @asyncio.coroutine
+    def prepare(self, query):
+        """Mimic prepare."""
+        return Exception
+
+
 class DatabaseTestCase(asynctest.TestCase):
     """Test database operations."""
 
@@ -150,7 +175,8 @@ def setUp(self):
         #CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001 NA00002 NA00003
         19 111 . A C 9.6 . . GT:HQ 0|0:10,10 0|0:10,10 0/1:3,3
         19 112 . A G 10 . . GT:HQ 0|0:10,10 0|0:10,10 0/1:3,3
-        20 14370 rs6054257 G A 29 PASS NS=3;DP=14;AF=0.5;DB;H2 GT:GQ:DP:HQ 0|0:48:1:51,51 1|0:48:8:51,51 1/1:43:5:.,."""
+        20 14370 rs6054257 G A 29 PASS NS=3;DP=14;AF=0.5;DB;H2 GT:GQ:DP:HQ 0|0:48:1:51,51 1|0:48:8:51,51 1/1:43:5:.,.
+        chrM 15011 . T C . PASS . GT:GQ:DP:RO:QR:AO:QA:GL 1:160:970:0:0:968:31792:-2860.58,0 1:160:970:0:0:968:31792:-2860.58,0"""
         self.datafile = self._dir.write('data.csv', self.data.encode('utf-8'))
 
     def tearDown(self):
@@ -164,6 +190,18 @@ async def test_rchop(self, db_mock):
         await self._db.connection()
         result = self._db._rchop('INS:ME:LINE1', ":LINE1")
         self.assertEqual('INS:ME', result)
+        result_no_ending = self._db._rchop('INS', ":LINE1")
+        self.assertEqual('INS', result_no_ending)
+
+    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    async def test_handle_type(self, db_mock):
+        """Test handle type."""
+        db_mock.return_value = Connection()
+        await self._db.connection()
+        result = self._db._handle_type(1, int)
+        self.assertEqual([1], result)
+        result_tuple = self._db._handle_type((0.1, 0.2), float)
+        self.assertEqual([0.1, 0.2], result_tuple)
 
     @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
     async def test_bnd_parts(self, db_mock):
@@ -205,6 +243,16 @@ async def test_create_tables(self, db_mock, mock_log):
         # Should assert logs
         mock_log.info.assert_called_with('Tables have been created')
 
+    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
+    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    async def test_create_tables_exception(self, db_mock, mock_log):
+        """Test creating tables exception."""
+        db_mock.return_value = ConnectionException()
+        await self._db.connection()
+        await self._db.create_tables('sql.init')
+        log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CREATE TABLES -> [Errno 2] No such file or directory: 'sql.init'"
+        mock_log.error.assert_called_with(log)
+
     @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
     @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
     @asynctest.mock.patch('beacon_api.utils.db_load.VCF')
@@ -230,6 +278,18 @@ async def test_load_metadata(self, mock_vcf, db_mock, mock_log):
         mock_log.info.mock_calls = [f'Parsing metadata from {metafile}',
                                     'Metadata has been parsed']
 
+    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
+    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    async def test_load_metadata_exception(self, db_mock, mock_log):
+        """Test load metadata error."""
+        db_mock.return_value = ConnectionException()
+        await self._db.connection()
+        vcf = asynctest.mock.MagicMock(name='samples')
+        vcf.samples.return_value = [1, 2, 3]
+        await self._db.load_metadata(vcf, 'meta.are', 'datafile')
+        log = "AN ERROR OCCURRED WHILE ATTEMPTING TO PARSE METADATA -> [Errno 2] No such file or directory: 'meta.are'"
+        mock_log.error.assert_called_with(log)
+
     @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
     @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
     async def test_load_datafile(self, db_mock, mock_log):
@@ -257,12 +317,6 @@ async def test_insert_variants(self, db_mock, mock_log):
         mock_log.info.mock_calls = [f'Received 1 variants for insertion to DATASET1',
                                     'Insert variants into the database']
 
-    # This was the case when BeaconDB() was initiated with a URL parameter, now it happens with environment variables
-    # def test_bad_init(self):
-    #     """Capture error in case of anything wrong with initializing BeaconDB."""
-    #     with self.assertRaises(TypeError):
-    #         BeaconDB()
-
     @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
     @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
     async def test_close(self, db_mock, mock_log):
@@ -273,24 +327,42 @@ async def test_close(self, db_mock, mock_log):
         await self._db.close()
         mock_log.info.mock_calls = ['Mark the database connection to be closed',
                                     'The database connection has been closed']
 
+    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
+    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    async def test_close_error(self, db_mock, mock_log):
+        """Test database URL close error."""
+        db_mock.return_value = ConnectionException()
+        await self._db.connection()
+        await self._db.close()
+        log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CLOSE DATABASE CONNECTION -> 'ConnectionException' object has no attribute 'close'"
+        mock_log.error.assert_called_with(log)
+
     @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
     @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
     async def test_unpack(self, db_mock, mock_log):
         """Test database URL fetching."""
         db_mock.return_value = Connection()
         await self._db.connection()
-        inf1 = INFO((1), 'S', 3)
-        variant = Variant(['C'], 'T', inf1, 0.7, 'snp', 3)
-        result = self._db._unpack(variant)
+        inf1 = INFO((1), 'i', 3, None)
+        variant_1 = Variant(['C'], 'T', inf1, 0.7, 'indel', 3)
+        result = self._db._unpack(variant_1)
         self.assertEqual(([0.3333333333333333], [1], ['SNP'], ['C'], 3, []), result)
-        inf2 = INFO(1, 'S', 3)
-        variant = Variant(['AT', 'A'], 'ATA', inf2, 0.7, 'snp', 3)
-        result = self._db._unpack(variant)
+        inf2 = INFO(1, 'M', 3, None)
+        variant_2 = Variant(['AT', 'A'], 'ATA', inf2, 0.7, 'mnp', 3)
+        result = self._db._unpack(variant_2)
         self.assertEqual(([0.3333333333333333], [1], ['DEL', 'DEL'], ['AT', 'A'], 3, []), result)
-        inf3 = INFO((1), 'S', 3)
-        variant = Variant(['TC'], 'T', inf3, 0.7, 'snp', 3)
-        result = self._db._unpack(variant)
-        self.assertEqual(([0.3333333333333333], [1], ['INS'], ['TC'], 3, []), result)
+        inf3 = INFO((1), 'S', 3, 0.5)
+        variant_3 = Variant(['TC'], 'T', inf3, 0.7, 'snp', 3)
+        result = self._db._unpack(variant_3)
+        self.assertEqual(([0.5], [1], ['INS'], ['TC'], 3, []), result)
+        inf4 = INFO((1), '', 3, None, 'BND')
+        variant_4 = Variant(['TC'], 'T', inf4, 0.7, 'snp', 3)
+        result = self._db._unpack(variant_4)
+        self.assertEqual(([0.3333333333333333], [1], ['SNP'], ['TC'], 3, []), result)
+        inf5 = INFO((1), 'S', 3, None, '')
+        variant_5 = Variant(['TC'], 'T', inf5, 0.7, 'ins', 3)
+        result5 = self._db._unpack(variant_5)
+        self.assertEqual(([0.3333333333333333], [1], ['INS'], ['TC'], 3, []), result5)
 
     @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
     @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
diff --git a/tests/test_mate_name.py b/tests/test_mate_name.py
index ca7d6d86..2265a9e9 100644
--- a/tests/test_mate_name.py
+++ b/tests/test_mate_name.py
@@ -1,6 +1,7 @@
 import asynctest
+import aiohttp
 from beacon_api.extensions.mate_name import find_fusion, fetch_fusion_dataset
-from .test_db_load import Connection
+from .test_db_load import Connection, ConnectionException
 
 
 class TestDataQueryFunctions(asynctest.TestCase):
@@ -28,7 +29,11 @@ async def test_find_fusion(self, mock_filtered):
     async def test_fetch_fusion_dataset_call(self):
         """Test db call for retrieving mate data."""
         pool = asynctest.CoroutineMock()
-        pool.acquire().__aenter__.return_value = Connection()
+        db_response = {"referenceBases": '', "alternateBases": '', "variantType": "",
+                       "referenceName": 'Chr38',
+                       "frequency": 0, "callCount": 0, "sampleCount": 0, "variantCount": 0,
+                       "start": 0, "end": 0, "accessType": "PUBLIC", "datasetId": "test"}
+        pool.acquire().__aenter__.return_value = Connection(accessData=[db_response])
         assembly_id = 'GRCh38'
         position = (10, 20, None, None, None, None)
         chromosome = 1
@@ -37,10 +42,42 @@ async def test_fetch_fusion_dataset_call(self):
         # for now it can return empty dataset
         # in order to get a response we will have to mock it
         # in Connection() class
-        self.assertEqual(result, [])
+        expected = {'referenceName': 'Chr38', 'callCount': 0, 'sampleCount': 0, 'variantCount': 0, 'datasetId': 'test',
+                    'referenceBases': '', 'alternateBases': '', 'variantType': '', 'start': 0, 'end': 0, 'frequency': 0,
+                    'info': {'accessType': 'PUBLIC'},
+                    'datasetHandover': [{'handoverType': {'id': 'CUSTOM', 'label': 'Variants'},
+                                         'description': 'browse the variants matched by the query',
+                                         'url': 'https://examplebrowser.org/dataset/test/browser/variant/Chr38-1--'},
+                                        {'handoverType': {'id': 'CUSTOM', 'label': 'Region'},
+                                         'description': 'browse data of the region matched by the query',
+                                         'url': 'https://examplebrowser.org/dataset/test/browser/region/Chr38-1-1'},
+                                        {'handoverType': {'id': 'CUSTOM', 'label': 'Data'},
+                                         'description': 'retrieve information of the datasets',
+                                         'url': 'https://examplebrowser.org/dataset/test/browser'}]}
+        self.assertEqual(result, [expected])
+
+    async def test_fetch_fusion_dataset_call_miss(self):
+        """Test db call for retrieving mate miss data."""
+        pool = asynctest.CoroutineMock()
+        pool.acquire().__aenter__.return_value = Connection()
+        assembly_id = 'GRCh38'
+        position = (10, 20, None, None, None, None)
+        chromosome = 1
+        reference = 'A'
         result_miss = await fetch_fusion_dataset(pool, assembly_id, position, chromosome, reference, None, None, None, True)
         self.assertEqual(result_miss, [])
 
+    async def test_fetch_fusion_dataset_call_exception(self):
+        """Test db call for retrieving mate data with exception."""
+        pool = asynctest.CoroutineMock()
+        pool.acquire().__aenter__.return_value = ConnectionException()
+        assembly_id = 'GRCh38'
+        position = (10, 20, None, None, None, None)
+        chromosome = 1
+        reference = 'A'
+        with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError):
+            await fetch_fusion_dataset(pool, assembly_id, position, chromosome, reference, None, None, None, False)
+
 
 if __name__ == '__main__':
     asynctest.main()
diff --git a/tox.ini b/tox.ini
index c8597cb0..e1bb94c4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -16,6 +16,7 @@ commands = sphinx-build -W -c docs/ -b html docs/ docs/_build/html
 
 [testenv:bandit]
 skip_install = true
+; plain search for known vulnerable code
 deps = bandit
 commands = bandit -r beacon_api/ -c .bandit.yml
 
@@ -23,7 +24,7 @@ commands = bandit -r beacon_api/ -c .bandit.yml
 [testenv:flake8]
 skip_install = true
 deps =
-    pydocstyle==3.0.0
+    pydocstyle
     flake8
     flake8-docstrings
 commands = flake8 .
@@ -36,7 +37,7 @@ deps =
     .[test]
     -rrequirements.txt
 # Stop after first failure
-commands = py.test -x --cov=beacon_api tests/
+commands = py.test -x --cov=beacon_api tests/ --cov-fail-under=80
     python {toxinidir}/tests/coveralls.py
 
 [travis]
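With pragma: no cover added to exclude_lines in .coveragerc, any line carrying that comment is left out of the figure that --cov-fail-under=80 in tox.ini is checked against. A small hypothetical illustration (not from the codebase) of the kind of line the exclusion is meant for:

def close_pool(pool):
    """Hypothetical shutdown helper."""
    if pool is None:
        return None
    # Only exercised on a real server shutdown, so it is excluded from the report,
    # in the same spirit as destroy() in beacon_api/app.py above.
    return pool.close()  # pragma: no cover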