diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index f7999a43..14704d96 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,10 +12,9 @@ jobs: runs-on: ${{ matrix.os }} steps: - - name: Spell check install - run: curl -L https://git.io/misspell | bash - - name: Spell check docs - run: bin/misspell -error docs/* + - uses: actions/checkout@master + - uses: rojopolis/spellcheck-github-actions@0.20.0 + name: Spellcheck code_docs: strategy: diff --git a/.github/workflows/s2i-build.yml b/.github/workflows/s2i-build.yml index a27edff7..e969f038 100644 --- a/.github/workflows/s2i-build.yml +++ b/.github/workflows/s2i-build.yml @@ -18,9 +18,9 @@ jobs: - name: Install requirements run: | - wget https://github.com/openshift/source-to-image/releases/download/v1.2.0/source-to-image-v1.2.0-2a579ecd-linux-amd64.tar.gz - tar -xvf source-to-image-v1.2.0-2a579ecd-linux-amd64.tar.gz + wget https://github.com/openshift/source-to-image/releases/download/v1.3.1/source-to-image-v1.3.1-a5a77147-linux-amd64.tar.gz + tar -xvf source-to-image-v1.3.1-a5a77147-linux-amd64.tar.gz sudo cp s2i /usr/local/bin - name: Build image run: | - s2i build . centos/python-36-centos7 cscfi/beacon-python + s2i build . 
centos/python-38-centos7 cscfi/beacon-python diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index ee9ac8bd..316bd6f8 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -26,9 +26,11 @@ jobs: run: tox -e flake8 - name: Do bandit static check with tox run: tox -e bandit + - name: Black formatting check + run: tox -e black - name: Install libcurl-devel run: | sudo apt-get update sudo apt-get install libcurl4-openssl-dev - - name: Do typing check with tox + - name: Type hints check run: tox -e mypy diff --git a/.spellcheck.yml b/.spellcheck.yml new file mode 100644 index 00000000..142e44eb --- /dev/null +++ b/.spellcheck.yml @@ -0,0 +1,27 @@ +matrix: +- name: Markdown + aspell: + lang: en + dictionary: + wordlists: + - .wordlist.txt + encoding: utf-8 + pipeline: + - pyspelling.filters.markdown: + - pyspelling.filters.context: + context_visible_first: true + escapes: '\\[\\`~]' + delimiters: + # Ignore text between inline back ticks as this is code or highlight words + - open: '(?P<open>`+)' + close: '(?P=open)' + # Ignore surrounded in <> as in RST it is link + - open: '<([A-Za-z0-9-_:.]+)|(https?://[^\\s/$.?#].[^\\s]+|[A-Za-z0-9-_:.]+)' + close: '>' + # Ignore code in RST starting with $ + - open: '\$.+' + close: '' + sources: + - 'docs/*.rst' + - '**/*.md' + default_encoding: utf-8 diff --git a/.wordlist.txt b/.wordlist.txt new file mode 100644 index 00000000..7f8497bc --- /dev/null +++ b/.wordlist.txt @@ -0,0 +1,144 @@ +AAI +accessType +aiohttp +alleleCount +alleleRequest +alternateBases +alternativeUrl +api +APIBehavior +APITest +apiVersion +assemblyId +automodule +autosummary +beacondb +beaconId +beaconpy +BND +BONA +btree +callCount +cd +chr +CHR +claimName +conf +config +contactUrl +containerPort +createdAt +createDateTime +csc +CSC +cscfi +CSCfi +datafile +datafiles +dataloader +dataset +DATASET +datasetAlleleResponses +datasetHandover +datasetId +datasetIds +datasets +dedent +documentationUrl +ebi +ega +EGA +endMax 
+endMin +env +ENV +Espoo +examplebrowser +exampleid +externalUrl +fi +FIDE +finland +ga +genomic +gh +GH +GHBeacon +github +GRCh +Gunicorn +GunicornUVLoopWebWorker +handoverType +hg +hostPath +html +http +HttpLocust +https +ICT +ietf +imagePullPolicy +includeDatasetResponses +ini +init +io +javascript +jpg +json +JSON +JWK +jwt +JWT +Keilaranta +literalinclude +localhost +logoUrl +matchLabels +mateID +mateName +mateStart +mountPath +namespace +NodePort +OAuth +orgInfo +persistentVolumeClaim +pgtune +postgres +POSTGRES +py +readthedocs +referenceBases +referenceID +referenceName +restartPolicy +rfc +RGB +sampleAlleleRequests +sampleCount +schemas +secretKeyRef +SNP +sql +startMax +startMin +targetPort +TaskSet +TCP +toctree +txt +ua +uk +updatedAt +updateDateTime +uri +url +utils +valueFrom +variantCount +varianttype +variantType +vcf +volumeMounts +welcomeUrl +www \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index dd309e1f..0e4f4057 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,22 +1,24 @@ -FROM python:3.8-alpine3.13 as BUILD +FROM python:3.8-alpine3.15 as BUILD RUN apk add --update \ && apk add --no-cache build-base curl-dev linux-headers bash git musl-dev\ - && apk add --no-cache libressl-dev libffi-dev autoconf bzip2-dev xz-dev\ + && apk add --no-cache openssl-dev libffi-dev autoconf bzip2-dev xz-dev\ && apk add --no-cache python3-dev rust cargo \ && rm -rf /var/cache/apk/* COPY requirements.txt /root/beacon/requirements.txt -COPY setup.py /root/beacon/setup.py -COPY beacon_api /root/beacon/beacon_api ENV CYTHONIZE=1 RUN pip install --upgrade pip && \ - pip install -r /root/beacon/requirements.txt && \ - pip install /root/beacon + pip install Cython==0.29.26 && \ + pip install -r /root/beacon/requirements.txt + +COPY setup.py /root/beacon/setup.py +COPY beacon_api /root/beacon/beacon_api +RUN pip install /root/beacon -FROM python:3.8-alpine3.13 +FROM python:3.8-alpine3.15 RUN apk add --no-cache --update bash diff --git a/README.md 
b/README.md index 7fc1b721..2f86b872 100644 --- a/README.md +++ b/README.md @@ -10,8 +10,8 @@ Documentation: https://beacon-python.readthedocs.io ### Quick start `beacon-python` Web Server requires: -* Python 3.6+; -* running DB [PostgreSQL Server](https://www.postgresql.org/) 9.6+ (recommended 11.6). +* Python 3.8+; +* running DB [PostgreSQL Server](https://www.postgresql.org/) 9.6+ (recommended 13). ```shell git clone https://github.com/CSCfi/beacon-python @@ -30,7 +30,7 @@ docker run -e POSTGRES_USER=beacon \ -e POSTGRES_PASSWORD=beacon \ -v "$PWD/data":/docker-entrypoint-initdb.d \ -e POSTGRES_DB=beacondb \ - -p 5432:5432 postgres:11.6 + -p 5432:5432 postgres:13 ``` #### Run beacon-python diff --git a/beacon_api/__init__.py b/beacon_api/__init__.py index efd42324..7d95534d 100644 --- a/beacon_api/__init__.py +++ b/beacon_api/__init__.py @@ -24,7 +24,7 @@ __url__ = CONFIG_INFO.url __alturl__ = CONFIG_INFO.alturl __createtime__ = CONFIG_INFO.createtime -__updatetime__ = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') # Every restart of the application means an update to it +__updatetime__ = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") # Every restart of the application means an update to it __org_id__ = CONFIG_INFO.org_id __org_name__ = CONFIG_INFO.org_name @@ -33,12 +33,10 @@ __org_welcomeUrl__ = CONFIG_INFO.org_welcomeUrl __org_contactUrl__ = CONFIG_INFO.org_contactUrl __org_logoUrl__ = CONFIG_INFO.org_logoUrl -__org_info__ = {'orgInfo': CONFIG_INFO.org_info} +__org_info__ = {"orgInfo": CONFIG_INFO.org_info} __sample_queries__ = SAMPLE_QUERIES # GA4GH Discovery -__service_type__ = {'group': f'{CONFIG_INFO.service_group}', - 'artifact': f'{CONFIG_INFO.service_artifact}', - 'version': f'{__apiVersion__}'} +__service_type__ = {"group": f"{CONFIG_INFO.service_group}", "artifact": f"{CONFIG_INFO.service_artifact}", "version": f"{__apiVersion__}"} __service_env__ = CONFIG_INFO.environment diff --git a/beacon_api/api/exceptions.py 
b/beacon_api/api/exceptions.py index a7699eff..8ebb7e3b 100644 --- a/beacon_api/api/exceptions.py +++ b/beacon_api/api/exceptions.py @@ -3,7 +3,7 @@ API specification requires custom messages upon error. """ -import json +import ujson from typing import Dict from aiohttp import web from .. import __apiVersion__ @@ -35,7 +35,17 @@ def process_exception_data(request: Dict, host: str, error_code: int, error: str # include datasetIds only if they are specified # as per specification if they don't exist all datatsets will be queried # Only one of `alternateBases` or `variantType` is required, validated by schema - oneof_fields = ["alternateBases", "variantType", "start", "end", "startMin", "startMax", "endMin", "endMax", "datasetIds"] + oneof_fields = [ + "alternateBases", + "variantType", + "start", + "end", + "startMin", + "startMax", + "endMin", + "endMax", + "datasetIds", + ] data["alleleRequest"].update({k: request.get(k) for k in oneof_fields if k in request}) return data @@ -51,7 +61,7 @@ class BeaconBadRequest(web.HTTPBadRequest): def __init__(self, request: Dict, host: str, error: str) -> None: """Return custom bad request exception.""" data = process_exception_data(request, host, 400, error) - super().__init__(text=json.dumps(data), content_type="application/json") + super().__init__(text=ujson.dumps(data, escape_forward_slashes=False), content_type="application/json") LOG.error(f"401 ERROR MESSAGE: {error}") @@ -65,14 +75,10 @@ class BeaconUnauthorised(web.HTTPUnauthorized): def __init__(self, request: Dict, host: str, error: str, error_message: str) -> None: """Return custom unauthorized exception.""" data = process_exception_data(request, host, 401, error) - headers_401 = { - "WWW-Authenticate": f'Bearer realm="{CONFIG_INFO.url}"\n\ - error="{error}"\n\ - error_description="{error_message}"' - } + headers_401 = {"WWW-Authenticate": f"""Bearer realm=\"{CONFIG_INFO.url}\",error=\"{error}\", error_description=\"{error_message}\""""} super().__init__( 
content_type="application/json", - text=json.dumps(data), + text=ujson.dumps(data, escape_forward_slashes=False), # we use auth scheme Bearer by default headers=headers_401, ) @@ -90,7 +96,7 @@ class BeaconForbidden(web.HTTPForbidden): def __init__(self, request: Dict, host: str, error: str) -> None: """Return custom forbidden exception.""" data = process_exception_data(request, host, 403, error) - super().__init__(content_type="application/json", text=json.dumps(data)) + super().__init__(content_type="application/json", text=ujson.dumps(data, escape_forward_slashes=False)) LOG.error(f"403 ERROR MESSAGE: {error}") @@ -103,5 +109,5 @@ class BeaconServerError(web.HTTPInternalServerError): def __init__(self, error: str) -> None: """Return custom forbidden exception.""" data = {"errorCode": 500, "errorMessage": error} - super().__init__(content_type="application/json", text=json.dumps(data)) + super().__init__(content_type="application/json", text=ujson.dumps(data, escape_forward_slashes=False)) LOG.error(f"500 ERROR MESSAGE: {error}") diff --git a/beacon_api/app.py b/beacon_api/app.py index add2944f..357e5179 100644 --- a/beacon_api/app.py +++ b/beacon_api/app.py @@ -17,7 +17,7 @@ from .utils.validate_jwt import token_auth import uvloop import asyncio -import json +import ujson routes = web.RouteTableDef() asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) @@ -59,7 +59,7 @@ async def beacon_get_query(request: web.Request) -> web.Response: method, processed_request = await parse_request_object(request) params = request.app["pool"], method, processed_request, request["token"], request.host response = await query_request_handler(params) - return web.json_response(response, content_type="application/json", dumps=json.dumps) + return web.json_response(response, content_type="application/json", dumps=ujson.dumps) @routes.post("/query") @@ -69,7 +69,7 @@ async def beacon_post_query(request: web.Request) -> web.Response: method, processed_request = await 
parse_request_object(request) params = request.app["pool"], method, processed_request, request["token"], request.host response = await query_request_handler(params) - return web.json_response(response, content_type="application/json", dumps=json.dumps) + return web.json_response(response, content_type="application/json", dumps=ujson.dumps) async def initialize(app: web.Application) -> None: @@ -125,11 +125,17 @@ def main(): # sslcontext.load_cert_chain(ssl_certfile, ssl_keyfile) # sslcontext = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) # sslcontext.check_hostname = False - web.run_app(init(), host=os.environ.get("HOST", "0.0.0.0"), port=os.environ.get("PORT", "5050"), shutdown_timeout=0, ssl_context=None) # nosec # nosec + web.run_app( + init(), + host=os.environ.get("HOST", "0.0.0.0"), # nosec + port=os.environ.get("PORT", "5050"), + shutdown_timeout=0, + ssl_context=None, + ) if __name__ == "__main__": - if sys.version_info < (3, 6): - LOG.error("beacon-python requires python 3.6") + if sys.version_info < (3, 8): + LOG.error("beacon-python requires python 3.8") sys.exit(1) main() diff --git a/beacon_api/conf/__init__.py b/beacon_api/conf/__init__.py index 9128be6e..6b02a8cd 100644 --- a/beacon_api/conf/__init__.py +++ b/beacon_api/conf/__init__.py @@ -12,12 +12,12 @@ def convert(dictionary: Dict) -> tuple: """Convert dictionary to Named tuple.""" - return namedtuple('Config', dictionary.keys())(**dictionary) + return namedtuple("Config", dictionary.keys())(**dictionary) def parse_drspaths(paths: str) -> List[List[str]]: """Parse handover configuration.""" - return [p.strip().split(',', 2) for p in paths.split('\n') if p.split()] + return [p.strip().split(",", 2) for p in paths.split("\n") if p.split()] def parse_config_file(path) -> Any: @@ -25,38 +25,38 @@ def parse_config_file(path) -> Any: config = ConfigParser() config.read(path) config_vars: Dict[str, Union[str, int, List[List[str]]]] = { - 'title': config.get('beacon_general_info', 'title'), - 
'version': config.get('beacon_general_info', 'version'), - 'author': config.get('beacon_general_info', 'author'), - 'license': config.get('beacon_general_info', 'license'), - 'copyright': config.get('beacon_general_info', 'copyright'), - 'docs_url': config.get('beacon_general_info', 'docs_url'), - 'handover_drs': config.get('handover_info', 'drs', fallback=''), - 'handover_datasets': parse_drspaths(config.get('handover_info', 'dataset_paths', fallback='')), - 'handover_beacon': parse_drspaths(config.get('handover_info', 'beacon_paths', fallback='')), - 'handover_base': int(config.get('handover_info', 'handover_base', fallback=0)), - 'apiVersion': config.get('beacon_api_info', 'apiVersion'), - 'beaconId': config.get('beacon_api_info', 'beaconId'), - 'description': config.get('beacon_api_info', 'description'), - 'url': config.get('beacon_api_info', 'url'), - 'alturl': config.get('beacon_api_info', 'alturl'), - 'createtime': config.get('beacon_api_info', 'createtime'), - 'service_group': config.get('beacon_api_info', 'service_group'), - 'service_artifact': config.get('beacon_api_info', 'service_artifact'), - 'environment': config.get('beacon_api_info', 'environment'), - 'org_id': config.get('organisation_info', 'org_id'), - 'org_name': config.get('organisation_info', 'org_name'), - 'org_description': config.get('organisation_info', 'org_description'), - 'org_address': config.get('organisation_info', 'org_address'), - 'org_welcomeUrl': config.get('organisation_info', 'org_welcomeUrl'), - 'org_contactUrl': config.get('organisation_info', 'org_contactUrl'), - 'org_logoUrl': config.get('organisation_info', 'org_logoUrl'), - 'org_info': config.get('organisation_info', 'org_info') + "title": config.get("beacon_general_info", "title"), + "version": config.get("beacon_general_info", "version"), + "author": config.get("beacon_general_info", "author"), + "license": config.get("beacon_general_info", "license"), + "copyright": config.get("beacon_general_info", "copyright"), + 
"docs_url": config.get("beacon_general_info", "docs_url"), + "handover_drs": config.get("handover_info", "drs", fallback=""), + "handover_datasets": parse_drspaths(config.get("handover_info", "dataset_paths", fallback="")), + "handover_beacon": parse_drspaths(config.get("handover_info", "beacon_paths", fallback="")), + "handover_base": int(config.get("handover_info", "handover_base", fallback=0)), + "apiVersion": config.get("beacon_api_info", "apiVersion"), + "beaconId": config.get("beacon_api_info", "beaconId"), + "description": config.get("beacon_api_info", "description"), + "url": config.get("beacon_api_info", "url"), + "alturl": config.get("beacon_api_info", "alturl"), + "createtime": config.get("beacon_api_info", "createtime"), + "service_group": config.get("beacon_api_info", "service_group"), + "service_artifact": config.get("beacon_api_info", "service_artifact"), + "environment": config.get("beacon_api_info", "environment"), + "org_id": config.get("organisation_info", "org_id"), + "org_name": config.get("organisation_info", "org_name"), + "org_description": config.get("organisation_info", "org_description"), + "org_address": config.get("organisation_info", "org_address"), + "org_welcomeUrl": config.get("organisation_info", "org_welcomeUrl"), + "org_contactUrl": config.get("organisation_info", "org_contactUrl"), + "org_logoUrl": config.get("organisation_info", "org_logoUrl"), + "org_info": config.get("organisation_info", "org_info"), } return convert(config_vars) -CONFIG_INFO = parse_config_file(environ.get('CONFIG_FILE', str(Path(__file__).resolve().parent.joinpath('config.ini')))) +CONFIG_INFO = parse_config_file(environ.get("CONFIG_FILE", str(Path(__file__).resolve().parent.joinpath("config.ini")))) def parse_oauth2_config_file(path: str) -> Any: @@ -64,17 +64,17 @@ def parse_oauth2_config_file(path: str) -> Any: config = ConfigParser() config.read(path) config_vars: Dict[str, Union[str, bool, None]] = { - 'server': config.get('oauth2', 'server'), - 
'issuers': config.get('oauth2', 'issuers'), - 'userinfo': config.get('oauth2', 'userinfo'), - 'audience': config.get('oauth2', 'audience') or None, - 'verify_aud': bool(strtobool(config.get('oauth2', 'verify_aud'))), - 'bona_fide_value': config.get('oauth2', 'bona_fide_value') + "server": config.get("oauth2", "server"), + "issuers": config.get("oauth2", "issuers"), + "userinfo": config.get("oauth2", "userinfo"), + "audience": config.get("oauth2", "audience") or None, + "verify_aud": bool(strtobool(config.get("oauth2", "verify_aud"))), + "bona_fide_value": config.get("oauth2", "bona_fide_value"), } return convert(config_vars) -OAUTH2_CONFIG = parse_oauth2_config_file(environ.get('CONFIG_FILE', str(Path(__file__).resolve().parent.joinpath('config.ini')))) +OAUTH2_CONFIG = parse_oauth2_config_file(environ.get("CONFIG_FILE", str(Path(__file__).resolve().parent.joinpath("config.ini")))) # Sample query file should be of format [{BeaconAlleleRequest}] https://github.com/ga4gh-beacon/specification/ -sampleq_file = Path(environ.get('SAMPLEQUERY_FILE', str(Path(__file__).resolve().parent.joinpath('sample_queries.json')))) +sampleq_file = Path(environ.get("SAMPLEQUERY_FILE", str(Path(__file__).resolve().parent.joinpath("sample_queries.json")))) SAMPLE_QUERIES = json.load(open(sampleq_file)) if sampleq_file.is_file() else [] diff --git a/beacon_api/conf/config.py b/beacon_api/conf/config.py index 2570523a..706f7b5c 100644 --- a/beacon_api/conf/config.py +++ b/beacon_api/conf/config.py @@ -18,7 +18,7 @@ import asyncpg from typing import Awaitable -DB_SCHEMA = os.environ.get('DATABASE_SCHEMA', None) +DB_SCHEMA = os.environ.get("DATABASE_SCHEMA", None) async def init_db_pool() -> Awaitable: @@ -26,20 +26,22 @@ async def init_db_pool() -> Awaitable: As we will have frequent requests to the database it is recommended to create a connection pool. 
""" - return await asyncpg.create_pool(host=os.environ.get('DATABASE_URL', 'localhost'), - port=os.environ.get('DATABASE_PORT', '5432'), - user=os.environ.get('DATABASE_USER', 'beacon'), - password=os.environ.get('DATABASE_PASSWORD', 'beacon'), - database=os.environ.get('DATABASE_NAME', 'beacondb'), - # Multiple schemas can be used, and they need to be comma separated - server_settings={'search_path': DB_SCHEMA if DB_SCHEMA else 'public'}, - # initializing with 0 connections allows the web server to - # start and also continue to live - min_size=0, - # for now limiting the number of connections in the pool - max_size=20, - max_queries=50000, - timeout=120, - command_timeout=180, - max_cached_statement_lifetime=0, - max_inactive_connection_lifetime=180) + return await asyncpg.create_pool( + host=os.environ.get("DATABASE_URL", "localhost"), + port=os.environ.get("DATABASE_PORT", "5432"), + user=os.environ.get("DATABASE_USER", "beacon"), + password=os.environ.get("DATABASE_PASSWORD", "beacon"), + database=os.environ.get("DATABASE_NAME", "beacondb"), + # Multiple schemas can be used, and they need to be comma separated + server_settings={"search_path": DB_SCHEMA if DB_SCHEMA else "public"}, + # initializing with 0 connections allows the web server to + # start and also continue to live + min_size=0, + # for now limiting the number of connections in the pool + max_size=20, + max_queries=50000, + timeout=120, + command_timeout=180, + max_cached_statement_lifetime=0, + max_inactive_connection_lifetime=180, + ) diff --git a/beacon_api/permissions/ga4gh.py b/beacon_api/permissions/ga4gh.py index 912de728..4ec6def6 100644 --- a/beacon_api/permissions/ga4gh.py +++ b/beacon_api/permissions/ga4gh.py @@ -82,7 +82,7 @@ """ import base64 -import json +import ujson from typing import Dict, List, Tuple import aiohttp @@ -137,7 +137,7 @@ async def decode_passport(encoded_passport: str) -> List[Dict]: decoded_segments = [base64.urlsafe_b64decode(seg) for seg in verified_segments] # 
Convert the decoded segment bytes into dicts for easy access - decoded_data = [json.loads(seg.decode("utf-8")) for seg in decoded_segments] + decoded_data = [ujson.loads(seg.decode("utf-8")) for seg in decoded_segments] return decoded_data diff --git a/beacon_api/schemas/__init__.py b/beacon_api/schemas/__init__.py index b92310b1..b56828e0 100644 --- a/beacon_api/schemas/__init__.py +++ b/beacon_api/schemas/__init__.py @@ -9,7 +9,7 @@ * ``response.json`` - beacon API JSON response. """ -import json +import ujson from typing import Dict from pathlib import Path @@ -22,4 +22,4 @@ def load_schema(name: str) -> Dict: with open(str(path), "r") as fp: data = fp.read() - return json.loads(data) + return ujson.loads(data) diff --git a/beacon_api/utils/db_load.py b/beacon_api/utils/db_load.py index bd100e6d..d31c2bcc 100644 --- a/beacon_api/utils/db_load.py +++ b/beacon_api/utils/db_load.py @@ -39,7 +39,7 @@ import os import sys import argparse -import json +import ujson import itertools import re @@ -232,7 +232,7 @@ async def load_metadata(self, vcf, metafile, datafile): # read metadata from given JSON file # TO DO: parse metadata directly from datafile if possible LOG.info(meta_file) - metadata = json.load(meta_file) + metadata = ujson.load(meta_file) LOG.info(metadata) LOG.info("Metadata has been parsed") try: @@ -302,83 +302,82 @@ async def insert_variants(self, dataset_id, variants, min_ac): async with self._conn.transaction(): LOG.info("Insert variants into the database") for variant in variants: - # params = (frequency, count, actual variant Type) - params = self._unpack(variant) - # Coordinates that are read from VCF are 1-based, - # cyvcf2 reads them as 0-based, and they are inserted into the DB as such - - # params may carry single variants [1] or packed variants [20, 15, 10, 1] - # The first check prunes for single variants, packed variants must be removed afterwards - if params[1][0] >= min_ac: - # Remove packed variants that don't meet the minimum allele count 
requirements - # Packed variants are always ordered from largest to smallest, this process starts - # popping values from the right (small) side until there are no more small values to pop - while params[1][-1] < min_ac: - params[0].pop() # aaf - params[1].pop() # ac - params[2].pop() # vt - params[3].pop() # alt - if len(params[5]) > 0: - params[5].pop() # bnd - - # Nothing interesting on the variant with no aaf - # because none of the samples have it - if variant.aaf > 0: - - # We Process Breakend Records into a different table for now - if params[5] != []: - # await self.insert_mates(dataset_id, variant, params) - # Most likely there will be only one BND per Record - for bnd in params[5]: - await self._conn.execute( - """INSERT INTO beacon_mate_table - (datasetId, chromosome, chromosomeStart, chromosomePos, - mate, mateStart, matePos, reference, alternate, alleleCount, - callCount, frequency, "end") - SELECT ($1), ($2), ($3), ($4), - ($5), ($6), ($7), ($8), t.alt, t.ac, ($11), t.freq, ($13) - FROM (SELECT unnest($9::varchar[]) alt, unnest($10::integer[]) ac, - unnest($12::float[]) freq) t - ON CONFLICT (datasetId, chromosome, mate, chromosomePos, matePos) - DO NOTHING""", - dataset_id, - variant.CHROM.replace("chr", ""), - variant.start, - variant.ID, - bnd[0].replace("chr", ""), - bnd[1], - bnd[6], - variant.REF, - params[3], - params[1], - params[4], - params[0], - variant.end, - ) - else: + # Nothing interesting on the variant with no aaf + # because none of the samples have it + if variant.aaf > 0: + # params = (frequency, count, actual variant Type) + params = self._unpack(variant) + # Coordinates that are read from VCF are 1-based, + # cyvcf2 reads them as 0-based, and they are inserted into the DB as such + + # params may carry single variants [1] or packed variants [20, 15, 10, 1] + # The first check prunes for single variants, packed variants must be removed afterwards + if params[1][0] >= min_ac: + # Remove packed variants that don't meet the minimum 
allele count requirements + # Packed variants are always ordered from largest to smallest, this process starts + # popping values from the right (small) side until there are no more small values to pop + while params[1][-1] < min_ac: + params[0].pop() # aaf + params[1].pop() # ac + params[2].pop() # vt + params[3].pop() # alt + if len(params[5]) > 0: + params[5].pop() # bnd + + # We Process Breakend Records into a different table for now + if params[5] != []: + # await self.insert_mates(dataset_id, variant, params) + # Most likely there will be only one BND per Record + for bnd in params[5]: await self._conn.execute( - """INSERT INTO beacon_data_table - (datasetId, chromosome, start, reference, alternate, - "end", aggregatedVariantType, alleleCount, callCount, frequency, variantType) - SELECT ($1), ($2), ($3), ($4), t.alt, ($6), ($7), t.ac, ($9), t.freq, t.vt - FROM (SELECT unnest($5::varchar[]) alt, unnest($8::integer[]) ac, - unnest($10::float[]) freq, unnest($11::varchar[]) as vt) t - ON CONFLICT (datasetId, chromosome, start, reference, alternate) + """INSERT INTO beacon_mate_table + (datasetId, chromosome, chromosomeStart, chromosomePos, + mate, mateStart, matePos, reference, alternate, alleleCount, + callCount, frequency, "end") + SELECT ($1), ($2), ($3), ($4), + ($5), ($6), ($7), ($8), t.alt, t.ac, ($11), t.freq, ($13) + FROM (SELECT unnest($9::varchar[]) alt, unnest($10::integer[]) ac, + unnest($12::float[]) freq) t + ON CONFLICT (datasetId, chromosome, mate, chromosomePos, matePos) DO NOTHING""", dataset_id, variant.CHROM.replace("chr", ""), variant.start, + variant.ID, + bnd[0].replace("chr", ""), + bnd[1], + bnd[6], variant.REF, params[3], - variant.end, - variant.var_type.upper(), params[1], params[4], params[0], - params[2], + variant.end, ) - - LOG.debug("Variants have been inserted") + else: + await self._conn.execute( + """INSERT INTO beacon_data_table + (datasetId, chromosome, start, reference, alternate, + "end", aggregatedVariantType, 
alleleCount, callCount, frequency, variantType) + SELECT ($1), ($2), ($3), ($4), t.alt, ($6), ($7), t.ac, ($9), t.freq, t.vt + FROM (SELECT unnest($5::varchar[]) alt, unnest($8::integer[]) ac, + unnest($10::float[]) freq, unnest($11::varchar[]) as vt) t + ON CONFLICT (datasetId, chromosome, start, reference, alternate) + DO NOTHING""", + dataset_id, + variant.CHROM.replace("chr", ""), + variant.start, + variant.REF, + params[3], + variant.end, + variant.var_type.upper(), + params[1], + params[4], + params[0], + params[2], + ) + + LOG.debug("Variants have been inserted") except Exception as e: LOG.error(f"AN ERROR OCCURRED WHILE ATTEMPTING TO INSERT VARIANTS -> {e}") diff --git a/data/example_metadata.json b/data/example_metadata.json index 98d502e7..3a7880dc 100644 --- a/data/example_metadata.json +++ b/data/example_metadata.json @@ -1,5 +1,5 @@ { - "name": "1000 genomoe", + "name": "1000 genome", "datasetId": "urn:hg:1000genome", "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. 
As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.", "assemblyId": "GRCh38", diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml index 0fa7e5e7..9ad00d80 100644 --- a/deploy/docker-compose.yml +++ b/deploy/docker-compose.yml @@ -3,7 +3,7 @@ version: '3.2' services: postgres: hostname: postgres - image: postgres:11.6 + image: postgres:13 environment: POSTGRES_USER: beacon POSTGRES_DB: beacondb @@ -20,7 +20,7 @@ services: environment: DATABASE_URL: postgres links: - - postgres:postgres + - postgres:postgres ports: - 5050:5050 restart: on-failure diff --git a/deploy/test/docker-compose.yml b/deploy/test/docker-compose.yml index 14affcfd..d12a1fb4 100644 --- a/deploy/test/docker-compose.yml +++ b/deploy/test/docker-compose.yml @@ -3,7 +3,7 @@ version: '3.2' services: postgres: hostname: postgres - image: postgres:11.6 + image: postgres:13 environment: POSTGRES_USER: beacon POSTGRES_DB: beacondb @@ -42,4 +42,4 @@ services: - 8000:8000 volumes: - ./mock_auth.py:/mock_auth.py - entrypoint: ["python", "/mock_auth.py", "0.0.0.0", "8000"] + entrypoint: [ "python", "/mock_auth.py", "0.0.0.0", "8000" ] diff --git a/deploy/test/example_metadata_controlled.json b/deploy/test/example_metadata_controlled.json index 0ffa5631..b231895d 100644 --- a/deploy/test/example_metadata_controlled.json +++ b/deploy/test/example_metadata_controlled.json @@ -1,5 +1,5 @@ { - "name": "1000 genomoe", + "name": "1000 genome", "datasetId": "urn:hg:1000genome:controlled", "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. 
As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.", "assemblyId": "GRCh38", diff --git a/deploy/test/example_metadata_controlled1.json b/deploy/test/example_metadata_controlled1.json index e2084975..1efbfc4b 100644 --- a/deploy/test/example_metadata_controlled1.json +++ b/deploy/test/example_metadata_controlled1.json @@ -1,5 +1,5 @@ { - "name": "1000 genomoe", + "name": "1000 genome", "datasetId": "urn:hg:1000genome:controlled1", "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.", "assemblyId": "GRCh38", diff --git a/deploy/test/example_metadata_registered.json b/deploy/test/example_metadata_registered.json index 858a0e0a..3900bd3d 100644 --- a/deploy/test/example_metadata_registered.json +++ b/deploy/test/example_metadata_registered.json @@ -1,5 +1,5 @@ { - "name": "1000 genomoe", + "name": "1000 genome", "datasetId": "urn:hg:1000genome:registered", "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. 
As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.", "assemblyId": "GRCh38", diff --git a/dictionary.dic b/dictionary.dic new file mode 100644 index 00000000..e20f793e Binary files /dev/null and b/dictionary.dic differ diff --git a/docs/conf.py b/docs/conf.py index 56480723..7fe552d4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,7 +10,7 @@ # Get the project root dir, which is the parent dir of this -sys.path.insert(0, os.path.abspath('../beacon_api')) +sys.path.insert(0, os.path.abspath("../beacon_api")) # -- General configuration ------------------------------------------------ @@ -25,9 +25,19 @@ def __getattr__(cls, name): # List modules need to be mocked -MOCK_MODULES = ['aiohttp', 'asyncpg', 'cyvcf2', 'aiohttp_cors', - 'Cython', 'numpy', 'authlib.jose', 'authlib.jose.errors', - 'uvloop', 'aiocache', 'aiocache.serializers'] +MOCK_MODULES = [ + "aiohttp", + "asyncpg", + "cyvcf2", + "aiohttp_cors", + "Cython", + "numpy", + "authlib.jose", + "authlib.jose.errors", + "uvloop", + "aiocache", + "aiocache.serializers", +] sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) # If your documentation needs a minimal Sphinx version, state it here. @@ -37,32 +47,34 @@ def __getattr__(cls, name): # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.coverage', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', - 'sphinx.ext.todo'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.coverage", + "sphinx.ext.ifconfig", + "sphinx.ext.viewcode", + "sphinx.ext.githubpages", + "sphinx.ext.todo", +] # Add any paths that contain templates here, relative to this directory. 
-templates_path = ['templates'] +templates_path = ["templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # Get current year current_year = str(datetime.date.today().year) # General information about the project. -project = 'beacon-python' -copyright = f'2018 - {current_year}, {beacon_api.__copyright__} |' +project = "beacon-python" +copyright = f"2018 - {current_year}, {beacon_api.__copyright__} |" author = beacon_api.__author__ # The version info for the project you're documenting, acts as replacement for @@ -84,10 +96,10 @@ def __getattr__(cls, name): # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also affecst html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False @@ -97,22 +109,18 @@ def __getattr__(cls, name): # -- Options for HTML output ---------------------------------------------- -html_title = 'beacon-python API' +html_title = "beacon-python API" # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
-html_theme = 'sphinx_rtd_theme' -html_theme_options = { - 'collapse_navigation': True, - 'sticky_navigation': True, - 'display_version': True, - 'prev_next_buttons_location': 'bottom'} +html_theme = "sphinx_rtd_theme" +html_theme_options = {"collapse_navigation": True, "sticky_navigation": True, "display_version": True, "prev_next_buttons_location": "bottom"} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['static'] +html_static_path = ["static"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -120,7 +128,7 @@ def __getattr__(cls, name): # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - '**': [ + "**": [ # 'about.html', # 'navigation.html', # 'relations.html', # needs 'show_related': True theme option to display @@ -129,15 +137,15 @@ def __getattr__(cls, name): ] } -today_fmt = '%B %d, %Y' +today_fmt = "%B %d, %Y" def setup(app): """Add custom stylesheet.""" - app.add_css_file('custom.css') + app.add_css_file("custom.css") # -- Other stuff ---------------------------------------------------------- -htmlhelp_basename = 'beacon-python' -man_pages = [(master_doc, 'beacon-python', [author], 1)] -texinfo_documents = [(master_doc, 'beacon-python', author, 'Miscellaneous')] +htmlhelp_basename = "beacon-python" +man_pages = [(master_doc, "beacon-python", [author], 1)] +texinfo_documents = [(master_doc, "beacon-python", author, "Miscellaneous")] diff --git a/docs/db.rst b/docs/db.rst index 1ce6cd5c..7f976dcc 100644 --- a/docs/db.rst +++ b/docs/db.rst @@ -3,7 +3,7 @@ Database ======== -We use a PostgreSQL database (recommended version 11.6) for working with beacon data. 
+We use a PostgreSQL database (recommended version 13) for working with beacon data. For more information on setting up the database consult :ref:`database-setup`. .. attention:: We recommend https://pgtune.leopard.in.ua/ for establishing PostgreSQL diff --git a/docs/example.rst b/docs/example.rst index b2800046..330b6dfa 100644 --- a/docs/example.rst +++ b/docs/example.rst @@ -60,7 +60,7 @@ Example Response: "updateDateTime": "2018-12-01T10:28:07Z", "datasets": [{ "id": "urn:hg:1000genome", - "name": "1000 genomoe", + "name": "1000 genome", "externalUrl": "ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/release/20130502/", "description": "Data from 1000 genome project", "assemblyId": "GRCh38", diff --git a/docs/instructions.rst b/docs/instructions.rst index 3bef54f9..54e5f8fa 100644 --- a/docs/instructions.rst +++ b/docs/instructions.rst @@ -3,8 +3,8 @@ Instructions .. note:: In order to run ``beacon-python`` Web Server requirements are as specified below: - * Python 3.6+; - * running DB `PostgreSQL Server `_ 9.6+ (recommended 11.6). + * Python 3.8+; + * running DB `PostgreSQL Server `_ 9.6+ (recommended 13). .. _env-setup: @@ -101,7 +101,7 @@ in the supplied token. If disabled, the audience(s) of a token will not be valid Disabling this can be a good solution for standalone Beacons, that want to be able to use tokens generated by any authority. If ``verify_aud=True`` is set -provide also value(s) for ``audience`` key, as otherwise the audience will be attempted to be valited, but as no audiences +provide also value(s) for ``audience`` key, as otherwise the audience will be attempted to be validated, but as no audiences are listed, the validation will fail. .. note:: For implementing `CONTROLLED` dataset permissions see :ref:`permissions`. @@ -165,7 +165,7 @@ Starting PostgreSQL using Docker: -e POSTGRES_PASSWORD=beacon \ -e POSTGRES_DB=beacondb \ -v "$PWD/data":/docker-entrypoint-initdb.d \ - -p 5432:5432 postgres:11.6 + -p 5432:5432 postgres:13 .. 
hint:: If one has their own database the ``beacon_init`` utility can be skipped, and make use of their own database by: @@ -208,7 +208,7 @@ As an example, a dataset metadata could be: .. code-block:: javascript { - "name": "1000 genomoe", + "name": "1000 genome", "datasetId": "urn:hg:1000genome", "description": "Data from 1000 genome project", "assemblyId": "GRCh38", diff --git a/requirements.txt b/requirements.txt index 71015ea9..8f730af0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,11 @@ -aiohttp==3.7.4.post0 +aiohttp==3.8.1 aiohttp-cors==0.7.0 asyncpg==0.25.0 -jsonschema==3.2.0; python_version < '3.7' -jsonschema==4.3.2; python_version >= '3.7' +jsonschema==4.3.2 Cython==0.29.26 -cyvcf2==0.10.1; python_version < '3.7' -cyvcf2; python_version >= '3.7' -uvloop==0.14.0; python_version < '3.7' -uvloop==0.16.0; python_version >= '3.7' +cyvcf2==0.30.14 +uvloop==0.16.0 aiocache==0.11.1 -ujson==4.3.0; python_version < '3.7' -ujson==5.1.0; python_version >= '3.7' -aiomcache==0.6.0 +ujson==5.1.0 Authlib==0.15.5 gunicorn==20.1.0 diff --git a/setup.py b/setup.py index 0b0f2a3a..26bf6782 100644 --- a/setup.py +++ b/setup.py @@ -33,31 +33,25 @@ "Topic :: Internet :: WWW/HTTP :: HTTP Servers", "Topic :: Scientific/Engineering :: Bio-Informatics", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", ], install_requires=[ "asyncpg==0.25.0", - "aiohttp==3.7.4.post0", + "aiohttp==3.8.1", "Authlib==0.15.5", "aiohttp-cors==0.7.0", - "jsonschema==3.2.0; python_version < '3.7'", - "jsonschema==4.3.2; python_version >= '3.7'", + "jsonschema==4.3.2", "gunicorn==20.1.0", - "uvloop==0.14.0; python_version < '3.7'", - "uvloop==0.16.0; python_version >= '3.7'", - "cyvcf2==0.10.1; python_version < '3.7'", - "cyvcf2; python_version >= '3.7'", + "uvloop==0.16.0", + "cyvcf2==0.30.14", "aiocache==0.11.1", - "aiomcache==0.6.0", - "ujson==4.3.0; 
python_version < '3.7'", - "ujson==5.1.0; python_version >= '3.7'", + "ujson==5.1.0", ], extras_require={ "vcf": [ - "cyvcf2==0.10.1; python_version < '3.7'", "numpy==1.21.5", - "cyvcf2; python_version >= '3.7'", + "cyvcf2==0.30.14", "Cython==0.29.26", ], "test": [ @@ -68,7 +62,6 @@ "tox==3.24.4", "flake8==4.0.1", "flake8-docstrings==1.6.0", - "asynctest==0.13.0", "aioresponses==0.7.2", "black==21.12b0", ], diff --git a/tests/test_app.py b/tests/test_app.py index 38d6901a..8a1f0d7d 100644 --- a/tests/test_app.py +++ b/tests/test_app.py @@ -1,10 +1,7 @@ import unittest -from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop -from aiohttp import web -from beacon_api.app import init, main, initialize -from unittest import mock +from aiohttp.test_utils import AioHTTPTestCase +from beacon_api.app import init, initialize import asyncpg -import asynctest import json from authlib.jose import jwt import os @@ -17,7 +14,13 @@ def generate_token(issuer): """Mock ELIXIR AAI token.""" - pem = {"kty": "oct", "kid": "018c0ae5-4d9b-471b-bfd6-eef314bc7037", "use": "sig", "alg": "HS256", "k": "hJtXIZ2uSN5kbQfbtTNWbpdmhkV8FJG-Onbc6mxCcYg"} + pem = { + "kty": "oct", + "kid": "018c0ae5-4d9b-471b-bfd6-eef314bc7037", + "use": "sig", + "alg": "HS256", + "k": "hJtXIZ2uSN5kbQfbtTNWbpdmhkV8FJG-Onbc6mxCcYg", + } header = {"jku": "http://test.csc.fi/jwk", "kid": "018c0ae5-4d9b-471b-bfd6-eef314bc7037", "alg": "HS256"} payload = {"iss": issuer, "aud": "audience", "exp": 9999999999, "sub": "smth@smth.org"} token = jwt.encode(header, payload, pem).decode("utf-8") @@ -26,7 +29,13 @@ def generate_token(issuer): def generate_bad_token(): """Mock AAI token.""" - pem = {"kty": "oct", "kid": "018c0ae5-4d9b-471b-bfd6-eef314bc7037", "use": "sig", "alg": "HS256", "k": "hJtXIZ2uSN5kbQfbtTNWbpdmhkV8FJG-Onbc6mxCcYg"} + pem = { + "kty": "oct", + "kid": "018c0ae5-4d9b-471b-bfd6-eef314bc7037", + "use": "sig", + "alg": "HS256", + "k": "hJtXIZ2uSN5kbQfbtTNWbpdmhkV8FJG-Onbc6mxCcYg", + } header = {"jku": 
"http://test.csc.fi/jwk", "kid": "018c0ae5-4d9b-471b-bfd6-eef314bc7037", "alg": "HS256"} payload = {"iss": "bad_issuer", "aud": "audience", "exp": 0, "sub": "smth@elixir-europe.org"} token = jwt.encode(header, payload, pem).decode("utf-8") @@ -35,7 +44,7 @@ def generate_bad_token(): async def create_db_mock(app): """Mock the db connection pool.""" - app["pool"] = asynctest.mock.Mock(asyncpg.create_pool()) + app["pool"] = unittest.mock.Mock(asyncpg.create_pool()) return app @@ -50,7 +59,7 @@ class AppTestCase(AioHTTPTestCase): Testing web app endpoints. """ - @asynctest.mock.patch("beacon_api.app.initialize", side_effect=create_db_mock) + @unittest.mock.patch("beacon_api.app.initialize", side_effect=create_db_mock) async def get_application(self, pool_mock): """Retrieve web Application for test.""" token, public_key = generate_token("http://test.csc.fi") @@ -60,34 +69,30 @@ async def get_application(self, pool_mock): self.env.set("TOKEN", token) return await init() - @unittest_run_loop - async def tearDown(self): + async def tearDownAsync(self): """Finish up tests.""" self.env.unset("PUBLIC_KEY") self.env.unset("TOKEN") await caches.get("default").delete("jwk_key") - @unittest_run_loop async def test_beacon_info(self): """Test the Beacon info endpoint. The status should always be 200. """ - with asynctest.mock.patch("beacon_api.app.beacon_info", return_value={"id": "value"}): + with unittest.mock.patch("beacon_api.app.beacon_info", return_value={"id": "value"}): resp = await self.client.request("GET", "/") self.assertEqual(200, resp.status) - @unittest_run_loop async def test_ga4gh_info(self): """Test the GA4GH Discovery info endpoint. The status should always be 200. 
""" - with asynctest.mock.patch("beacon_api.app.ga4gh_info", return_value={"id": "value"}): + with unittest.mock.patch("beacon_api.app.ga4gh_info", return_value={"id": "value"}): resp = await self.client.request("GET", "/service-info") self.assertEqual(200, resp.status) - @unittest_run_loop async def test_post_info(self): """Test the info endpoint with POST. @@ -96,7 +101,6 @@ async def test_post_info(self): resp = await self.client.request("POST", "/") self.assertEqual(405, resp.status) - @unittest_run_loop async def test_post_service_info(self): """Test the service-info endpoint with POST. @@ -105,19 +109,16 @@ async def test_post_service_info(self): resp = await self.client.request("POST", "/service-info") self.assertEqual(405, resp.status) - @unittest_run_loop async def test_empty_get_query(self): """Test empty GET query endpoint.""" resp = await self.client.request("GET", "/query") self.assertEqual(400, resp.status) - @unittest_run_loop async def test_empty_post_query(self): """Test empty POST query endpoint.""" resp = await self.client.request("POST", "/query", data=json.dumps({})) self.assertEqual(400, resp.status) - @unittest_run_loop async def test_bad_start_post_query(self): """Test bad start combination POST query endpoint.""" bad_start = { @@ -134,7 +135,6 @@ async def test_bad_start_post_query(self): resp = await self.client.request("POST", "/query", data=json.dumps(bad_start)) self.assertEqual(400, resp.status) - @unittest_run_loop async def test_bad_start2_post_query(self): """Test bad start combination 2 POST query endpoint.""" bad_start = { @@ -151,7 +151,6 @@ async def test_bad_start2_post_query(self): resp = await self.client.request("POST", "/query", data=json.dumps(bad_start)) self.assertEqual(400, resp.status) - @unittest_run_loop async def test_bad_startend_post_query(self): """Test end smaller than start POST query endpoint.""" bad_start = { @@ -166,7 +165,6 @@ async def test_bad_startend_post_query(self): resp = await 
self.client.request("POST", "/query", data=json.dumps(bad_start)) self.assertEqual(400, resp.status) - @unittest_run_loop async def test_bad_startminmax_post_query(self): """Test start min greater than start Max POST query endpoint.""" bad_start = { @@ -181,7 +179,6 @@ async def test_bad_startminmax_post_query(self): resp = await self.client.request("POST", "/query", data=json.dumps(bad_start)) self.assertEqual(400, resp.status) - @unittest_run_loop async def test_bad_endminmax_post_query(self): """Test end min greater than start Max POST query endpoint.""" bad_start = { @@ -196,19 +193,24 @@ async def test_bad_endminmax_post_query(self): resp = await self.client.request("POST", "/query", data=json.dumps(bad_start)) self.assertEqual(400, resp.status) - @asynctest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) - @asynctest.mock.patch("beacon_api.app.query_request_handler") - @unittest_run_loop + @unittest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) + @unittest.mock.patch("beacon_api.app.query_request_handler") async def test_good_start_post_query(self, mock_handler, mock_object): """Test good start combination POST query endpoint.""" - good_start = {"referenceName": "MT", "start": 10, "referenceBases": "T", "variantType": "MNP", "assemblyId": "GRCh38", "includeDatasetResponses": "HIT"} + good_start = { + "referenceName": "MT", + "start": 10, + "referenceBases": "T", + "variantType": "MNP", + "assemblyId": "GRCh38", + "includeDatasetResponses": "HIT", + } mock_handler.side_effect = json.dumps(good_start) resp = await self.client.request("POST", "/query", data=json.dumps(good_start)) self.assertEqual(200, resp.status) - @asynctest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) - @asynctest.mock.patch("beacon_api.app.query_request_handler") - @unittest_run_loop + @unittest.mock.patch("beacon_api.app.parse_request_object", 
side_effect=mock_parse_request_object) + @unittest.mock.patch("beacon_api.app.query_request_handler") async def test_good_start2_post_query(self, mock_handler, mock_object): """Test good start combination 2 POST query endpoint.""" good_start = { @@ -224,9 +226,8 @@ async def test_good_start2_post_query(self, mock_handler, mock_object): resp = await self.client.request("POST", "/query", data=json.dumps(good_start)) self.assertEqual(200, resp.status) - @asynctest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) - @asynctest.mock.patch("beacon_api.app.query_request_handler") - @unittest_run_loop + @unittest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) + @unittest.mock.patch("beacon_api.app.query_request_handler") async def test_good_start3_post_query(self, mock_handler, mock_object): """Test good start combination 3 POST query endpoint.""" good_start = { @@ -242,53 +243,46 @@ async def test_good_start3_post_query(self, mock_handler, mock_object): resp = await self.client.request("POST", "/query", data=json.dumps(good_start)) self.assertEqual(200, resp.status) - @unittest_run_loop async def test_unauthorized_no_token_post_query(self): """Test unauthorized POST query endpoint, with no token.""" resp = await self.client.request("POST", "/query", data=json.dumps(PARAMS), headers={"Authorization": "Bearer"}) self.assertEqual(401, resp.status) - @unittest_run_loop async def test_unauthorized_token_post_query(self): """Test unauthorized POST query endpoint, bad token.""" resp = await self.client.request("POST", "/query", data=json.dumps(PARAMS), headers={"Authorization": f"Bearer {self.bad_token}"}) self.assertEqual(403, resp.status) - @unittest_run_loop async def test_invalid_scheme_get_query(self): """Test unauthorized GET query endpoint, invalid scheme.""" params = "?assemblyId=GRCh38&referenceName=1&start=10000&referenceBases=A&alternateBases=T&datasetIds=dataset1" resp = await 
self.client.request("GET", f"/query{params}", headers={"Authorization": "SMTH x"}) self.assertEqual(401, resp.status) - @asynctest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) - @asynctest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) - @unittest_run_loop + @unittest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) + @unittest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) async def test_valid_token_get_query(self, mock_handler, mock_object): """Test valid token GET query endpoint.""" token = os.environ.get("TOKEN") resp = await self.client.request("POST", "/query", data=json.dumps(PARAMS), headers={"Authorization": f"Bearer {token}"}) self.assertEqual(200, resp.status) - @unittest_run_loop async def test_bad_json_post_query(self): """Test bad json POST query endpoint.""" resp = await self.client.request("POST", "/query", data="") self.assertEqual(500, resp.status) - @asynctest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) - @asynctest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) - @unittest_run_loop + @unittest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) + @unittest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) async def test_valid_get_query(self, mock_handler, mock_object): """Test valid GET query endpoint.""" params = "?assemblyId=GRCh38&referenceName=1&start=10000&referenceBases=A&alternateBases=T" - with asynctest.mock.patch("beacon_api.app.initialize", side_effect=create_db_mock): + with unittest.mock.patch("beacon_api.app.initialize", side_effect=create_db_mock): resp = await self.client.request("GET", f"/query{params}") self.assertEqual(200, resp.status) - @asynctest.mock.patch("beacon_api.app.parse_request_object", 
side_effect=mock_parse_request_object) - @asynctest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) - @unittest_run_loop + @unittest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) + @unittest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) async def test_valid_post_query(self, mock_handler, mock_object): """Test valid POST query endpoint.""" resp = await self.client.request("POST", "/query", data=json.dumps(PARAMS)) @@ -301,7 +295,13 @@ class AppTestCaseForbidden(AioHTTPTestCase): Testing web app for wrong issuer. """ - @asynctest.mock.patch("beacon_api.app.initialize", side_effect=create_db_mock) + async def tearDownAsync(self): + """Finish up tests.""" + self.env.unset("PUBLIC_KEY") + self.env.unset("TOKEN") + await caches.get("default").delete("jwk_key") + + @unittest.mock.patch("beacon_api.app.initialize", side_effect=create_db_mock) async def get_application(self, pool_mock): """Retrieve web Application for test.""" token, public_key = generate_token("something") @@ -310,16 +310,8 @@ async def get_application(self, pool_mock): self.env.set("TOKEN", token) return await init() - @unittest_run_loop - async def tearDown(self): - """Finish up tests.""" - self.env.unset("PUBLIC_KEY") - self.env.unset("TOKEN") - await caches.get("default").delete("jwk_key") - - @asynctest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) - @asynctest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) - @unittest_run_loop + @unittest.mock.patch("beacon_api.app.parse_request_object", side_effect=mock_parse_request_object) + @unittest.mock.patch("beacon_api.app.query_request_handler", side_effect=json.dumps(PARAMS)) async def test_forbidden_token_get_query(self, mock_handler, mock_object): """Test forbidden GET query endpoint, invalid scheme.""" token = os.environ.get("TOKEN") @@ -327,39 +319,27 @@ async 
def test_forbidden_token_get_query(self, mock_handler, mock_object): self.assertEqual(403, resp.status) -class TestBasicFunctionsApp(asynctest.TestCase): +class TestBasicFunctionsApp(unittest.IsolatedAsyncioTestCase): """Test App Base. Testing basic functions from web app. """ - def setUp(self): - """Initialise fixtures.""" - pass - - def tearDown(self): - """Remove setup variables.""" - pass - - @mock.patch("beacon_api.app.web") - def test_main(self, mock_webapp): - """Should start the webapp.""" - main() - mock_webapp.run_app.assert_called() - - async def test_init(self): - """Test init type.""" - server = await init() - self.assertIs(type(server), web.Application) + async def test_servinit(self): + """Test server initialization function execution.""" + # Don't really need much testing here, if the server initialization + # executes to the end all is fine. + app = await init() + self.assertTrue(app is not None) - @asynctest.mock.patch("beacon_api.app.set_cors") + @unittest.mock.patch("beacon_api.app.set_cors") async def test_initialize(self, mock_cors): """Test create db pool, should just return the result of init_db_pool. We will mock the init_db_pool, thus we assert we just call it. 
""" app = {} - with asynctest.mock.patch("beacon_api.app.init_db_pool") as db_mock: + with unittest.mock.patch("beacon_api.app.init_db_pool") as db_mock: await initialize(app) db_mock.assert_called() diff --git a/tests/test_basic.py b/tests/test_basic.py index 91c1232c..575e9e9f 100644 --- a/tests/test_basic.py +++ b/tests/test_basic.py @@ -1,4 +1,4 @@ -import asynctest +import unittest import aiohttp from beacon_api.utils.db_load import parse_arguments, init_beacon_db, main from beacon_api.conf.config import init_db_pool @@ -13,9 +13,7 @@ def mock_token(bona_fide, permissions, auth): """Mock a processed token.""" - return {"bona_fide_status": bona_fide, - "permissions": permissions, - "authenticated": auth} + return {"bona_fide_status": bona_fide, "permissions": permissions, "authenticated": auth} class MockDecodedPassport: @@ -53,7 +51,7 @@ async def close(self): async def check_tables(self, array): """Mimic check_tables.""" - return ['DATASET1', 'DATASET2'] + return ["DATASET1", "DATASET2"] async def create_tables(self, sql_file): """Mimic create_tables.""" @@ -72,7 +70,7 @@ async def load_datafile(self, vcf, datafile, datasetId, n=1000, min_ac=1): return ["datasetId", "variants"] -class TestBasicFunctions(asynctest.TestCase): +class TestBasicFunctions(unittest.IsolatedAsyncioTestCase): """Test supporting functions.""" def setUp(self): @@ -85,21 +83,21 @@ def tearDown(self): def test_parser(self): """Test argument parsing.""" - parsed = parse_arguments(['/path/to/datafile.csv', '/path/to/metadata.json']) - self.assertEqual(parsed.datafile, '/path/to/datafile.csv') - self.assertEqual(parsed.metadata, '/path/to/metadata.json') + parsed = parse_arguments(["/path/to/datafile.csv", "/path/to/metadata.json"]) + self.assertEqual(parsed.datafile, "/path/to/datafile.csv") + self.assertEqual(parsed.metadata, "/path/to/metadata.json") - @asynctest.mock.patch('beacon_api.conf.config.asyncpg') + @unittest.mock.patch("beacon_api.conf.config.asyncpg") async def 
test_init_pool(self, db_mock): """Test database connection pool creation.""" - db_mock.return_value = asynctest.CoroutineMock(name='create_pool') - db_mock.create_pool = asynctest.CoroutineMock() + db_mock.return_value = unittest.mock.AsyncMock(name="create_pool") + db_mock.create_pool = unittest.mock.AsyncMock() await init_db_pool() db_mock.create_pool.assert_called() - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.BeaconDB') - @asynctest.mock.patch('beacon_api.utils.db_load.VCF') + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.BeaconDB") + @unittest.mock.patch("beacon_api.utils.db_load.VCF") async def test_init_beacon_db(self, mock_vcf, db_mock, mock_log): """Test beacon_init db call.""" db_mock.return_value = MockBeaconDB() @@ -110,14 +108,16 @@ async def test_init_beacon_db(self, mock_vcf, db_mock, mock_log): "sampleCount": 2504, "externalUrl": "https://datasethost.org/dataset1", "accessType": "PUBLIC"}""" - metafile = self._dir.write('data.json', metadata.encode('utf-8')) + metafile = self._dir.write("data.json", metadata.encode("utf-8")) data = """MOCK VCF file""" - datafile = self._dir.write('data.vcf', data.encode('utf-8')) + datafile = self._dir.write("data.vcf", data.encode("utf-8")) await init_beacon_db([datafile, metafile]) - mock_log.info.mock_calls = ['Mark the database connection to be closed', - 'The database connection has been closed'] + mock_log.info.mock_calls = [ + "Mark the database connection to be closed", + "The database connection has been closed", + ] - @asynctest.mock.patch('beacon_api.utils.db_load.init_beacon_db') + @unittest.mock.patch("beacon_api.utils.db_load.init_beacon_db") def test_main_db(self, mock_init): """Test run asyncio main beacon init.""" main() @@ -126,18 +126,18 @@ def test_main_db(self, mock_init): def test_aud_claim(self): """Test aud claim function.""" env = EnvironmentVarGuard() - env.set('JWT_AUD', 
"aud1,aud2") + env.set("JWT_AUD", "aud1,aud2") result = verify_aud_claim() # Because it is false we expect it not to be parsed expected = (False, []) self.assertEqual(result, expected) - env.unset('JWT_AUD') + env.unset("JWT_AUD") def test_token_scheme_check_bad(self): """Test token scheme no token.""" # This might never happen, yet lets prepare for it with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): - token_scheme_check("", 'https', {}, 'localhost') + token_scheme_check("", "https", {}, "localhost") def test_access_resolution_base(self): """Test assumptions for access resolution. @@ -146,9 +146,9 @@ def test_access_resolution_base(self): """ request = PARAMS token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC']) + self.assertListEqual(result[0], ["PUBLIC"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2"], intermediate_list) @@ -160,9 +160,9 @@ def test_access_resolution_no_controlled(self): """ request = PARAMS token = mock_token(False, [], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC']) + self.assertListEqual(result[0], ["PUBLIC"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2"], intermediate_list) @@ -174,9 +174,9 @@ def test_access_resolution_registered(self): """ request = PARAMS token = mock_token(True, [], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC', 'REGISTERED']) + self.assertListEqual(result[0], ["PUBLIC", "REGISTERED"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2", "3", "4"], intermediate_list) @@ -188,9 +188,9 
@@ def test_access_resolution_controlled_no_registered(self): """ request = PARAMS token = mock_token(False, ["5", "6"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC', 'CONTROLLED']) + self.assertListEqual(result[0], ["PUBLIC", "CONTROLLED"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2", "5", "6"], intermediate_list) @@ -202,9 +202,9 @@ def test_access_resolution_controlled_registered(self): """ request = PARAMS token = mock_token(True, ["5", "6"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC', 'REGISTERED', 'CONTROLLED']) + self.assertListEqual(result[0], ["PUBLIC", "REGISTERED", "CONTROLLED"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2", "3", "4", "5", "6"], intermediate_list) @@ -216,7 +216,7 @@ def test_access_resolution_bad_registered(self): """ request = PARAMS token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): access_resolution(request, token, host, [], ["3"], []) @@ -227,7 +227,7 @@ def test_access_resolution_no_registered2(self): """ request = PARAMS token = mock_token(False, [], True) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden): access_resolution(request, token, host, [], ["4"], []) @@ -238,7 +238,7 @@ def test_access_resolution_controlled_forbidden(self): """ request = PARAMS token = mock_token(False, [7], True) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden): access_resolution(request, token, host, [], ["6"], []) @@ -249,7 +249,7 @@ def test_access_resolution_controlled_unauthorized(self): """ request = PARAMS 
token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): access_resolution(request, token, host, [], ["5"], []) @@ -260,9 +260,9 @@ def test_access_resolution_controlled_no_perms(self): """ request = PARAMS token = mock_token(False, ["7"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["2"], ["6"], []) - self.assertEqual(result, (['PUBLIC'], ["2"])) + self.assertEqual(result, (["PUBLIC"], ["2"])) def test_access_resolution_controlled_some(self): """Test assumptions for access resolution for requested controlled some datasets. @@ -271,9 +271,9 @@ def test_access_resolution_controlled_some(self): """ request = PARAMS token = mock_token(False, ["5"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, [], [], ["5", "6"]) - self.assertEqual(result, (['CONTROLLED'], ["5"])) + self.assertEqual(result, (["CONTROLLED"], ["5"])) def test_access_resolution_controlled_no_perms_public(self): """Test assumptions for access resolution for requested controlled and public, returning public only. @@ -282,9 +282,9 @@ def test_access_resolution_controlled_no_perms_public(self): """ request = PARAMS token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1"], [], ["5"]) - self.assertEqual(result, (['PUBLIC'], ["1"])) + self.assertEqual(result, (["PUBLIC"], ["1"])) def test_access_resolution_controlled_no_perms_bonafide(self): """Test assumptions for access resolution for requested controlled and registered, returning registered only. 
@@ -293,9 +293,9 @@ def test_access_resolution_controlled_no_perms_bonafide(self): """ request = PARAMS token = mock_token(True, [], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, [], ["4"], ["7"]) - self.assertEqual(result, (['REGISTERED'], ["4"])) + self.assertEqual(result, (["REGISTERED"], ["4"])) def test_access_resolution_controlled_never_reached(self): """Test assumptions for access resolution for requested controlled unauthorized. @@ -305,7 +305,7 @@ def test_access_resolution_controlled_never_reached(self): """ request = PARAMS token = mock_token(False, None, False) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): access_resolution(request, token, host, [], [], ["8"]) @@ -317,57 +317,80 @@ def test_access_resolution_controlled_never_reached2(self): """ request = PARAMS token = mock_token(False, None, True) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden): access_resolution(request, token, host, [], [], ["8"]) - @asynctest.mock.patch('beacon_api.permissions.ga4gh.validate_passport') + @unittest.mock.patch("beacon_api.permissions.ga4gh.validate_passport") async def test_ga4gh_controlled(self, m_validation): """Test ga4gh permissions claim parsing.""" # Test: no passports, no permissions datasets = await get_ga4gh_controlled([]) self.assertEqual(datasets, set()) # Test: 1 passport, 1 unique dataset, 1 permission - passport = {"ga4gh_visa_v1": {"type": "ControlledAccessGrants", - "value": "https://institution.org/EGAD01", - "source": "https://ga4gh.org/duri/no_org", - "by": "self", - "asserted": 1539069213, - "expires": 4694742813}} + passport = { + "ga4gh_visa_v1": { + "type": "ControlledAccessGrants", + "value": "https://institution.org/EGAD01", + "source": "https://ga4gh.org/duri/no_org", + "by": "self", + "asserted": 1539069213, + "expires": 4694742813, + } + } m_validation.return_value = passport 
dataset = await get_ga4gh_controlled([{}]) # one passport - self.assertEqual(dataset, {'EGAD01'}) + self.assertEqual(dataset, {"EGAD01"}) # Test: 2 passports, 1 unique dataset, 1 permission (permissions must not be duplicated) - passport = {"ga4gh_visa_v1": {"type": "ControlledAccessGrants", - "value": "https://institution.org/EGAD01", - "source": "https://ga4gh.org/duri/no_org", - "by": "self", - "asserted": 1539069213, - "expires": 4694742813}} + passport = { + "ga4gh_visa_v1": { + "type": "ControlledAccessGrants", + "value": "https://institution.org/EGAD01", + "source": "https://ga4gh.org/duri/no_org", + "by": "self", + "asserted": 1539069213, + "expires": 4694742813, + } + } m_validation.return_value = passport dataset = await get_ga4gh_controlled([{}, {}]) # two passports - self.assertEqual(dataset, {'EGAD01'}) + self.assertEqual(dataset, {"EGAD01"}) # Test: 2 passports, 2 unique datasets, 2 permissions # Can't test this case with the current design! # Would need a way for validate_passport() to mock two different results async def test_ga4gh_bona_fide(self): """Test ga4gh statuses claim parsing.""" - passports = [("enc", "header", { - "ga4gh_visa_v1": {"type": "AcceptedTermsAndPolicies", - "value": "https://doi.org/10.1038/s41431-018-0219-y", - "source": "https://ga4gh.org/duri/no_org", - "by": "self", - "asserted": 1539069213, - "expires": 4694742813} - }), - ("enc", "header", { - "ga4gh_visa_v1": {"type": "ResearcherStatus", - "value": "https://doi.org/10.1038/s41431-018-0219-y", - "source": "https://ga4gh.org/duri/no_org", - "by": "peer", - "asserted": 1539017776, - "expires": 1593165413}})] + passports = [ + ( + "enc", + "header", + { + "ga4gh_visa_v1": { + "type": "AcceptedTermsAndPolicies", + "value": "https://doi.org/10.1038/s41431-018-0219-y", + "source": "https://ga4gh.org/duri/no_org", + "by": "self", + "asserted": 1539069213, + "expires": 4694742813, + } + }, + ), + ( + "enc", + "header", + { + "ga4gh_visa_v1": { + "type": "ResearcherStatus", + 
"value": "https://doi.org/10.1038/s41431-018-0219-y", + "source": "https://ga4gh.org/duri/no_org", + "by": "peer", + "asserted": 1539017776, + "expires": 1593165413, + } + }, + ), + ] # Good test: both required passport types contained the correct value bona_fide_status = await get_ga4gh_bona_fide(passports) self.assertEqual(bona_fide_status, True) # has bona fide @@ -376,12 +399,12 @@ async def test_ga4gh_bona_fide(self): bona_fide_status = await get_ga4gh_bona_fide(passports_empty) self.assertEqual(bona_fide_status, False) # doesn't have bona fide - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_jwk') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.jwt') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.LOG') + @unittest.mock.patch("beacon_api.permissions.ga4gh.get_jwk") + @unittest.mock.patch("beacon_api.permissions.ga4gh.jwt") + @unittest.mock.patch("beacon_api.permissions.ga4gh.LOG") async def test_validate_passport(self, mock_log, m_jwt, m_jwk): """Test passport validation.""" - m_jwk.return_value = 'jwk' + m_jwk.return_value = "jwk" # Test: validation passed m_jwt.return_value = MockDecodedPassport() await validate_passport({}) @@ -394,7 +417,7 @@ async def test_validate_passport(self, mock_log, m_jwt, m_jwk): # need to assert the log called mock_log.error.assert_called_with("Something went wrong when processing JWT tokens: 1") - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_ga4gh_permissions') + @unittest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_permissions") async def test_check_ga4gh_token(self, m_get_perms): """Test token scopes.""" # Test: no scope found @@ -403,28 +426,28 @@ async def test_check_ga4gh_token(self, m_get_perms): self.assertEqual(dataset_permissions, set()) self.assertEqual(bona_fide_status, False) # Test: scope is ok, but no claims - decoded_data = {'scope': ''} + decoded_data = {"scope": ""} dataset_permissions, bona_fide_status = await check_ga4gh_token(decoded_data, {}, False, set()) 
self.assertEqual(dataset_permissions, set()) self.assertEqual(bona_fide_status, False) # Test: scope is ok, claims are ok - m_get_perms.return_value = {'EGAD01'}, True - decoded_data = {'scope': 'openid ga4gh_passport_v1'} + m_get_perms.return_value = {"EGAD01"}, True + decoded_data = {"scope": "openid ga4gh_passport_v1"} dataset_permissions, bona_fide_status = await check_ga4gh_token(decoded_data, {}, False, set()) - self.assertEqual(dataset_permissions, {'EGAD01'}) + self.assertEqual(dataset_permissions, {"EGAD01"}) self.assertEqual(bona_fide_status, True) async def test_decode_passport(self): """Test key-less JWT decoding.""" - token, _ = generate_token('http://test.csc.fi') + token, _ = generate_token("http://test.csc.fi") header, payload = await decode_passport(token) - self.assertEqual(header.get('alg'), 'HS256') - self.assertEqual(payload.get('iss'), 'http://test.csc.fi') + self.assertEqual(header.get("alg"), "HS256") + self.assertEqual(payload.get("iss"), "http://test.csc.fi") - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_ga4gh_bona_fide') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_ga4gh_controlled') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.decode_passport') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.retrieve_user_data') + @unittest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_bona_fide") + @unittest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_controlled") + @unittest.mock.patch("beacon_api.permissions.ga4gh.decode_passport") + @unittest.mock.patch("beacon_api.permissions.ga4gh.retrieve_user_data") async def test_get_ga4gh_permissions(self, m_userinfo, m_decode, m_controlled, m_bonafide): """Test GA4GH permissions main function.""" # Test: no data (nothing) @@ -440,25 +463,17 @@ async def test_get_ga4gh_permissions(self, m_userinfo, m_decode, m_controlled, m # Test: permissions m_userinfo.return_value = [{}] header = {} - payload = { - 'ga4gh_visa_v1': { - 'type': 'ControlledAccessGrants' - } - 
} + payload = {"ga4gh_visa_v1": {"type": "ControlledAccessGrants"}} m_decode.return_value = header, payload - m_controlled.return_value = {'EGAD01'} + m_controlled.return_value = {"EGAD01"} m_bonafide.return_value = False dataset_permissions, bona_fide_status = await get_ga4gh_permissions({}) - self.assertEqual(dataset_permissions, {'EGAD01'}) + self.assertEqual(dataset_permissions, {"EGAD01"}) self.assertEqual(bona_fide_status, False) # Test: bona fide m_userinfo.return_value = [{}] header = {} - payload = { - 'ga4gh_visa_v1': { - 'type': 'ResearcherStatus' - } - } + payload = {"ga4gh_visa_v1": {"type": "ResearcherStatus"}} m_decode.return_value = header, payload m_controlled.return_value = set() m_bonafide.return_value = True @@ -467,5 +482,5 @@ async def test_get_ga4gh_permissions(self, m_userinfo, m_decode, m_controlled, m self.assertEqual(bona_fide_status, True) -if __name__ == '__main__': - asynctest.main() +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_data_query.py b/tests/test_data_query.py index 587f0e90..bae9cf01 100644 --- a/tests/test_data_query.py +++ b/tests/test_data_query.py @@ -1,13 +1,10 @@ -import asynctest -import aiohttp +import unittest from unittest import mock from beacon_api.utils.data_query import filter_exists, transform_record from beacon_api.utils.data_query import transform_misses, transform_metadata, find_datasets, add_handover -from beacon_api.utils.data_query import fetch_datasets_access, fetch_dataset_metadata, fetch_filtered_dataset from beacon_api.extensions.handover import make_handover from datetime import datetime from beacon_api.utils.data_query import handle_wildcard -from .test_db_load import Connection, ConnectionException class Record: @@ -16,9 +13,19 @@ class Record: Mimic asyncpg Record object. 
""" - def __init__(self, accessType, frequency=None, createDateTime=None, updateDateTime=None, - referenceBases=None, alternateBases=None, start=None, end=None, - variantCount=0, variantType=None): + def __init__( + self, + accessType, + frequency=None, + createDateTime=None, + updateDateTime=None, + referenceBases=None, + alternateBases=None, + start=None, + end=None, + variantCount=0, + variantType=None, + ): """Initialise things.""" self.data = {"accessType": accessType} # self.variantCount = variantCount @@ -62,17 +69,9 @@ def values(self): return self.data.values() -class TestDataQueryFunctions(asynctest.TestCase): +class TestDataQueryFunctions(unittest.IsolatedAsyncioTestCase): """Test Data Query functions.""" - def setUp(self): - """Set up.""" - pass - - def tearDown(self): - """Close database connection after tests.""" - pass - def test_filter_exists(self): """Test filtering hits and miss datasets.""" datasets = [{"exists": True, "name": "DATASET1"}, {"exists": False, "name": "DATASET2"}] @@ -87,29 +86,59 @@ def test_filter_exists(self): def test_transform_record(self): """Test transform DB record.""" - response = {"frequency": 0.009112876, "info": {"accessType": "PUBLIC"}, - "referenceBases": "CT", "alternateBases": "AT", - "start": 10, "end": 12, - "variantCount": 3, "variantType": "MNP"} - record = Record("PUBLIC", 0.009112875989879, referenceBases="CT", alternateBases="AT", start=10, end=12, variantCount=3, variantType="MNP") + response = { + "frequency": 0.009112876, + "info": {"accessType": "PUBLIC"}, + "referenceBases": "CT", + "alternateBases": "AT", + "start": 10, + "end": 12, + "variantCount": 3, + "variantType": "MNP", + } + record = Record( + "PUBLIC", + 0.009112875989879, + referenceBases="CT", + alternateBases="AT", + start=10, + end=12, + variantCount=3, + variantType="MNP", + ) result = transform_record(record) self.assertEqual(result, response) def test_transform_misses(self): """Test transform misses record.""" - response = 
{"referenceBases": '', "alternateBases": '', "variantType": "", - "frequency": 0, "callCount": 0, "sampleCount": 0, "variantCount": 0, - "start": 0, "end": 0, "info": {"accessType": "PUBLIC"}} + response = { + "referenceBases": "", + "alternateBases": "", + "variantType": "", + "frequency": 0, + "callCount": 0, + "sampleCount": 0, + "variantCount": 0, + "start": 0, + "end": 0, + "info": {"accessType": "PUBLIC"}, + } record = Record("PUBLIC") result = transform_misses(record) self.assertEqual(result, response) def test_transform_metadata(self): """Test transform medata record.""" - response = {"createDateTime": "2018-10-20T20:33:40Z", "updateDateTime": "2018-10-20T20:33:40Z", - "info": {"accessType": "PUBLIC"}} - record = Record("PUBLIC", createDateTime=datetime.strptime("2018-10-20 20:33:40+00", '%Y-%m-%d %H:%M:%S+00'), - updateDateTime=datetime.strptime("2018-10-20 20:33:40+00", '%Y-%m-%d %H:%M:%S+00')) + response = { + "createDateTime": "2018-10-20T20:33:40Z", + "updateDateTime": "2018-10-20T20:33:40Z", + "info": {"accessType": "PUBLIC"}, + } + record = Record( + "PUBLIC", + createDateTime=datetime.strptime("2018-10-20 20:33:40+00", "%Y-%m-%d %H:%M:%S+00"), + updateDateTime=datetime.strptime("2018-10-20 20:33:40+00", "%Y-%m-%d %H:%M:%S+00"), + ) result = transform_metadata(record) self.assertEqual(result, response) @@ -117,164 +146,50 @@ def test_add_handover(self): """Test that add handover.""" # Test that the handover actually is added handovers = [{"handover1": "info"}, {"handover2": "url"}] - record = {"datasetId": "test", "referenceName": "22", "referenceBases": "A", - "alternateBases": "C", "start": 10, "end": 11, "variantType": "SNP"} - with mock.patch('beacon_api.extensions.handover.make_handover', return_value=handovers): + record = { + "datasetId": "test", + "referenceName": "22", + "referenceBases": "A", + "alternateBases": "C", + "start": 10, + "end": 11, + "variantType": "SNP", + } + with mock.patch("beacon_api.extensions.handover.make_handover", 
return_value=handovers): result = add_handover(record) - record['datasetHandover'] = handovers + record["datasetHandover"] = handovers self.assertEqual(result, record) def test_make_handover(self): """Test make handover.""" - paths = [('lab1', 'desc1', 'path1'), ('lab2', 'desc2', 'path2')] - result = make_handover(paths, ['id1', 'id2', 'id1']) + paths = [("lab1", "desc1", "path1"), ("lab2", "desc2", "path2")] + result = make_handover(paths, ["id1", "id2", "id1"]) # The number of handovers = number of paths * number of unique datasets self.assertEqual(len(result), 4) self.assertIn("path1", result[0]["url"]) - self.assertEqual(result[0]["description"], 'desc1') + self.assertEqual(result[0]["description"], "desc1") - @asynctest.mock.patch('beacon_api.utils.data_query.fetch_filtered_dataset') + @unittest.mock.patch("beacon_api.utils.data_query.fetch_filtered_dataset") async def test_find_datasets(self, mock_filtered): """Test find datasets.""" mock_filtered.return_value = [] token = dict() token["bona_fide_status"] = False - result = await find_datasets(None, 'GRCh38', None, 'Y', 'T', 'C', [], token, "NONE") + result = await find_datasets(None, "GRCh38", None, "Y", "T", "C", [], token, "NONE") self.assertEqual(result, []) # setting ALL should cover MISS call as well - result_all = await find_datasets(None, 'GRCh38', None, 'Y', 'T', 'C', [], token, "ALL") + result_all = await find_datasets(None, "GRCh38", None, "Y", "T", "C", [], token, "ALL") self.assertEqual(result_all, []) - async def test_datasets_access_call_public(self): - """Test db call of getting public datasets access.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'PUBLIC', 'datasetid': 'mock:public:id'}]) - result = await fetch_datasets_access(pool, None) - # for now it can return a tuple of empty datasets - # in order to get a response we will have to mock it - # in Connection() class - self.assertEqual(result, (['mock:public:id'], [], 
[])) - - async def test_datasets_access_call_exception(self): - """Test db call of getting public datasets access with exception.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = ConnectionException() - with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError): - await fetch_datasets_access(pool, None) - - async def test_datasets_access_call_registered(self): - """Test db call of getting registered datasets access.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'REGISTERED', 'datasetid': 'mock:registered:id'}]) - result = await fetch_datasets_access(pool, None) - # for now it can return a tuple of empty datasets - # in order to get a response we will have to mock it - # in Connection() class - self.assertEqual(result, ([], ['mock:registered:id'], [])) - - async def test_datasets_access_call_controlled(self): - """Test db call of getting controlled datasets access.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'CONTROLLED', 'datasetid': 'mock:controlled:id'}]) - result = await fetch_datasets_access(pool, None) - # for now it can return a tuple of empty datasets - # in order to get a response we will have to mock it - # in Connection() class - self.assertEqual(result, ([], [], ['mock:controlled:id'])) - - async def test_datasets_access_call_multiple(self): - """Test db call of getting controlled and public datasets access.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'CONTROLLED', 'datasetid': 'mock:controlled:id'}, - {'accesstype': 'PUBLIC', 'datasetid': 'mock:public:id'}]) - result = await fetch_datasets_access(pool, None) - # for now it can return a tuple of empty datasets - # in order to get a response we will have to mock it - # in Connection() class - self.assertEqual(result, (['mock:public:id'], [], 
['mock:controlled:id'])) - - async def test_fetch_dataset_metadata_call(self): - """Test db call of getting datasets metadata.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection() - result = await fetch_dataset_metadata(pool, None, None) - # for now it can return empty dataset - # in order to get a response we will have to mock it - # in Connection() class - self.assertEqual(result, []) - - async def test_fetch_dataset_metadata_call_exception(self): - """Test db call of getting datasets metadata with exception.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = ConnectionException() - with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError): - await fetch_dataset_metadata(pool, None, None) - - async def test_fetch_filtered_dataset_call(self): - """Test db call for retrieving main data.""" - pool = asynctest.CoroutineMock() - db_response = {"referenceBases": '', "alternateBases": '', "variantType": "", - "referenceName": 'Chr38', - "frequency": 0, "callCount": 0, "sampleCount": 0, "variantCount": 0, - "start": 0, "end": 0, "accessType": "PUBLIC", "datasetId": "test"} - pool.acquire().__aenter__.return_value = Connection(accessData=[db_response]) - assembly_id = 'GRCh38' - position = (10, 20, None, None, None, None) - chromosome = 1 - reference = 'A' - alternate = ('DUP', None) - result = await fetch_filtered_dataset(pool, assembly_id, position, chromosome, reference, alternate, None, None, False) - # for now it can return empty dataset - # in order to get a response we will have to mock it - # in Connection() class - expected = {'referenceName': 'Chr38', 'callCount': 0, 'sampleCount': 0, 'variantCount': 0, 'datasetId': 'test', - 'referenceBases': '', 'alternateBases': '', 'variantType': '', 'start': 0, 'end': 0, 'frequency': 0, - 'info': {'accessType': 'PUBLIC'}, - 'datasetHandover': [{'handoverType': {'id': 'CUSTOM', 'label': 'Variants'}, - 'description': 'browse the variants 
matched by the query', - 'url': 'https://examplebrowser.org/dataset/test/browser/variant/Chr38-1--'}, - {'handoverType': {'id': 'CUSTOM', 'label': 'Region'}, - 'description': 'browse data of the region matched by the query', - 'url': 'https://examplebrowser.org/dataset/test/browser/region/Chr38-1-1'}, - {'handoverType': {'id': 'CUSTOM', 'label': 'Data'}, - 'description': 'retrieve information of the datasets', - 'url': 'https://examplebrowser.org/dataset/test/browser'}]} - - self.assertEqual(result, [expected]) - - async def test_fetch_filtered_dataset_call_misses(self): - """Test db call for retrieving miss data.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection() # db_response is [] - assembly_id = 'GRCh38' - position = (10, 20, None, None, None, None) - chromosome = 1 - reference = 'A' - alternate = ('DUP', None) - result_miss = await fetch_filtered_dataset(pool, assembly_id, position, chromosome, reference, alternate, None, None, True) - self.assertEqual(result_miss, []) - - async def test_fetch_filtered_dataset_call_exception(self): - """Test db call of retrieving main data with exception.""" - assembly_id = 'GRCh38' - position = (10, 20, None, None, None, None) - chromosome = 1 - reference = 'A' - alternate = ('DUP', None) - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = ConnectionException() - with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError): - await fetch_filtered_dataset(pool, assembly_id, position, chromosome, reference, alternate, None, None, False) - def test_handle_wildcard(self): """Test PostgreSQL wildcard handling.""" - sequence1 = 'ATCG' - sequence2 = 'ATNG' - sequence3 = 'NNCN' - self.assertEqual(handle_wildcard(sequence1), ['ATCG']) + sequence1 = "ATCG" + sequence2 = "ATNG" + sequence3 = "NNCN" + self.assertEqual(handle_wildcard(sequence1), ["ATCG"]) self.assertEqual(handle_wildcard(sequence2), ["%AT_G%"]) self.assertEqual(handle_wildcard(sequence3), 
["%__C_%"]) -if __name__ == '__main__': - asynctest.main() +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_db_load.py b/tests/test_db_load.py index 456e2dd6..c6c4bf9a 100644 --- a/tests/test_db_load.py +++ b/tests/test_db_load.py @@ -1,382 +1,366 @@ -import unittest -import asynctest -import asyncio -from testfixtures import TempDirectory -from beacon_api.utils.db_load import BeaconDB - - -class Variant: - """Variant Class. - - Mock this for Variant calculations. - """ - - def __init__(self, ALT, REF, INF, call_rate, var_type, num_called, is_sv=False): - """Initialize class.""" - self.INFO = INF - self.ALT = ALT - self.REF = REF - self.call_rate = call_rate - self.var_type = var_type - self.num_called = num_called - self.is_sv = is_sv - - -class INFO: - """INFO CLass. - - Mock this for storing VCF info. - """ - - def __init__(self, AC, VT, AN, AF, SVTYPE=None): - """Initialize class.""" - self.AC = AC - self.VT = VT - self.AN = AN - self.AF = AF - self.SVTYPE = SVTYPE - - def get(self, key): - """Inside `__getitem__` method.""" - return getattr(self, key) - - -class Transaction: - """Class Transaction. - - Mock this from asyncpg. - """ - - def __init__(self, *args, **kwargs): - """Initialize class.""" - pass - - async def __aenter__(self): - """Initialize class.""" - pass - - async def __aexit__(self, *args): - """Initialize class.""" - pass - - -class Statement(Transaction): - """Class Transaction. - - Mock this from asyncpg. - """ - - def __init__(self, query, accessData): - """Initialize class.""" - self.accessData = accessData - pass - - async def fetch(self, *args, **kwargs): - """Mimic fetch.""" - if self.accessData: - return self.accessData - else: - return [] - - -class Connection: - """Class Connection. - - Mock this from asyncpg. 
- """ - - def __init__(self, accessData=None): - """Initialize class.""" - self.accessData = accessData - pass - - async def fetch(self, *args, **kwargs): - """Mimic fetch.""" - return [{"table_name": "DATATSET1"}, {"table_name": "DATATSET2"}] - - async def execute(self, query, *args): - """Mimic execute.""" - return [] - - async def close(self): - """Mimic close.""" - pass - - async def __aenter__(self): - """Initialize class.""" - pass - - async def __aexit__(self, *args): - """Initialize class.""" - pass - - @asyncio.coroutine - def prepare(self, query): - """Mimic prepare.""" - return Statement(query, self.accessData) - - def transaction(self, *args, **kwargs): - """Mimic transaction.""" - return Transaction(*args, **kwargs) - - -class ConnectionException: - """Class Connection with Exception. - - Mock this from asyncpg. - """ - - def __init__(self): - """Initialize class.""" - pass - - def transaction(self, *args, **kwargs): - """Mimic transaction.""" - return Transaction(*args, **kwargs) - - async def execute(self, query, *args): - """Mimic execute.""" - return Exception - - @asyncio.coroutine - def prepare(self, query): - """Mimic prepare.""" - return Exception - - -class DatabaseTestCase(asynctest.TestCase): - """Test database operations.""" - - def setUp(self): - """Initialise BeaconDB object.""" - self._db = BeaconDB() - self._dir = TempDirectory() - self.data = """##fileformat=VCFv4.0 - ##fileDate=20090805 - ##source=myImputationProgramV3.1 - ##reference=1000GenomesPilot-NCBI36 - ##phasing=partial - ##INFO= - ##INFO= - ##INFO= - ##INFO= - ##INFO= - ##INFO= - ##INFO= - ##INFO= - ##FILTER= - ##FILTER= - ##FORMAT= - ##FORMAT= - ##FORMAT= - ##FORMAT= - ##ALT= - ##ALT= - #CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001 NA00002 NA00003 - 19 111 . A C 9.6 . . GT:HQ 0|0:10,10 0|0:10,10 0/1:3,3 - 19 112 . A G 10 . . 
GT:HQ 0|0:10,10 0|0:10,10 0/1:3,3 - 20 14370 rs6054257 G A 29 PASS NS=3;DP=14;AF=0.5;DB;H2 GT:GQ:DP:HQ 0|0:48:1:51,51 1|0:48:8:51,51 1/1:43:5:.,. - chrM 15011 . T C . PASS . GT:GQ:DP:RO:QR:AO:QA:GL 1:160:970:0:0:968:31792:-2860.58,0 1:160:970:0:0:968:31792:-2860.58,0""" - self.datafile = self._dir.write('data.csv', self.data.encode('utf-8')) - - def tearDown(self): - """Close database connection after tests.""" - self._dir.cleanup_all() - - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_rchop(self, db_mock): - """Test rchop for SVTYPE.""" - db_mock.return_value = Connection() - await self._db.connection() - result = self._db._rchop('INS:ME:LINE1', ":LINE1") - self.assertEqual('INS:ME', result) - result_no_ending = self._db._rchop('INS', ":LINE1") - self.assertEqual('INS', result_no_ending) - - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_handle_type(self, db_mock): - """Test handle type.""" - db_mock.return_value = Connection() - await self._db.connection() - result = self._db._handle_type(1, int) - self.assertEqual([1], result) - result_tuple = self._db._handle_type((0.1, 0.2), float) - self.assertEqual([0.1, 0.2], result_tuple) - - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_bnd_parts(self, db_mock): - """Test breakend parsing parts.""" - db_mock.return_value = Connection() - await self._db.connection() - result = self._db._bnd_parts('[CHR17:31356925[N', '126_2') - self.assertEqual(('chr17', 31356925, True, True, 'N', True, '126_2'), result) - - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg') - async def test_connection(self, db_mock): - """Test database URL fetching.""" - await self._db.connection() - db_mock.connect.assert_called() - - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_check_tables(self, db_mock): - """Test checking tables.""" - db_mock.return_value = Connection() - await 
self._db.connection() - db_mock.assert_called() - result = await self._db.check_tables(['DATATSET1', 'DATATSET2']) - # No Missing tables - self.assertEqual(result, []) - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_create_tables(self, db_mock, mock_log): - """Test creating tables.""" - sql = """CREATE TABLE IF NOT EXISTS beacon_data_table ( - id SERIAL, - dataset_id VARCHAR(200), - PRIMARY KEY (id));""" - db_mock.return_value = Connection() - await self._db.connection() - db_mock.assert_called() - sql_file = self._dir.write('sql.init', sql.encode('utf-8')) - await self._db.create_tables(sql_file) - # Should assert logs - mock_log.info.assert_called_with('Tables have been created') - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_create_tables_exception(self, db_mock, mock_log): - """Test creating tables exception.""" - db_mock.return_value = ConnectionException() - await self._db.connection() - await self._db.create_tables('sql.init') - log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CREATE TABLES -> [Errno 2] No such file or directory: 'sql.init'" - mock_log.error.assert_called_with(log) - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - @asynctest.mock.patch('beacon_api.utils.db_load.VCF') - async def test_load_metadata(self, mock_vcf, db_mock, mock_log): - """Test load metadata.""" - metadata = """{"name": "ALL.chrMT.phase3_callmom-v0_4.20130502.genotypes.vcf", - "datasetId": "urn:hg:exampleid", - "description": "Mitochondrial genome from the 1000 Genomes project", - "assemblyId": "GRCh38", - "createDateTime": "2013-05-02 12:00:00", - "updateDateTime": "2013-05-02 12:00:00", - "version": "v0.4", - "externalUrl": "smth", - "accessType": "PUBLIC"}""" - db_mock.return_value = Connection() - await 
self._db.connection() - db_mock.assert_called() - metafile = self._dir.write('data.json', metadata.encode('utf-8')) - vcf = asynctest.mock.MagicMock(name='samples') - vcf.samples.return_value = [1, 2, 3] - await self._db.load_metadata(vcf, metafile, self.datafile) - # Should assert logs - mock_log.info.mock_calls = [f'Parsing metadata from {metafile}', - 'Metadata has been parsed'] - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_load_metadata_exception(self, db_mock, mock_log): - """Test load metadata error.""" - db_mock.return_value = ConnectionException() - await self._db.connection() - vcf = asynctest.mock.MagicMock(name='samples') - vcf.samples.return_value = [1, 2, 3] - await self._db.load_metadata(vcf, 'meta.are', 'datafile') - log = "AN ERROR OCCURRED WHILE ATTEMPTING TO PARSE METADATA -> [Errno 2] No such file or directory: 'meta.are'" - mock_log.error.assert_called_with(log) - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_load_datafile(self, db_mock, mock_log): - """Test load_datafile.""" - db_mock.return_value = Connection() - vcf = asynctest.mock.MagicMock(name='samples') - vcf.return_value = [{'record': 1}, {'record': 2}, {'records': 3}] - vcf.samples.return_value = [{'record': 1}, {'record': 2}, {'records': 3}] - await self._db.connection() - db_mock.assert_called() - await self._db.load_datafile(vcf, self.datafile, 'DATASET1') - # Should assert logs - mock_log.info.mock_calls = [f'Read data from {self.datafile}', - f'{self.datafile} has been processed'] - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_insert_variants(self, db_mock, mock_log): - """Test insert variants.""" - db_mock.return_value = Connection() - await self._db.connection() - db_mock.assert_called() - await 
self._db.insert_variants('DATASET1', ['C'], 1) - # Should assert logs - mock_log.info.mock_calls = ['Received 1 variants for insertion to DATASET1', - 'Insert variants into the database'] - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_close(self, db_mock, mock_log): - """Test database URL close.""" - db_mock.return_value = Connection() - await self._db.connection() - await self._db.close() - mock_log.info.mock_calls = ['Mark the database connection to be closed', - 'The database connection has been closed'] - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_close_error(self, db_mock, mock_log): - """Test database URL close error.""" - db_mock.return_value = ConnectionException() - await self._db.connection() - await self._db.close() - log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CLOSE DATABASE CONNECTION -> 'ConnectionException' object has no attribute 'close'" - mock_log.error.assert_called_with(log) - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_unpack(self, db_mock, mock_log): - """Test database URL fetching.""" - db_mock.return_value = Connection() - await self._db.connection() - inf1 = INFO((1), 'i', 3, None) - variant_1 = Variant(['C'], 'T', inf1, 0.7, 'indel', 3) - result = self._db._unpack(variant_1) - self.assertEqual(([0.3333333333333333], [1], ['SNP'], ['C'], 3, []), result) - inf2 = INFO(1, 'M', 3, None) - variant_2 = Variant(['AT', 'A'], 'ATA', inf2, 0.7, 'mnp', 3) - result = self._db._unpack(variant_2) - self.assertEqual(([0.3333333333333333], [1], ['DEL', 'DEL'], ['AT', 'A'], 3, []), result) - inf3 = INFO((1), 'S', 3, 0.5) - variant_3 = Variant(['TC'], 'T', inf3, 0.7, 'snp', 3) - result = self._db._unpack(variant_3) - self.assertEqual(([0.5], [1], ['INS'], ['TC'], 3, []), 
result) - inf4 = INFO((1), '', 3, None, 'BND') - variant_4 = Variant(['TC'], 'T', inf4, 0.7, 'snp', 3) - result = self._db._unpack(variant_4) - self.assertEqual(([0.3333333333333333], [1], ['SNP'], ['TC'], 3, []), result) - inf5 = INFO((1), 'S', 3, None, '') - variant_5 = Variant(['TC'], 'T', inf5, 0.7, 'ins', 3) - result5 = self._db._unpack(variant_5) - self.assertEqual(([0.3333333333333333], [1], ['INS'], ['TC'], 3, []), result5) - - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - async def test_chunks(self, db_mock, mock_log): - """Test database URL fetching.""" - db_mock.return_value = Connection() - await self._db.connection() - variant = [(1, 2), (2, 3)] - result = self._db._chunks(variant, 1) - lines = [] - for i in result: - lines.append(list(i)) - self.assertEqual([[(1, 2)], [(2, 3)]], lines) - - -if __name__ == '__main__': - unittest.main() +import unittest +from testfixtures import TempDirectory +from beacon_api.utils.db_load import BeaconDB + + +class Variant: + """Variant Class. + + Mock this for Variant calculations. + """ + + def __init__(self, ALT, REF, INF, call_rate, var_type, num_called, is_sv=False): + """Initialize class.""" + self.INFO = INF + self.ALT = ALT + self.REF = REF + self.call_rate = call_rate + self.var_type = var_type + self.num_called = num_called + self.is_sv = is_sv + + +class INFO: + """INFO CLass. + + Mock this for storing VCF info. + """ + + def __init__(self, AC, VT, AN, AF, SVTYPE=None): + """Initialize class.""" + self.AC = AC + self.VT = VT + self.AN = AN + self.AF = AF + self.SVTYPE = SVTYPE + + def get(self, key): + """Inside `__getitem__` method.""" + return getattr(self, key) + + +class Transaction: + """Class Transaction. + + Mock this from asyncpg. 
+ """ + + def __init__(self, *args, **kwargs): + """Initialize class.""" + pass + + async def __aenter__(self): + """Initialize class.""" + pass + + async def __aexit__(self, *args): + """Initialize class.""" + pass + + +class Statement(Transaction): + """Class Transaction. + + Mock this from asyncpg. + """ + + def __init__(self, query, accessData): + """Initialize class.""" + self.accessData = accessData + pass + + async def fetch(self, *args, **kwargs): + """Mimic fetch.""" + if self.accessData: + return self.accessData + else: + return [] + + +class Connection: + """Class Connection. + + Mock this from asyncpg. + """ + + def __init__(self, accessData=None): + """Initialize class.""" + self.accessData = accessData + pass + + async def fetch(self, *args, **kwargs): + """Mimic fetch.""" + return [{"table_name": "DATATSET1"}, {"table_name": "DATATSET2"}] + + async def execute(self, query, *args): + """Mimic execute.""" + return [] + + async def close(self): + """Mimic close.""" + pass + + async def __aenter__(self): + """Initialize class.""" + pass + + async def __aexit__(self, exc_type, exc, tb): + """Initialize class.""" + pass + + async def prepare(self, query): + """Mimic prepare.""" + return Statement(query, self.accessData) + + async def transaction(self, *args, **kwargs): + """Mimic transaction.""" + return Transaction(*args, **kwargs) + + +class ConnectionException: + """Class Connection with Exception. + + Mock this from asyncpg. 
+ """ + + def __init__(self): + """Initialize class.""" + pass + + async def transaction(self, *args, **kwargs): + """Mimic transaction.""" + return Transaction(*args, **kwargs) + + async def execute(self, query, *args): + """Mimic execute.""" + return Exception + + async def prepare(self, query): + """Mimic prepare.""" + return Exception + + +class DatabaseTestCase(unittest.IsolatedAsyncioTestCase): + """Test database operations.""" + + def setUp(self): + """Initialise BeaconDB object.""" + self._db = BeaconDB() + self._dir = TempDirectory() + self.data = """##fileformat=VCFv4.0 + ##fileDate=20090805 + ##source=myImputationProgramV3.1 + ##reference=1000GenomesPilot-NCBI36 + ##phasing=partial + ##INFO= + ##INFO= + ##INFO= + ##INFO= + ##INFO= + ##INFO= + ##INFO= + ##INFO= + ##FILTER= + ##FILTER= + ##FORMAT= + ##FORMAT= + ##FORMAT= + ##FORMAT= + ##ALT= + ##ALT= + #CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001 NA00002 NA00003 + 19 111 . A C 9.6 . . GT:HQ 0|0:10,10 0|0:10,10 0/1:3,3 + 19 112 . A G 10 . . GT:HQ 0|0:10,10 0|0:10,10 0/1:3,3 + 20 14370 rs6054257 G A 29 PASS NS=3;DP=14;AF=0.5;DB;H2 GT:GQ:DP:HQ 0|0:48:1:51,51 1|0:48:8:51,51 1/1:43:5:.,. + chrM 15011 . T C . PASS . 
GT:GQ:DP:RO:QR:AO:QA:GL 1:160:970:0:0:968:31792:-2860.58,0 1:160:970:0:0:968:31792:-2860.58,0""" + self.datafile = self._dir.write("data.csv", self.data.encode("utf-8")) + + def tearDown(self): + """Close database connection after tests.""" + self._dir.cleanup_all() + + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_rchop(self, db_mock): + """Test rchop for SVTYPE.""" + db_mock.return_value = Connection() + await self._db.connection() + result = self._db._rchop("INS:ME:LINE1", ":LINE1") + self.assertEqual("INS:ME", result) + result_no_ending = self._db._rchop("INS", ":LINE1") + self.assertEqual("INS", result_no_ending) + + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_handle_type(self, db_mock): + """Test handle type.""" + db_mock.return_value = Connection() + await self._db.connection() + result = self._db._handle_type(1, int) + self.assertEqual([1], result) + result_tuple = self._db._handle_type((0.1, 0.2), float) + self.assertEqual([0.1, 0.2], result_tuple) + + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_bnd_parts(self, db_mock): + """Test breakend parsing parts.""" + db_mock.return_value = Connection() + await self._db.connection() + result = self._db._bnd_parts("[CHR17:31356925[N", "126_2") + self.assertEqual(("chr17", 31356925, True, True, "N", True, "126_2"), result) + + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg") + async def test_connection(self, db_mock): + """Test database URL fetching.""" + await self._db.connection() + db_mock.connect.assert_called() + + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_check_tables(self, db_mock): + """Test checking tables.""" + db_mock.return_value = Connection() + await self._db.connection() + db_mock.assert_called() + result = await self._db.check_tables(["DATATSET1", "DATATSET2"]) + # No Missing tables + self.assertEqual(result, []) + + 
@unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_create_tables(self, db_mock, mock_log): + """Test creating tables.""" + sql = """CREATE TABLE IF NOT EXISTS beacon_data_table ( + id SERIAL, + dataset_id VARCHAR(200), + PRIMARY KEY (id));""" + db_mock.return_value = Connection() + await self._db.connection() + db_mock.assert_called() + sql_file = self._dir.write("sql.init", sql.encode("utf-8")) + await self._db.create_tables(sql_file) + # Should assert logs + mock_log.info.assert_called_with("Tables have been created") + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_create_tables_exception(self, db_mock, mock_log): + """Test creating tables exception.""" + db_mock.return_value = ConnectionException() + await self._db.connection() + await self._db.create_tables("sql.init") + log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CREATE TABLES -> [Errno 2] No such file or directory: 'sql.init'" + mock_log.error.assert_called_with(log) + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + @unittest.mock.patch("beacon_api.utils.db_load.VCF") + async def test_load_metadata(self, mock_vcf, db_mock, mock_log): + """Test load metadata.""" + metadata = """{"name": "ALL.chrMT.phase3_callmom-v0_4.20130502.genotypes.vcf", + "datasetId": "urn:hg:exampleid", + "description": "Mitochondrial genome from the 1000 Genomes project", + "assemblyId": "GRCh38", + "createDateTime": "2013-05-02 12:00:00", + "updateDateTime": "2013-05-02 12:00:00", + "version": "v0.4", + "externalUrl": "smth", + "accessType": "PUBLIC"}""" + db_mock.return_value = Connection() + await self._db.connection() + db_mock.assert_called() + metafile = self._dir.write("data.json", metadata.encode("utf-8")) + vcf = unittest.mock.MagicMock(name="samples") + 
vcf.samples.return_value = [1, 2, 3] + await self._db.load_metadata(vcf, metafile, self.datafile) + # Should assert logs + mock_log.info.mock_calls = [f"Parsing metadata from {metafile}", "Metadata has been parsed"] + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_load_metadata_exception(self, db_mock, mock_log): + """Test load metadata error.""" + db_mock.return_value = ConnectionException() + await self._db.connection() + vcf = unittest.mock.MagicMock(name="samples") + vcf.samples.return_value = [1, 2, 3] + await self._db.load_metadata(vcf, "meta.are", "datafile") + log = "AN ERROR OCCURRED WHILE ATTEMPTING TO PARSE METADATA -> [Errno 2] No such file or directory: 'meta.are'" + mock_log.error.assert_called_with(log) + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_load_datafile(self, db_mock, mock_log): + """Test load_datafile.""" + db_mock.return_value = Connection() + vcf = unittest.mock.MagicMock(name="samples") + vcf.return_value = [{"record": 1}, {"record": 2}, {"records": 3}] + vcf.samples.return_value = [{"record": 1}, {"record": 2}, {"records": 3}] + await self._db.connection() + db_mock.assert_called() + await self._db.load_datafile(vcf, self.datafile, "DATASET1") + # Should assert logs + mock_log.info.mock_calls = [f"Read data from {self.datafile}", f"{self.datafile} has been processed"] + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_close(self, db_mock, mock_log): + """Test database URL close.""" + db_mock.return_value = Connection() + await self._db.connection() + await self._db.close() + mock_log.info.mock_calls = [ + "Mark the database connection to be closed", + "The database connection has been closed", + ] + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + 
@unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_close_error(self, db_mock, mock_log): + """Test database URL close error.""" + db_mock.return_value = ConnectionException() + await self._db.connection() + await self._db.close() + log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CLOSE DATABASE CONNECTION -> 'ConnectionException' object has no attribute 'close'" + mock_log.error.assert_called_with(log) + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_unpack(self, db_mock, mock_log): + """Test database URL fetching.""" + db_mock.return_value = Connection() + await self._db.connection() + inf1 = INFO((1), "i", 3, None) + variant_1 = Variant(["C"], "T", inf1, 0.7, "indel", 3) + result = self._db._unpack(variant_1) + self.assertEqual(([0.3333333333333333], [1], ["SNP"], ["C"], 3, []), result) + inf2 = INFO(1, "M", 3, None) + variant_2 = Variant(["AT", "A"], "ATA", inf2, 0.7, "mnp", 3) + result = self._db._unpack(variant_2) + self.assertEqual(([0.3333333333333333], [1], ["DEL", "DEL"], ["AT", "A"], 3, []), result) + inf3 = INFO((1), "S", 3, 0.5) + variant_3 = Variant(["TC"], "T", inf3, 0.7, "snp", 3) + result = self._db._unpack(variant_3) + self.assertEqual(([0.5], [1], ["INS"], ["TC"], 3, []), result) + inf4 = INFO((1), "", 3, None, "BND") + variant_4 = Variant(["TC"], "T", inf4, 0.7, "snp", 3) + result = self._db._unpack(variant_4) + self.assertEqual(([0.3333333333333333], [1], ["SNP"], ["TC"], 3, []), result) + inf5 = INFO((1), "S", 3, None, "") + variant_5 = Variant(["TC"], "T", inf5, 0.7, "ins", 3) + result5 = self._db._unpack(variant_5) + self.assertEqual(([0.3333333333333333], [1], ["INS"], ["TC"], 3, []), result5) + + @unittest.mock.patch("beacon_api.utils.db_load.LOG") + @unittest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + async def test_chunks(self, db_mock, mock_log): + """Test database URL fetching.""" + db_mock.return_value = 
Connection() + await self._db.connection() + variant = [(1, 2), (2, 3)] + result = self._db._chunks(variant, 1) + lines = [] + for i in result: + lines.append(list(i)) + self.assertEqual([[(1, 2)], [(2, 3)]], lines) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_mate_name.py b/tests/test_mate_name.py index d6fc6fd7..b71d16a9 100644 --- a/tests/test_mate_name.py +++ b/tests/test_mate_name.py @@ -1,21 +1,11 @@ -import asynctest -import aiohttp -from beacon_api.extensions.mate_name import find_fusion, fetch_fusion_dataset -from .test_db_load import Connection, ConnectionException +import unittest +from beacon_api.extensions.mate_name import find_fusion -class TestDataQueryFunctions(asynctest.TestCase): +class TestDataQueryFunctions(unittest.IsolatedAsyncioTestCase): """Test Data Query functions.""" - def setUp(self): - """Set up.""" - pass - - def tearDown(self): - """Close database connection after tests.""" - pass - - @asynctest.mock.patch("beacon_api.extensions.mate_name.fetch_fusion_dataset") + @unittest.mock.patch("beacon_api.extensions.mate_name.fetch_fusion_dataset") async def test_find_fusion(self, mock_filtered): """Test find datasets.""" mock_filtered.return_value = [] @@ -25,87 +15,6 @@ async def test_find_fusion(self, mock_filtered): result_miss = await find_fusion(None, "GRCh38", (), "Y", "T", "C", [], access_type, "MISS") self.assertEqual(result_miss, []) - async def test_fetch_fusion_dataset_call(self): - """Test db call for retrieving mate data.""" - pool = asynctest.CoroutineMock() - db_response = { - "referenceBases": "", - "alternateBases": "", - "variantType": "", - "referenceName": "Chr38", - "frequency": 0, - "callCount": 0, - "sampleCount": 0, - "variantCount": 0, - "start": 0, - "end": 0, - "accessType": "PUBLIC", - "datasetId": "test", - } - pool.acquire().__aenter__.return_value = Connection(accessData=[db_response]) - assembly_id = "GRCh38" - position = (10, 20, None, None, None, None) - chromosome = 1 - reference 
= "A" - result = await fetch_fusion_dataset(pool, assembly_id, position, chromosome, reference, None, None, None, False) - # for now it can return empty dataset - # in order to get a response we will have to mock it - # in Connection() class - expected = { - "referenceName": "Chr38", - "callCount": 0, - "sampleCount": 0, - "variantCount": 0, - "datasetId": "test", - "referenceBases": "", - "alternateBases": "", - "variantType": "", - "start": 0, - "end": 0, - "frequency": 0, - "info": {"accessType": "PUBLIC"}, - "datasetHandover": [ - { - "handoverType": {"id": "CUSTOM", "label": "Variants"}, - "description": "browse the variants matched by the query", - "url": "https://examplebrowser.org/dataset/test/browser/variant/Chr38-1--", - }, - { - "handoverType": {"id": "CUSTOM", "label": "Region"}, - "description": "browse data of the region matched by the query", - "url": "https://examplebrowser.org/dataset/test/browser/region/Chr38-1-1", - }, - { - "handoverType": {"id": "CUSTOM", "label": "Data"}, - "description": "retrieve information of the datasets", - "url": "https://examplebrowser.org/dataset/test/browser", - }, - ], - } - self.assertEqual(result, [expected]) - - async def test_fetch_fusion_dataset_call_miss(self): - """Test db call for retrieving mate miss data.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection() - assembly_id = "GRCh38" - position = (10, 20, None, None, None, None) - chromosome = 1 - reference = "A" - result_miss = await fetch_fusion_dataset(pool, assembly_id, position, chromosome, reference, None, None, None, True) - self.assertEqual(result_miss, []) - - async def test_fetch_fusion_dataset_call_exception(self): - """Test db call for retrieving mate data with exception.""" - pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = ConnectionException() - assembly_id = "GRCh38" - position = (10, 20, None, None, None, None) - chromosome = 1 - reference = "A" - with 
self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError): - await fetch_fusion_dataset(pool, assembly_id, position, chromosome, reference, None, None, None, False) - if __name__ == "__main__": - asynctest.main() + unittest.main() diff --git a/tests/test_response.py b/tests/test_response.py index b3bd659c..da828822 100644 --- a/tests/test_response.py +++ b/tests/test_response.py @@ -1,6 +1,6 @@ from beacon_api.api.info import beacon_info, ga4gh_info from beacon_api.api.query import query_request_handler -import asynctest +import unittest from beacon_api.schemas import load_schema from beacon_api.utils.validate_jwt import get_key from beacon_api.permissions.ga4gh import retrieve_user_data, get_jwk @@ -56,14 +56,14 @@ ] -class TestBasicFunctions(asynctest.TestCase): +class TestBasicFunctions(unittest.IsolatedAsyncioTestCase): """Test supporting functions.""" - @asynctest.mock.patch("beacon_api.api.info.fetch_dataset_metadata") + @unittest.mock.patch("beacon_api.api.info.fetch_dataset_metadata") async def test_beacon_info(self, db_metadata): """Test info metadata response.""" db_metadata.return_value = [mock_dataset_metadata] - pool = asynctest.CoroutineMock() + pool = unittest.mock.AsyncMock() result = await beacon_info("localhost", pool) # if it is none no error occurred self.assertEqual(jsonschema.validate(json.loads(json.dumps(result)), load_schema("info")), None) @@ -75,13 +75,13 @@ async def test_ga4gh_info(self): # if it is none no error occurred self.assertEqual(jsonschema.validate(json.loads(json.dumps(result)), load_schema("service-info")), None) - @asynctest.mock.patch("beacon_api.api.query.find_datasets") - @asynctest.mock.patch("beacon_api.api.query.fetch_datasets_access") + @unittest.mock.patch("beacon_api.api.query.find_datasets") + @unittest.mock.patch("beacon_api.api.query.fetch_datasets_access") async def test_beacon_query(self, fetch_req_datasets, data_find): """Test query data response.""" data_find.return_value = mock_data 
fetch_req_datasets.return_value = mock_controlled - pool = asynctest.CoroutineMock() + pool = unittest.mock.AsyncMock() request = { "assemblyId": "GRCh38", "referenceName": "MT", @@ -97,13 +97,13 @@ async def test_beacon_query(self, fetch_req_datasets, data_find): self.assertEqual(jsonschema.validate(json.loads(json.dumps(result)), load_schema("response")), None) data_find.assert_called() - @asynctest.mock.patch("beacon_api.api.query.find_fusion") - @asynctest.mock.patch("beacon_api.api.query.fetch_datasets_access") + @unittest.mock.patch("beacon_api.api.query.find_fusion") + @unittest.mock.patch("beacon_api.api.query.fetch_datasets_access") async def test_beacon_query_bnd(self, fetch_req_datasets, data_find): """Test query data response.""" data_find.return_value = mock_data fetch_req_datasets.return_value = mock_controlled - pool = asynctest.CoroutineMock() + pool = unittest.mock.AsyncMock() request = { "assemblyId": "GRCh38", "referenceName": "MT", @@ -171,13 +171,13 @@ async def test_get_jwk(self, m): self.assertTrue(isinstance(result, dict)) self.assertTrue(result["keys"][0]["alg"], "RSA256") - @asynctest.mock.patch("beacon_api.permissions.ga4gh.LOG") + @unittest.mock.patch("beacon_api.permissions.ga4gh.LOG") async def test_get_jwk_bad(self, mock_log): """Test get JWK exception log.""" await get_jwk("http://test.csc.fi/jwk") mock_log.error.assert_called_with("Could not retrieve JWK from http://test.csc.fi/jwk") - @asynctest.mock.patch("beacon_api.utils.validate_jwt.OAUTH2_CONFIG", return_value={"server": None}) + @unittest.mock.patch("beacon_api.utils.validate_jwt.OAUTH2_CONFIG", return_value={"server": None}) async def test_bad_get_key(self, oauth_none): """Test bad test_get_key.""" with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError): diff --git a/tox.ini b/tox.ini index 966eae0f..2abdedb1 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{36,38},flake8,docs,bandit,unit_tests,mypy +envlist = 
flake8,docs,bandit,unit_tests,mypy skipsdist = True [flake8] @@ -35,8 +35,15 @@ skip_install = true deps = -rrequirements.txt mypy + types-ujson commands = mypy --ignore-missing-imports beacon_api/ +[testenv:black] +skip_install = true +deps = + black +commands = black . -l 160 --check + [testenv:unit_tests] setenv = CONFIG_FILE = {toxinidir}/tests/test.ini @@ -49,5 +56,4 @@ commands = py.test -x --cov=beacon_api tests/ --cov-fail-under=80 [gh-actions] python = - 3.6: unit_tests 3.8: flake8, unit_tests, docs, bandit, mypy