Skip to content
This repository has been archived by the owner on Dec 16, 2019. It is now read-only.

Commit

Permalink
Merge pull request #357 from NBISweden/feature/flake8-docstrings
Browse files Browse the repository at this point in the history
Make everything validate flake8-docstrings
  • Loading branch information
blankdots committed Sep 20, 2018
2 parents be87db6 + 7b8858c commit 92c4dd0
Show file tree
Hide file tree
Showing 31 changed files with 304 additions and 147 deletions.
3 changes: 1 addition & 2 deletions docs/code.rst
Original file line number Diff line number Diff line change
Expand Up @@ -56,8 +56,7 @@ Utility Functions
lega.utils.storage
lega.utils.eureka
lega.utils.exceptions
lega.utils.logging.LEGAHandler
lega.utils.logging.JSONFormatter
lega.utils.logging



Expand Down
9 changes: 9 additions & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,16 @@

# -- General configuration ------------------------------------------------


class Mock(MagicMock):
    """Stand-in for modules that cannot be imported while building the docs.

    Sphinx only needs attribute access to succeed; no real behavior is required.
    """

    @classmethod
    def __getattr__(cls, name):
        """Return a fresh :class:`MagicMock` for any requested attribute."""
        return MagicMock()


Expand Down Expand Up @@ -121,7 +128,9 @@ def __getattr__(cls, name):

today_fmt = '%B %d, %Y'


def setup(app):
"""Add custom stylesheet."""
app.add_stylesheet('custom.css')


Expand Down
2 changes: 1 addition & 1 deletion lega/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# __init__ is here so that we don't collapse in sys.path with another lega module

"""The lega package contains code to start a *Local EGA*. """
"""The lega package contains code to start a *Local EGA*."""

__title__ = 'Local EGA'
__version__ = VERSION = '1.1'
Expand Down
8 changes: 4 additions & 4 deletions lega/conf/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""\
Configuration Module provides a dictionary-like with configuration settings.
"""Configuration Module provides a dictionary-like with configuration settings.
It also loads the logging settings when ``setup`` is called.
* The ``--log <file>`` argument is used to configuration where the logs go.
Expand Down Expand Up @@ -109,7 +109,7 @@ def _load_log_file(self, filename):
self.log_conf = None

def _load_log_conf(self, args=None):
"""Finding the `--log` file."""
"""Find the `--log` file."""
try:
lconf = args[args.index('--log') + 1]
self._load_log_file(lconf)
Expand All @@ -135,7 +135,7 @@ def __repr__(self):
return res

def get_value(self, section, option, conv=str, default=None, raw=False):
""""Get a specific value for this paramater either as env variable or from config files.
"""Get a specific value for this paramater either as env variable or from config files.
``section`` and ``option`` are mandatory while ``conv``, ``default`` (fallback) and ``raw`` are optional.
"""
Expand Down
4 changes: 2 additions & 2 deletions lega/conf/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@

from . import CONF

def main(args=None):
"""The main routine."""

def main(args=None):
"""Run the main routine, for loading configuration."""
if not args:
args = sys.argv[1:]

Expand Down
15 changes: 8 additions & 7 deletions lega/ingest.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

'''Worker reading messages from the ``files`` queue, splitting the
Crypt4GH header from the remainder of the file. The header is stored
in the database and the remainder is sent to the backend storage:
"""Worker reading messages from the ``files`` queue, splitting the Crypt4GH header from the remainder of the file.
The header is stored in the database and the remainder is sent to the backend storage:
either a regular file system or an S3 object store.
It is possible to start several workers.
Expand All @@ -15,8 +15,7 @@
Upon completion, a message is sent to the local exchange with the
routing key :``archived``.
'''
"""

import sys
import logging
Expand All @@ -31,11 +30,11 @@

LOG = logging.getLogger(__name__)


@db.catch_error
@db.crypt4gh_to_user_errors
def work(fs, channel, data):
'''Reads a message, splits the header and sends the remainder to the backend store.'''

"""Read a message, split the header and send the remainder to the backend store."""
filepath = data['filepath']
LOG.info(f"Processing {filepath}")

Expand Down Expand Up @@ -93,7 +92,9 @@ def work(fs, channel, data):
LOG.debug(f"Reply message: {data}")
return data


def main(args=None):
"""Run ingest service."""
if not args:
args = sys.argv[1:]

Expand Down
52 changes: 37 additions & 15 deletions lega/keyserver.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
#!/usr/bin/env python3

'''\
The Keyserver provides a REST endpoint for retrieving PGP and Re-encryption keys.
"""The Keyserver provides a REST endpoint for retrieving PGP and Re-encryption keys.
The keyserver also registers with Eureka service discovery.
'''
"""


import sys
Expand All @@ -24,11 +24,12 @@
LOG = logging.getLogger(__name__)
routes = web.RouteTableDef()


class Cache:
"""In memory cache."""

def __init__(self, max_size=10, ttl=None):
"""Initialise cache."""
"""Initialize cache."""
self.store = dict()
self.max_size = max_size
self.ttl = ttl
Expand Down Expand Up @@ -70,7 +71,7 @@ def check_ttl(self):
return keys

def _time_delta(self, expire):
""""Convert time left in human readable format."""
"""Convert time left in human readable format."""
# A lot of back and forth transformation
end_time = datetime.datetime.fromtimestamp(expire).strftime(self.FMT)
today = datetime.datetime.today().strftime(self.FMT)
Expand Down Expand Up @@ -104,8 +105,9 @@ def clear(self):
# Caching the keys
####################################


def _unlock_key(name, active=None, path=None, expire=None, passphrase=None, **kwargs):
"""Unlocking a key and loading it in the cache."""
"""Unlock a key and loading it in the cache."""
key, _ = pgpy.PGPKey.from_file(path)
assert not key.is_public, f"The key {name} should be private"
with key.unlock(passphrase) as k:
Expand All @@ -121,8 +123,10 @@ def _unlock_key(name, active=None, path=None, expire=None, passphrase=None, **kw
# Retrieve the active keys
####################################


@routes.get('/active/{key_type}')
async def retrieve_active_key(request):
"""Retrieve the active key from the cache and serve it via HTTPS."""
key_type = request.match_info['key_type'].lower()
LOG.debug(f'Requesting active ({key_type}) key')
if key_type not in ('public', 'private'):
Expand All @@ -137,8 +141,10 @@ async def retrieve_active_key(request):
LOG.warn(f"Requested active ({key_type}) key not found.")
return web.HTTPNotFound()


@routes.get('/retrieve/{requested_id}/{key_type}')
async def retrieve_key(request):
"""Retrieve a specific key via keyID or fingerprint from the cache and serve it via HTTPS."""
LOG.debug('Retrieve key')
requested_id = request.match_info['requested_id']
key_type = request.match_info['key_type'].lower()
Expand All @@ -154,11 +160,13 @@ async def retrieve_key(request):
LOG.warn(f"Requested key {requested_id} not found.")
return web.HTTPNotFound()


@routes.post('/admin/unlock')
async def unlock_key(request):
"""Unlock a key via a POST request.
POST request takes the form:
\{"private": "path/to/file.sec", "passphrase": "pass", "expire": "30/MAR/18 08:00:00"\}
{"private": "path/to/file.sec", "passphrase": "pass", "expire": "30/MAR/18 08:00:00"}
"""
key_info = await request.json()
LOG.debug(f'Admin unlocking: {key_info}')
Expand All @@ -168,26 +176,29 @@ async def unlock_key(request):
else:
return web.HTTPBadRequest()


@routes.get('/health')
async def healthcheck(request):
"""A health endpoint for service discovery.
"""Return ok, health endpoint for service discovery.
It will always return ok.
"""
LOG.debug('Healthcheck called')
return web.HTTPOk()


# TO BE REMOVED
@routes.get('/admin/ttl')
async def check_ttl(request):
"""Evict from the cache if TTL expired
and return the keys that survived""" # ehh...why? /Fred
"""Evict from the cache if TTL expired and return the keys that survived."""
LOG.debug('Admin TTL')
expire = _cache.check_ttl()
if expire:
return web.json_response(expire)
else:
return web.HTTPBadRequest()


def load_keys_conf(store):
"""Parse and load keys configuration."""
# Cache the active key names
Expand All @@ -200,38 +211,49 @@ def load_keys_conf(store):

alive = True # used to set if the keyserver is alive in the shutdown


async def renew_lease(eureka, interval):
'''Renew eureka lease at specific interval.'''
"""Renew eureka lease at specific interval."""
while alive:
await asyncio.sleep(interval)
await eureka.renew()
LOG.info('Keyserver Eureka lease renewed.')


async def init(app):
'''Initialization running before the loop.run_forever'''
"""Initialize run before the loop.run_forever."""
app['renew_eureka'] = app.loop.create_task(renew_lease(app['eureka'], app['interval']))
# Note: will exit on failure
load_keys_conf(app['store'])
await app['eureka'].register()
LOG.info('Keyserver registered with Eureka.')


async def shutdown(app):
'''Function run after a KeyboardInterrupt. After that: cleanup'''
"""Perform shutdown, after a KeyboardInterrupt.
After that: cleanup.
"""
LOG.info('Shutting down the database engine')
global alive
await app['eureka'].deregister()
alive = False


async def cleanup(app):
'''Function run after a KeyboardInterrupt. Right after, the loop is closed'''
"""Perform cleanup, after a KeyboardInterrupt.
Right after, the loop is closed.
"""
LOG.info('Cancelling all pending tasks')
# THIS SPAWNS an error see https://github.com/aio-libs/aiohttp/blob/master/aiohttp/web_runner.py#L178
# for more details how the cleanup happens.
# for task in asyncio.Task.all_tasks():
# task.cancel()


def main(args=None):
"""Where the magic happens."""
"""Run keyserver with configuration."""
if not args:
args = sys.argv[1:]

Expand Down
13 changes: 7 additions & 6 deletions lega/mapper.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,16 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

'''Consumes message to update the database with stable IDs to file IDS mappings.
"""Consumes message to update the database with stable IDs to file IDS mappings.
Instead of building a REST endpoint in front of the database, we
exchange messages between the brokers.
Messages will be reliably arrived to the local broker, via the
registered upstream queue.
Note that the upstream is registered via an authenticated mechanism, and uses AMQPS.
'''
.. note:: The upstream is registered via an authenticated mechanism, and uses AMQPS.
"""

import sys
import logging
Expand All @@ -22,10 +21,10 @@

LOG = logging.getLogger(__name__)


@db.catch_error
def work(data):
'''Reads a message containing the ids and add it to the database.'''

"""Read a message containing the ids and add it to the database."""
file_id = data['file_id']
stable_id = data['stable_id']
LOG.info(f"Mapping {file_id} to stable_id {stable_id}")
Expand All @@ -35,7 +34,9 @@ def work(data):
LOG.info(f"Stable ID {stable_id} mapped to {file_id}")
return None


def main(args=None):
"""Run mapper service."""
if not args:
args = sys.argv[1:]

Expand Down
Loading

0 comments on commit 92c4dd0

Please sign in to comment.