#5 pytest
Incorporate pytest as the test runner, dockerize neo4j, make ns_workflow a package
mjstealey committed Dec 6, 2018
1 parent 1ff8ecb commit a8638fa
Showing 14 changed files with 1,008 additions and 36 deletions.
28 changes: 17 additions & 11 deletions .gitignore
@@ -1,15 +1,21 @@
.idea
.DS_Store
__pycache__/
*.egg
*.egg-info
*.py[cod]
venv
.coverage
.DS_Store
.idea
.pytest_cache
.venv
/base/.env
/base/secrets.py
/nginx/default.conf
base.sock
/static/*
/cover/*
.coverage
xunittest.xml
base/.env
base/secrets.py
cover/*
migrations
neo4j
nginx/default.conf
nosetests.xml
static/*
venv
xunittest.xml
xunittest.xml
__pycache__
33 changes: 33 additions & 0 deletions base/runner.py
@@ -0,0 +1,33 @@
"""
./manage.py test <django args> -- <pytest args>
"""

class PytestTestRunner(object):
"""Runs pytest to discover and run tests."""

def __init__(self, verbosity=1, failfast=False, keepdb=False, **kwargs):
self.verbosity = verbosity
self.failfast = failfast
self.keepdb = keepdb

def run_tests(self, test_labels):
"""Run pytest and return the exitcode.
It translates some of Django's test command options to pytest's.
"""
import pytest

argv = []
if self.verbosity == 0:
argv.append('--quiet')
if self.verbosity == 2:
argv.append('--verbose')
if self.verbosity == 3:
argv.append('-vv')
if self.failfast:
argv.append('--exitfirst')
if self.keepdb:
argv.append('--reuse-db')

argv.extend(test_labels)
return pytest.main(argv)
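
With TEST_RUNNER pointed at this class (see base/settings.py below), ./manage.py test hands everything after a bare -- straight to pytest. A minimal sketch of what the option translation amounts to, using a hypothetical users test label:

# Not part of the commit: a small illustration of how the runner maps Django
# options onto pytest flags; the 'users' test label is hypothetical.
from base.runner import PytestTestRunner

runner = PytestTestRunner(verbosity=2, failfast=True, keepdb=False)
exit_code = runner.run_tests(['users'])  # roughly equivalent to: pytest --verbose --exitfirst users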
23 changes: 13 additions & 10 deletions base/settings.py
@@ -160,19 +160,22 @@
LOGIN_REDIRECT_URL = '/profile/'
LOGOUT_REDIRECT_URL = '/'

# pytest-django as the test runner
# https://pytest-django.readthedocs.io/en/latest/faq.html
TEST_RUNNER = 'base.runner.PytestTestRunner'

# django-nose test runner
# https://django-nose.readthedocs.io/en/latest/
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--cover-erase',
'--with-coverage',
'--cover-package=users',
'--exclude=lib',
'--cover-html',
'--with-xunit', # Add this and the following line
'--xunit-file=xunittest.xml', # xunittest.xml could be any name
]
# TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
# NOSE_ARGS = [
# '--cover-erase',
# '--with-coverage',
# '--cover-package=users',
# '--exclude=lib',
# '--cover-html',
# '--with-xunit', # Add this and the following line
# '--xunit-file=xunittest.xml', # xunittest.xml could be any name
# ]


# Default Django logging is WARNINGS+ to console
44 changes: 29 additions & 15 deletions docker-compose.yml
@@ -5,7 +5,7 @@ services:
image: postgres:10
container_name: database
ports:
- 5432:${POSTGRES_PORT:-5432}

django:
build:
@@ -14,25 +14,39 @@
image: django
container_name: django
ports:
- 8000:8000
volumes:
- .:/code
- ./static:/code/static
- ./media:/code/media
environment:
- UWSGI_UID=${UWSGI_UID:-1000}
- UWSGI_GID=${UWSGI_GID:-1000}

nginx:
image: nginx:latest
container_name: nginx
ports:
- 8080:80
- 8443:443
volumes:
- .:/code
- ./static:/code/static
- ./media:/code/media
- ./nginx/default.conf:/etc/nginx/conf.d/default.conf
- ./ssl/ssl_dev.crt:/etc/ssl/SSL.crt # SSL development certificate
- ./ssl/ssl_dev.key:/etc/ssl/SSL.key # SSL development key

neo4j:
image: rencinrig/neo4j-apoc:3.4.7
container_name: neo4j
ports:
- 7474:7474 # for HTTP
- 7473:7473 # for HTTPS
- 7687:7687 # for Bolt
volumes:
- ./neo4j/data:/data
- ./neo4j/logs:/logs
- ./neo4j:/imports
environment:
- NEO4J_AUTH=neo4j/neo4j123
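
With the new neo4j service defined, docker-compose up -d neo4j exposes Bolt on port 7687 using the credentials from NEO4J_AUTH. A quick connectivity check from the host, sketched with the same neo4j Python driver the workflow code uses (the URL and credentials below are read off the service definition above, not taken from the commit itself):

# Rough sketch, not part of the commit: verify the dockerized Neo4j is reachable.
from neo4j import GraphDatabase

driver = GraphDatabase.driver('bolt://localhost:7687', auth=('neo4j', 'neo4j123'))
with driver.session() as session:
    print(session.run('RETURN 1').single().value())  # prints 1 when the container is up
driver.close()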
1 change: 1 addition & 0 deletions lib/ns_workflow/MANIFEST.in
@@ -0,0 +1 @@
include ns_workflow/rules.json
2 changes: 2 additions & 0 deletions lib/ns_workflow/ns_workflow/__init__.py
@@ -0,0 +1,2 @@
from .abstract_workflow import WorkflowException, AbstractWorkflow
from .neo4j import Neo4jWorkflow
24 changes: 24 additions & 0 deletions lib/ns_workflow/ns_workflow/abstract_workflow.py
@@ -0,0 +1,24 @@
from abc import ABC, abstractmethod

class AbstractWorkflow(ABC):

@abstractmethod
def import_workflow(self, graph: str, graphId: str = None) -> str:
""" import graph from string giving it unique id
(externally provided or auto-generated)"""
return None

@abstractmethod
def validate_workflow(self, graphId: str) -> None:
pass

@abstractmethod
def delete_workflow(self, graphId: str) -> None:
pass

@abstractmethod
def count_nodes(self, graphId: str, nodeRole: str = None) -> int:
pass

class WorkflowException(Exception):
pass
132 changes: 132 additions & 0 deletions lib/ns_workflow/ns_workflow/neo4j.py
@@ -0,0 +1,132 @@
import uuid
import networkx as nx
import tempfile
import os
from neo4j import GraphDatabase
import neo4j
from typing import List, Set, Dict, Tuple, Optional
from .abstract_workflow import AbstractWorkflow, WorkflowException
import json
import time
import logging
import sys


class Neo4jWorkflow(AbstractWorkflow):

def __init__(self, url: str, user: str, pswd: str, importHostDir: str = None, importDir: str = None) -> None:
""" URL of Neo4j instance, credentials and directory
from where Neo4j can import graphs"""
self.url = url
self.user = user
self.pswd = pswd
self.importHostDir = importHostDir
self.importDir = importDir
self.driver = GraphDatabase.driver(self.url, auth=(user, pswd))

self.log = logging.getLogger(__name__)

def _prep_workflow(self, graphml: str, graphId: str = None) -> Tuple[str, str, str]:
"""Import a workflow graphml, assigning it a new unique graph ID
return the name of the file where graph is saved with updated GraphID
and the assigned graphID"""
if graphId is None:
graphId = str(uuid.uuid4())

# save to file
f1 = tempfile.NamedTemporaryFile(suffix="-graphml", mode='w')
f1.write(graphml)
f1.flush()  # make sure the GraphML is on disk before networkx reads it back

# read using networkx
g = nx.read_graphml(f1.name)
f1.close()

for n in list(g.nodes):
g.nodes[n]['GraphID'] = graphId

# save back to GraphML
# where to save is determined by whether importDir is set
destDir = self.importHostDir
if self.importHostDir is None:
destDir = tempfile.gettempdir()

uniqName = str(uuid.uuid4())
hostFileName = os.path.join(destDir, uniqName)
mappedFileName = os.path.join(self.importDir, uniqName)
nx.write_graphml(g, hostFileName)

return graphId, hostFileName, mappedFileName

def _import_workflow(self, graphmlFile: str) -> None:
""" import graph into Neo4j from a file"""

with self.driver.session() as session:
session.run('call apoc.import.graphml( $fileName, {batchSize: 10000, readLabels: true, storeNodeIds: true, defaultRelationshipType: "isPrerequisiteFor" } ) ',
fileName=graphmlFile)

def import_workflow(self, graphml: str, graphId: str = None) -> str:
""" import graph into Neo4j from a string, assigning it a unique id"""

id, hostFileName, mappedFileName = self._prep_workflow(graphml, graphId)
self.log.debug('Importing workflow with id %s for id %s', id, graphId)

if graphId is not None:
assert (id == graphId)

retry = 5
while retry > 0:
try:
self._import_workflow(mappedFileName)
retry = -1  # success, leave the retry loop
except neo4j.exceptions.ClientError as neo4jerr:
self.log.error("Unable to load, deleting graph %s", id)
self.delete_workflow(id)
retry = retry - 1
time.sleep(1.0)

# remove the file
os.unlink(hostFileName)

if retry == 0:
raise WorkflowException('Unable to load graph')

return id

def _validate_workflow(self, graphId: str, rulesFile: str) -> None:
""" validate the graph imported in Neo4j according to a set of given Cipher rules"""
f = open(rulesFile)
rulesDict = json.load(f)
f.close()

for r in rulesDict:
with self.driver.session() as session:
#print('Applying rule ', r['msg'])
v = session.run(r['rule'], graphId=graphId).single().value()
#print("Rule {}, value {}".format(r['msg'], v))
if v is False:
raise WorkflowException(r['msg'])

return True

def validate_workflow(self, graphId: str) -> None:
""" validate the graph imported in Neo4j according to standard Cipher rules """

self.log.debug('Validating workflow %s', graphId)
return self._validate_workflow(graphId, os.path.dirname(__file__) + '/rules.json')

def delete_workflow(self, graphId: str) -> None:
""" delete a workflow with this ID from Neo4j"""
self.log.debug('Deleting workflow %s', graphId)
with self.driver.session() as session:
session.run('match (n {GraphID: $graphId })detach delete n', graphId=graphId)

def count_nodes(self, graphId: str, nodeRole: str = None) -> int:
""" count the nodes of particular role in workflow"""
self.log.debug('Counting nodes in graph %s', graphId)
with self.driver.session() as session:
if nodeRole is None:
return session.run('match (n {GraphID: $graphId }) return count(n)',
graphId=graphId).single().value()
else:
return session.run('match (n {GraphID: $graphId, Role: $nodeRole} ) return count(n)',
graphId=graphId, nodeRole=nodeRole).single().value()
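
Taken together, a round trip through the class looks roughly like the sketch below. The connection details mirror the docker-compose service above, 'workflow.graphml' is a hypothetical input file, and importHostDir/importDir correspond to the ./neo4j volume mounted into the container at /imports:

# Rough usage sketch, not part of the commit; the file name and paths are assumptions.
from ns_workflow import Neo4jWorkflow

workflow = Neo4jWorkflow('bolt://localhost:7687', 'neo4j', 'neo4j123',
                         importHostDir='./neo4j', importDir='/imports')
with open('workflow.graphml') as f:
    graph_id = workflow.import_workflow(f.read())         # assigns and returns a new GraphID
try:
    workflow.validate_workflow(graph_id)                  # raises WorkflowException if a rule fails
    print(workflow.count_nodes(graph_id))                 # all nodes in the imported graph
    print(workflow.count_nodes(graph_id, nodeRole='PI'))  # nodes with a specific Role
finally:
    workflow.delete_workflow(graph_id)                    # remove the graph again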
34 changes: 34 additions & 0 deletions lib/ns_workflow/ns_workflow/rules.json
@@ -0,0 +1,34 @@
[
{
"rule": "MATCH (s {GraphID: $graphId, Type: \"Start\"}) RETURN count(*)=1",
"msg" : "There must be a single Start node."
},
{
"rule": "MATCH (s {GraphID: $graphId, Type: \"Stop\"}) RETURN count(*)=1",
"msg" : "There must be a single Stop node."
},
{
"rule": "MATCH (s {GraphID: $graphId}), p= (s) -[*]-> (s) RETURN size(collect(p))=0",
"msg" : "Workflow graph must not contain cycles."
},
{
"rule": "MATCH (s {GraphID: $graphId}), (d {GraphID: $graphId}), (e {GraphID: $graphId}) WHERE s.Type=\"Start\" AND NOT d.Type=\"Start\" and NOT e.Type=\"Start\" WITH s, d, count(e) AS TotalDest MATCH p= (s) -[*]-> (d) WITH d, count(p) AS cp, TotalDest RETURN TotalDest=size(collect(cp))",
"msg" : "Any node must be reachable from the Start node."
},
{
"rule": "MATCH (s {GraphID: $graphId}), (d {GraphID: $graphId}), (e {GraphID: $graphId}) WHERE NOT s.Type=\"Stop\" AND d.Type=\"Stop\" and NOT e.Type=\"Stop\" WITH s, d, count(e) AS TotalSrc MATCH p= (s) -[*]-> (d) WITH s, count(p) AS cp, TotalSrc RETURN TotalSrc=size(collect(cp))",
"msg" : "Stop node must be reachable from any node."
},
{
"rule": "MATCH (n {GraphID: $graphId, Type: \"Condition\"}) -[rel]-> () RETURN ALL(r IN collect(rel) WHERE r.Type IN [\"ConditionTrue\", \"ConditionFalse\"])",
"msg" : "Condition nodes must have two branches only (ConditionTrue and ConditionFalse)"
},
{
"rule": "MATCH (n {GraphID: $graphId}), (m {GraphID: $graphId}) WHERE n.ID=m.ID AND NOT id(n)=id(m) RETURN count(n) = 0",
"msg" : "All node IDs must be distinct."
},
{
"rule": "MATCH (n {GraphID: $graphId}) RETURN ALL(r IN collect(n) WHERE r.Role IN [\"None\", \"STAFF\", \"DP\", \"INP\", \"PI\", \"IG\"])",
"msg" : "Node roles should be \"None\", \"STAFF\", \"DP\", \"INP\", \"IG\", or \"PI\"."
}
]
15 changes: 15 additions & 0 deletions lib/ns_workflow/setup.py
@@ -0,0 +1,15 @@
from setuptools import setup

setup(name='ns_workflow',
version='0.1',
description='Notary Service Workflow using Neo4j/APOC',
url='https://github.com/RENCI-NRIG/notary-service',
author='Ilya Baldin',
author_email='ibaldin@renci.org',
license='MIT',
packages=['ns_workflow'],
install_requires=[
'neo4j',
'networkx',
],
zip_safe=False)
