Skip to content

Commit

Permalink
Merged master and fixed conflict in datafixtures
Browse files Browse the repository at this point in the history
  • Loading branch information
sbreiff committed Jul 9, 2020
2 parents 6dcf246 + ce85659 commit 2bbb948
Show file tree
Hide file tree
Showing 79 changed files with 2,125 additions and 868 deletions.
21 changes: 15 additions & 6 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,27 +19,36 @@ addons:
- make
- graphviz
- nginx
postgresql: '11.1'
env:
global:
- PGPORT=5433
- BOTO_CONFIG=/bogus
- PATH="/usr/share/elasticsearch/bin:/usr/lib/postgresql/9.4/bin:$PATH"
- PATH="/usr/share/elasticsearch/bin:/usr/lib/postgresql/11/bin:$PATH"
- ELASTIC_BEANSTALK_LABEL=$TRAVIS_COMMIT
- USER="4dn-dcic"
- SNO_REPO="snovault"
before_install:
- ls -dal /usr/lib/postgresql/*/bin/postgres
- find /usr/lib/postgresql -name 'postgres' -print
- ps auxww | grep postgres
- sudo apt-get install -yq --no-install-suggests --no-install-recommends postgresql-common
- sudo service postgresql stop
- sudo apt install -yq --no-install-suggests --no-install-recommends postgresql-11 postgresql-client-11
- sudo service postgresql status
- sudo service postgresql start 11
- sudo service postgresql status
- python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)"
- echo $tibanna_deploy
- postgres --version
- initdb --version
- ls -dal /usr/lib/postgresql/*/bin/postgres
- find /usr/lib/postgresql -name 'postgres' -print
- ps auxww | grep postgres
- nvm install 10 || (echo "Retrying nvm install" && sleep 5 && nvm install 10)
- node --version
- npm config set python /usr/bin/python2.7
install:
# need to manually change the version of six used by Travis for some reason
- pip uninstall -y six
- pip install six==1.11.0
- pip install --upgrade pip==19.0.3
- pip install --upgrade pip
- pip install poetry
- pip install coveralls
- pip install codacy-coverage
Expand Down
12 changes: 12 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,19 @@ deploy2: # spins up waittress to serve the application
deploy3: # uploads: GeneAnnotationFields, then Genes, then AnnotationFields, then Variant + VariantSamples
python src/encoded/commands/ingestion.py src/encoded/annotations/variant_table_v0.4.6.csv src/encoded/schemas/annotation_field.json src/encoded/schemas/variant.json src/encoded/schemas/variant_sample.json src/encoded/annotations/vcf_v0.4.6.vcf hms-dbmi hms-dbmi src/encoded/annotations/gene_table_v0.4.5.csv src/encoded/schemas/gene_annotation_field.json src/encoded/schemas/gene.json src/encoded/annotations/gene_inserts_v0.4.5.json hms-dbmi hms-dbmi development.ini --post-variant-consequences --post-variants --post-gene-annotation-field-inserts --post-gene-inserts --app-name app

# psql-dev: extracts the value after 'sqlalchemy.url =' from development.ini
# and hands it to psql. The '@' keeps the credentials-bearing URL out of
# make's command echo.
psql-dev: # starts psql with the url after 'sqlalchemy.url =' in development.ini
	@psql `grep 'sqlalchemy[.]url =' development.ini | sed -E 's/^.* = (.*)/\1/'`

# Starts a dockerized kibana (see scripts/kibana-start for details).
kibana-start:
	scripts/kibana-start

# Stops the dockerized kibana started by 'make kibana-start'.
kibana-stop:
	scripts/kibana-stop

# Each pkill is backgrounded ('&') so a non-match (pkill exits non-zero when
# no process matches) does not abort the recipe; the three processes are
# independent, so kill order does not matter.
# NOTE(review): these four command targets look phony — confirm they appear in
# the file's .PHONY declaration, which is not visible in this hunk.
kill: # kills back-end processes associated with the application. Use with care.
	pkill -f postgres &
	pkill -f elasticsearch &
	pkill -f moto_server &

clean-python:
@echo -n "Are you sure? This will wipe all libraries installed on this virtualenv [y/N] " && read ans && [ $${ans:-N} = y ]
Expand Down Expand Up @@ -103,6 +113,8 @@ info:
$(info - Use 'make deploy1' to spin up postgres/elasticsearch and load inserts.)
$(info - Use 'make deploy2' to spin up the application server.)
$(info - Use 'make deploy3' to load variants and genes.)
$(info - Use 'make psql-dev' to start psql on data associated with an active 'make deploy1'.)
$(info - Use 'make kibana-start' to start kibana, and 'make kibana-stop' to stop it.)
$(info - Use 'make kill' to kill postgres and elasticsearch processes. Please use with care.)
$(info - Use 'make moto-setup' to install moto, for less flaky tests. Implied by 'make build'.)
$(info - Use 'make npm-setup' to build the front-end. Implied by 'make build'.)
Expand Down
2 changes: 1 addition & 1 deletion examples/s3cp.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
r = requests.get(encode_url, auth=(AUTHID,AUTHPW), headers=HEADERS, allow_redirects=True, stream=True)
try:
r.raise_for_status
except:
except Exception:
print '%s href does not resolve' %(f_obj.get('accession'))
sys.exit()

Expand Down
2 changes: 1 addition & 1 deletion examples/submit_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@
)
try:
r.raise_for_status()
except:
except Exception:
print('Submission failed: %s %s' % (r.status_code, r.reason))
print(r.text)
raise
Expand Down
113 changes: 59 additions & 54 deletions poetry.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[tool.poetry]
# Note: Various modules refer to this system as "encoded", not "cgap-portal".
name = "encoded"
version = "2.1.1"
version = "2.1.9"
description = "Clinical Genomics Analysis Platform"
authors = ["4DN-DCIC Team <support@4dnucleome.org>"]
license = "MIT"
Expand Down Expand Up @@ -115,7 +115,7 @@ xlwt = "1.2.0"
"zope.sqlalchemy" = "^1.2"

[tool.poetry.dev-dependencies]
coverage = "4.0.3"
coverage = ">=5.1"
# flake8 = "3.7.8"
flaky = "3.6.1"
# flask only for moto[server]
Expand Down
33 changes: 33 additions & 0 deletions scripts/kibana-start
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
#!/bin/sh
# Starts a dockerized Kibana (5.6.16) on a local 'localnet' bridge network,
# pointed at the host's elasticsearch on :9200, then opens the Kibana
# dev-tools console in the default browser.
# NOTE(review): 'open' and host.docker.internal are macOS-oriented — on Linux
# this would need xdg-open and a different host address; confirm target OS.

docker --version

# Exit status of 'docker --version' (127 if docker is absent) tells us
# whether docker is installed at all.
if [ $? -ne 0 ]; then
  echo "Docker is not installed."
  open "https://docs.docker.com/docker-for-mac/install/" &
  exit 1
fi

existing_network=`docker network ls | grep localnet`

if [ -z "${existing_network}" ]; then
  docker network create localnet --driver=bridge
else
  echo "docker localnet is already set up. From 'docker network':"
  echo " ${existing_network}"
fi

# Match a running container whose image tag looks like kibana:<maj>.<min>….
# Fix: the dot between the version number groups was previously unescaped,
# so it matched ANY character (e.g. 'kibana:12x3'); escape it to match a
# literal dot only.
existing_kibana=`docker ps | egrep 'kibana:[0-9]*\.[0-9]+.*[ ].*'`

if [ -z "${existing_kibana}" ]; then

  # host.docker.internal lets the container reach the host's ES on :9200.
  docker run -d --network localnet -p 5601:5601 -e ELASTICSEARCH_URL=http://host.docker.internal:9200 kibana:5.6.16

else
  echo "Kibana is already running. From 'docker ps':"
  echo " ${existing_kibana}"
fi

local_kibana_url="http://localhost:5601/app/kibana#/dev_tools/console?_g=()"

echo "Opening kibana in browser at '${local_kibana_url}'..."
open "${local_kibana_url}" &

10 changes: 10 additions & 0 deletions scripts/kibana-stop
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#!/bin/sh
# Stops the dockerized Kibana container started by scripts/kibana-start.

# The first space-separated field of 'docker ps' output is the container id.
# NOTE(review): if several kibana containers are running, this captures all
# of their ids newline-joined into one quoted 'docker stop' argument, which
# would fail — confirm only one kibana container is expected at a time.
container=`docker ps | grep kibana | cut -f 1 -d ' '`

if [ -z "${container}" ]; then
  echo "kibana is not running."
  exit 1
else
  echo "Stopping kibana..."
  docker stop "${container}"
  echo "Stopped."
fi
2 changes: 1 addition & 1 deletion scripts/submit_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@
)
try:
r.raise_for_status()
except:
except Exception:
print('Submission failed: %s %s' % (r.status_code, r.reason))
print(r.text)
raise
Expand Down
4 changes: 2 additions & 2 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
[flake8]
max-line-length = 100
max-line-length = 160
[coverage:run]
branch = True
omit =
omit =
*/encode_schemas/*
*/encode_types/*
*/tests/*
Expand Down
2 changes: 1 addition & 1 deletion src/encoded/commands/add_date_created.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def run(testapp, collections=None, exclude=None, dry_run=False):

def main():
import argparse
parser = argparse.ArgumentParser(
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
description="Fix date_created", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
Expand Down
16 changes: 7 additions & 9 deletions src/encoded/commands/check_rendering.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

logger = logging.getLogger(__name__)


def check_path(testapp, path):
try:
res = testapp.get(path, status='*').maybe_follow(status='*')
Expand Down Expand Up @@ -49,16 +50,15 @@ def run(testapp, collections=None):
collection_path = resource_path(collection, '')
check_path(testapp, collection_path)
failed = 0
count = -1 # PyCharm worries the 'count' variable won't get set in the next loop if the collection is empty.
for count, item in enumerate(itervalues(collection)):
path = resource_path(item, '')
if not check_path(testapp, path):
failed += 1
if failed:
logger.info('Collection %s: %d of %d failed to render.',
collection_path, failed, count)
logger.info('Collection %s: %d of %d failed to render.', collection_path, failed, count)
else:
logger.info('Collection %s: all %d rendered ok',
collection_path, count)
logger.info('Collection %s: all %d rendered ok', collection_path, count)


def internal_app(configfile, app_name=None, username='TEST', accept='text/html'):
Expand All @@ -74,14 +74,13 @@ def internal_app(configfile, app_name=None, username='TEST', accept='text/html')

def main():
import argparse
parser = argparse.ArgumentParser(
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is specified wrong here.
description="Check rendering of items", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('--username', '-u', default='TEST',
help="User uuid/email")
parser.add_argument('--username', '-u', default='TEST', help="User uuid/email")
parser.add_argument('config_uri', help="path to configfile")
parser.add_argument('path', nargs='*', help="path to test")
args = parser.parse_args()
Expand All @@ -97,8 +96,7 @@ def main():
if not check_path(testapp, path):
failed += 1
if failed:
logger.info('Paths: %d of %d failed to render.',
failed, len(args.path))
logger.info('Paths: %d of %d failed to render.', failed, len(args.path))
else:
logger.info('Paths: all %d rendered ok', len(args.path))
else:
Expand Down
2 changes: 1 addition & 1 deletion src/encoded/commands/clear_variants_and_genes.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def main():
""" Wipes the variant + gene items in appropriate order """
logging.basicConfig()

parser = argparse.ArgumentParser(
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is invalid
description='Clear an item type out of metadata storage',
epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter
Expand Down
2 changes: 1 addition & 1 deletion src/encoded/commands/configure_kibana_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def main():
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.INFO)

parser = argparse.ArgumentParser(
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is invalid
description="Configure Kibana Index", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
Expand Down
18 changes: 10 additions & 8 deletions src/encoded/commands/create_mapping_on_deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from pyramid.paster import get_app
from snovault.elasticsearch.create_mapping import run as run_create_mapping
from dcicutils.log_utils import set_logging
from dcicutils.beanstalk_utils import whodaman
# from dcicutils.beanstalk_utils import whodaman

log = structlog.getLogger(__name__)
EPILOG = __doc__
Expand All @@ -19,13 +19,14 @@
'AccessKey',
'Cohort',
'Family',
'FilterSet',
'Nexus',
'User',
'Workflow',
'WorkflowMapping',
'WorkflowRun',
'WorkflowRunAwsem',
'VariantConsequence',
'Disorder',
'FileFormat',
'FileFastq',
'FileProcessed',
Expand All @@ -36,7 +37,6 @@
'Disorder',
'Individual',
'Case',
'FilterSet',
'Report',
'Document',
'QualityMetricBamcheck',
Expand All @@ -46,12 +46,12 @@
'QualityMetricCmphet',
'QualityMetricWorkflowrun',
'QualityMetricVcfcheck',
'QualityMetricVcfqc',
'TrackingItem',
'Software',
'Sample',
'SampleProcessing',
'StaticSection',
'Document',
'Page',
'AnnotationField',
'Variant',
Expand Down Expand Up @@ -81,7 +81,7 @@

BEANSTALK_PROD_ENVS = [
ENV_WEBPROD,
# ENV_WEBPROD2,
# ENV_WEBPROD2,
]


Expand Down Expand Up @@ -155,15 +155,16 @@ def _run_create_mapping(app, args):
if args.wipe_es: # override deploy_cfg WIPE_ES option
log.info('Overriding deploy_cfg and wiping ES')
deploy_cfg['WIPE_ES'] = True
run_create_mapping(app, check_first=(not deploy_cfg['WIPE_ES']), purge_queue=args.clear_queue, item_order=ITEM_INDEX_ORDER)
run_create_mapping(app, check_first=(not deploy_cfg['WIPE_ES']), purge_queue=args.clear_queue,
item_order=ITEM_INDEX_ORDER)
except Exception as e:
log.error("Exception encountered while gathering deployment information or running create_mapping")
log.error(str(e))
exit(1)


def main():
parser = argparse.ArgumentParser(
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is invalid
description="Create Elasticsearch mapping on deployment", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
Expand All @@ -176,7 +177,8 @@ def main():
app = get_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
set_logging(in_prod=app.registry.settings.get('production'), log_name=__name__, level=logging.DEBUG)
# set_logging(app.registry.settings.get('elasticsearch.server'), app.registry.settings.get('production'), level=logging.DEBUG)
# set_logging(app.registry.settings.get('elasticsearch.server'), app.registry.settings.get('production'),
# level=logging.DEBUG)

_run_create_mapping(app, args)
exit(0)
Expand Down
2 changes: 1 addition & 1 deletion src/encoded/commands/export_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def perform_request(uri, attempt = 1):

def main():
import argparse
parser = argparse.ArgumentParser(
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is invalid
description="Export Data", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
Expand Down
14 changes: 8 additions & 6 deletions src/encoded/commands/extract_test_data.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
import argparse
import csv
import loremipsum
import os
import random
import re
import sys
import traceback

from ..loadxl import *


Expand Down Expand Up @@ -128,7 +133,6 @@ def run(pipeline, inpath, outpath):


def main():
import argparse
parser = argparse.ArgumentParser(description='Extract test data set.')
parser.add_argument('--anonymize', '-a', action="store_true",
help="anonymize the data.")
Expand All @@ -138,15 +142,13 @@ def main():
help="directory to write filtered tsv files to.")
args = parser.parse_args()
pipeline = anon_pipeline() if args.anonymize else extract_pipeline()
import pdb
import sys
import traceback
try:
run(pipeline, args.inpath, args.outpath)
except:
except Exception:
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
# import pdb; pdb.post_mortem(tb)


if __name__ == '__main__':
main()
2 changes: 1 addition & 1 deletion src/encoded/commands/gene_ingestion.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def main():
--app-name app --post-annotation-field-inserts --post-inserts
"""
logging.basicConfig()
parser = argparse.ArgumentParser(
parser = argparse.ArgumentParser( # noqa - PyCharm wrongly thinks the formatter_class is invalid
description="Takes in a gene mapping table and produces inserts/schemas",
epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter
Expand Down
Loading

0 comments on commit 2bbb948

Please sign in to comment.