diff --git a/.travis.yml b/.travis.yml
index 5a191929f..9f807e2fe 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -30,7 +30,15 @@ install:
- pip install sphinx sphinx-bootstrap-theme coveralls 'ipython[all]==2.4.1'
- travis_retry pip install . --process-dependency-links
- 'echo "backend: Agg" > matplotlibrc'
-script:
+ # Install the biom plugin so we can run the analysis tests
+ - pip install https://github.com/qiita-spots/qiita_client/archive/master.zip
+ - pip install https://github.com/qiita-spots/qtp-biom/archive/master.zip --process-dependency-links
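+ # Export the location of the test server certificate and register the BIOM
+ # plugin by copying its config file into Qiita's default plugin directory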
+ - export QIITA_SERVER_CERT=`pwd`/qiita_core/support_files/server.crt
+ - mkdir ~/.qiita_plugins
+ - cp $PWD/qiita_core/support_files/BIOM\ type_2.1.4.conf ~/.qiita_plugins
+before_script:
+ # Some of the tests rely on the plugin system to complete successfully.
+ # Thus, we need a qiita webserver running to be able to execute the tests.
- export MOI_CONFIG_FP=`pwd`/qiita_core/support_files/config_test.cfg
- if [ ${TRAVIS_PULL_REQUEST} == "false" ]; then
export QIITA_CONFIG_FP=`pwd`/qiita_core/support_files/config_test_travis.cfg;
@@ -39,6 +47,12 @@ script:
- ipython profile create qiita-general --parallel
- qiita-env start_cluster qiita-general
- qiita-env make --no-load-ontologies
+ - |
+ if [ ${TEST_ADD_STUDIES} == "False" ]; then
+ qiita pet webserver --no-build-docs start &
+ fi
+script:
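+ # Give the webserver launched in before_script a few seconds to start
+ # before the tests run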
+ - sleep 5
- if [ ${TEST_ADD_STUDIES} == "True" ]; then test_data_studies/commands.sh ; fi
- if [ ${TEST_ADD_STUDIES} == "True" ]; then qiita-cron-job ; fi
- if [ ${TEST_ADD_STUDIES} == "False" ]; then qiita-test-install ; fi
diff --git a/qiita_core/support_files/BIOM type_2.1.4.conf b/qiita_core/support_files/BIOM type_2.1.4.conf
new file mode 100644
index 000000000..d3eb040b4
--- /dev/null
+++ b/qiita_core/support_files/BIOM type_2.1.4.conf
@@ -0,0 +1,13 @@
+[main]
+NAME = BIOM type
+VERSION = 2.1.4
+DESCRIPTION = The Biological Observation Matrix format
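+# ENVIRONMENT_SCRIPT is run to set up the plugin's environment;
+# START_SCRIPT is the command Qiita invokes to launch the plugin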
+ENVIRONMENT_SCRIPT = source activate qtp-biom
+START_SCRIPT = start_biom
+PLUGIN_TYPE = artifact definition
+PUBLICATIONS =
+
+[oauth2]
+SERVER_CERT = /home/travis/miniconda3/envs/qiita/lib/python2.7/site-packages/qiita_core/support_files/server.crt
+CLIENT_ID = dHgaXDwq665ksFPqfIoD3Jt8KRXdSioTRa4lGa5mGDnz6JTIBf
+CLIENT_SECRET = xqx61SD4M2EWbaS0WYv3H1nIemkvEAMIn16XMLjy5rTCqi7opCcWbfLINEwtV48bQ
diff --git a/qiita_core/support_files/config_test.cfg b/qiita_core/support_files/config_test.cfg
index 0e91b9176..8809c6f2d 100644
--- a/qiita_core/support_files/config_test.cfg
+++ b/qiita_core/support_files/config_test.cfg
@@ -23,7 +23,7 @@ LOG_DIR =
REQUIRE_APPROVAL = True
# Base URL: DO NOT ADD TRAILING SLASH
-BASE_URL = https://localhost
+BASE_URL = https://localhost:21174
# Download path files
UPLOAD_DATA_DIR = /tmp/
@@ -35,7 +35,7 @@ WORKING_DIR = /tmp/
MAX_UPLOAD_SIZE = 100
# Path to the base directory where the data files are going to be stored
-BASE_DATA_DIR =
+BASE_DATA_DIR = /home/travis/miniconda3/envs/qiita/lib/python2.7/site-packages/qiita_db/support_files/test_data/
# Valid upload extension, comma separated. Empty for no uploads
VALID_UPLOAD_EXTENSION = fastq,fastq.gz,txt,tsv,sff,fna,qual
diff --git a/qiita_db/artifact.py b/qiita_db/artifact.py
index 717579d72..3113a4bd7 100644
--- a/qiita_db/artifact.py
+++ b/qiita_db/artifact.py
@@ -311,8 +311,7 @@ def create(cls, filepaths, artifact_type, name=None, prep_template=None,
# them execute a set of common operations. Declare functions to avoid
# code duplication. These functions should not be used outside of the
# create function, hence declaring them here
- def _common_creation_steps(atype, cmd_id, data_type, cmd_parameters,
- fps, mv_files):
+ def _common_creation_steps(atype, cmd_id, data_type, cmd_parameters):
gen_timestamp = datetime.now()
visibility_id = qdb.util.convert_to_id("sandbox", "visibility")
atype_id = qdb.util.convert_to_id(atype, "artifact_type")
@@ -328,15 +327,6 @@ def _common_creation_steps(atype, cmd_id, data_type, cmd_parameters,
cmd_parameters, visibility_id, atype_id, False]
qdb.sql_connection.TRN.add(sql, sql_args)
a_id = qdb.sql_connection.TRN.execute_fetchlast()
- # Associate the artifact with its filepaths
- fp_ids = qdb.util.insert_filepaths(
- fps, a_id, atype, "filepath",
- move_files=mv_files, copy=(not mv_files))
- sql = """INSERT INTO qiita.artifact_filepath
- (artifact_id, filepath_id)
- VALUES (%s, %s)"""
- sql_args = [[a_id, fp_id] for fp_id in fp_ids]
- qdb.sql_connection.TRN.add(sql, sql_args, many=True)
qdb.sql_connection.TRN.execute()
return cls(a_id)
@@ -398,8 +388,7 @@ def _associate_with_analysis(instance, analysis_id):
instance = _common_creation_steps(
artifact_type, processing_parameters.command.id,
- dtypes.pop(), processing_parameters.dump(), filepaths,
- move_files)
+ dtypes.pop(), processing_parameters.dump())
_associate_with_study(instance, study_id)
else:
@@ -411,8 +400,7 @@ def _associate_with_analysis(instance, analysis_id):
if len(dtypes) > 1 else dtypes.pop())
instance = _common_creation_steps(
artifact_type, processing_parameters.command.id,
- data_type, processing_parameters.dump(), filepaths,
- move_files)
+ data_type, processing_parameters.dump())
_associate_with_analysis(instance, analysis_id)
# Associate the artifact with its parents
@@ -436,8 +424,7 @@ def _associate_with_analysis(instance, analysis_id):
# This artifact is uploaded by the user in the
# processing pipeline
instance = _common_creation_steps(
- artifact_type, None, prep_template.data_type(), None,
- filepaths, move_files)
+ artifact_type, None, prep_template.data_type(), None)
# Associate the artifact with the prep template
prep_template.artifact = instance
# Associate the artifact with the study
@@ -445,11 +432,20 @@ def _associate_with_analysis(instance, analysis_id):
else:
# This artifact is an initial artifact of an analysis
instance = _common_creation_steps(
- artifact_type, None, data_type, None, filepaths,
- move_files)
+ artifact_type, None, data_type, None)
# Associate the artifact with the analysis
analysis.add_artifact(instance)
+ # Associate the artifact with its filepaths
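+        # This is done once here, after the branch-specific creation above,
+        # so all artifact types share a single code path for registering files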
+ fp_ids = qdb.util.insert_filepaths(
+ filepaths, instance.id, artifact_type, "filepath",
+ move_files=move_files, copy=(not move_files))
+ sql = """INSERT INTO qiita.artifact_filepath
+ (artifact_id, filepath_id)
+ VALUES (%s, %s)"""
+ sql_args = [[instance.id, fp_id] for fp_id in fp_ids]
+ qdb.sql_connection.TRN.add(sql, sql_args, many=True)
+
if name:
instance.name = name
diff --git a/qiita_db/handlers/tests/test_plugin.py b/qiita_db/handlers/tests/test_plugin.py
index ecda5b62b..0fb108377 100644
--- a/qiita_db/handlers/tests/test_plugin.py
+++ b/qiita_db/handlers/tests/test_plugin.py
@@ -56,7 +56,9 @@ def test_get(self):
'for performing microbiome analysis from raw DNA '
'sequencing data',
'commands': ['Split libraries FASTQ', 'Split libraries',
- 'Pick closed-reference OTUs'],
+ 'Pick closed-reference OTUs', 'Summarize Taxa',
+ 'Beta Diversity', 'Alpha Rarefaction',
+ 'Single Rarefaction'],
'publications': [{'DOI': '10.1038/nmeth.f.303',
'PubMed': '20383131'}],
'default_workflows': ['FASTQ upstream workflow',
diff --git a/qiita_db/metadata_template/test/test_prep_template.py b/qiita_db/metadata_template/test/test_prep_template.py
index 05caf8af9..82d72a79a 100644
--- a/qiita_db/metadata_template/test/test_prep_template.py
+++ b/qiita_db/metadata_template/test/test_prep_template.py
@@ -930,7 +930,6 @@ def test_create_data_type_id(self):
def test_create_warning(self):
"""Warns if a required columns is missing for a given functionality
"""
- fp_count = qdb.util.get_count("qiita.filepath")
del self.metadata['barcode']
pt = npt.assert_warns(
qdb.exceptions.QiitaDBWarning,
diff --git a/qiita_db/test/test_analysis.py b/qiita_db/test/test_analysis.py
index a83f40a5b..ca300af79 100644
--- a/qiita_db/test/test_analysis.py
+++ b/qiita_db/test/test_analysis.py
@@ -66,6 +66,8 @@ def tearDown(self):
def _wait_for_jobs(self, analysis):
for j in analysis.jobs:
wait_for_processing_job(j.id)
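+            # If the job failed, print its error log so the cause shows up
+            # in the test output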
+ if j.status == 'error':
+ print j.log.msg
def _create_analyses_with_samples(self, user='demo@microbio.me',
merge=False):
@@ -129,21 +131,11 @@ def test_can_be_publicized(self):
analysis = qdb.analysis.Analysis(1)
self.assertFalse(analysis.can_be_publicized)
a4 = qdb.artifact.Artifact(4)
- a5 = qdb.artifact.Artifact(5)
- a6 = qdb.artifact.Artifact(6)
a4.visibility = 'public'
- self.assertFalse(analysis.can_be_publicized)
-
- a5.visibility = 'public'
- self.assertFalse(analysis.can_be_publicized)
-
- a6.visibility = 'public'
self.assertTrue(analysis.can_be_publicized)
a4.visibility = 'private'
- a5.visibility = 'private'
- a6.visibility = 'private'
self.assertFalse(analysis.can_be_publicized)
def test_add_artifact(self):
diff --git a/qiita_db/test/test_setup.py b/qiita_db/test/test_setup.py
index fc1f305d5..c85aafa1d 100644
--- a/qiita_db/test/test_setup.py
+++ b/qiita_db/test/test_setup.py
@@ -36,7 +36,7 @@ def test_study_experimental_factor(self):
self.assertEqual(get_count("qiita.study_experimental_factor"), 1)
def test_filepath(self):
- self.assertEqual(get_count("qiita.filepath"), 26)
+ self.assertEqual(get_count("qiita.filepath"), 25)
def test_filepath_type(self):
self.assertEqual(get_count("qiita.filepath_type"), 21)
@@ -66,7 +66,7 @@ def test_analysis(self):
self.assertEqual(get_count("qiita.analysis"), 10)
def test_analysis_filepath(self):
- self.assertEqual(get_count("qiita.analysis_filepath"), 2)
+ self.assertEqual(get_count("qiita.analysis_filepath"), 1)
def test_analysis_sample(self):
self.assertEqual(get_count("qiita.analysis_sample"), 31)
diff --git a/qiita_pet/handlers/analysis_handlers.py b/qiita_pet/handlers/analysis_handlers.py
deleted file mode 100644
index 775ff27f2..000000000
--- a/qiita_pet/handlers/analysis_handlers.py
+++ /dev/null
@@ -1,418 +0,0 @@
-r"""
-Qiita analysis handlers for the Tornado webserver.
-
-"""
-# -----------------------------------------------------------------------------
-# Copyright (c) 2014--, The Qiita Development Team.
-#
-# Distributed under the terms of the BSD 3-clause License.
-#
-# The full license is in the file LICENSE, distributed with this software.
-# -----------------------------------------------------------------------------
-from __future__ import division
-from future.utils import viewitems
-from collections import defaultdict
-from os.path import join, sep, commonprefix, basename, dirname
-from json import dumps
-from functools import partial
-
-from tornado.web import authenticated, HTTPError, StaticFileHandler
-from tornado.gen import coroutine, Task
-from moi import ctx_default, r_client
-from moi.job import submit
-from moi.group import get_id_from_user, create_info
-
-from qiita_pet.util import is_localhost
-from qiita_pet.handlers.base_handlers import BaseHandler
-from qiita_pet.handlers.util import (
- download_link_or_path, get_shared_links)
-from qiita_pet.exceptions import QiitaPetAuthorizationError
-from qiita_ware.dispatchable import run_analysis
-from qiita_db.analysis import Analysis
-from qiita_db.artifact import Artifact
-from qiita_db.job import Command
-from qiita_db.user import User
-from qiita_db.util import (get_db_files_base_dir, add_message,
- check_access_to_analysis_result,
- filepath_ids_to_rel_paths, get_filepath_id)
-from qiita_db.exceptions import QiitaDBUnknownIDError
-from qiita_db.logger import LogEntry
-from qiita_db.reference import Reference
-from qiita_core.util import execute_as_transaction
-from qiita_core.qiita_settings import qiita_config
-
-SELECT_SAMPLES = 2
-SELECT_COMMANDS = 3
-
-
-def check_analysis_access(user, analysis):
- """Checks whether user has access to an analysis
-
- Parameters
- ----------
- user : User object
- User to check
- analysis : Analysis object
- Analysis to check access for
-
- Raises
- ------
- RuntimeError
- Tried to access analysis that user does not have access to
- """
- if not analysis.has_access(user):
- raise HTTPError(403, "Analysis access denied to %s" % (analysis.id))
-
-
-class SelectCommandsHandler(BaseHandler):
- """Select commands to be executed"""
- @authenticated
- @execute_as_transaction
- def get(self):
- analysis_id = int(self.get_argument('aid'))
- analysis = Analysis(analysis_id)
- check_analysis_access(self.current_user, analysis)
-
- data_types = analysis.data_types
- commands = Command.get_commands_by_datatype()
-
- self.render('select_commands.html',
- commands=commands, data_types=data_types, aid=analysis.id)
-
- @authenticated
- @execute_as_transaction
- def post(self):
- name = self.get_argument('name')
- desc = self.get_argument('description')
- analysis = Analysis.create(self.current_user, name, desc,
- from_default=True)
- # set to third step since this page is third step in workflow
- analysis.step = SELECT_COMMANDS
- data_types = analysis.data_types
- commands = Command.get_commands_by_datatype()
- self.render('select_commands.html',
- commands=commands, data_types=data_types, aid=analysis.id)
-
-
-class AnalysisWaitHandler(BaseHandler):
- @authenticated
- @execute_as_transaction
- def get(self, analysis_id):
- analysis_id = int(analysis_id)
- try:
- analysis = Analysis(analysis_id)
- except QiitaDBUnknownIDError:
- raise HTTPError(404, "Analysis %d does not exist" % analysis_id)
- else:
- check_analysis_access(self.current_user, analysis)
-
- group_id = r_client.hget('analyis-map', analysis_id)
- self.render("analysis_waiting.html",
- group_id=group_id, aname=analysis.name)
-
- @authenticated
- @execute_as_transaction
- def post(self, analysis_id):
- analysis_id = int(analysis_id)
- rarefaction_depth = self.get_argument('rarefaction-depth')
- mdsi = self.get_argument('merge-duplicated-sample-ids', default=False)
- if mdsi == 'on':
- mdsi = True
-
- # convert to integer if rarefaction level given
- if rarefaction_depth:
- rarefaction_depth = int(rarefaction_depth)
- else:
- rarefaction_depth = None
- analysis = Analysis(analysis_id)
- check_analysis_access(self.current_user, analysis)
-
- command_args = self.get_arguments("commands")
- cmd_split = [x.split("#") for x in command_args]
-
- moi_user_id = get_id_from_user(self.current_user.id)
- moi_group = create_info(
- analysis_id, 'group', url='%s/analysis/' % qiita_config.portal_dir,
- parent=moi_user_id, store=True)
- moi_name = ("Creating %s... When finished, please click the 'Success' "
- "link to the right" % analysis.name)
- moi_result_url = '%s/analysis/results/%d' % (qiita_config.portal_dir,
- analysis_id)
-
- submit(ctx_default, moi_group['id'], moi_name,
- moi_result_url, run_analysis, analysis_id, cmd_split,
- rarefaction_depth=rarefaction_depth,
- merge_duplicated_sample_ids=mdsi)
-
- r_client.hset('analyis-map', analysis_id, moi_group['id'])
-
- self.render("analysis_waiting.html",
- group_id=moi_group['id'], aname=analysis.name)
-
-
-class AnalysisResultsHandler(BaseHandler):
- @authenticated
- @execute_as_transaction
- def get(self, analysis_id):
- analysis_id = int(analysis_id.split("/")[0])
- analysis = Analysis(analysis_id)
- check_analysis_access(self.current_user, analysis)
-
- jobres = defaultdict(list)
- for jobject in analysis.jobs:
- results = []
- for res in jobject.results:
- name = basename(res)
- if name.startswith('index'):
- name = basename(dirname(res)).replace('_', ' ')
- results.append((res, name))
- jobres[jobject.datatype].append((jobject.command[0],
- results))
-
- dropped_samples = analysis.dropped_samples
- dropped = defaultdict(list)
- for proc_data_id, samples in viewitems(dropped_samples):
- if not samples:
- continue
- proc_data = Artifact(proc_data_id)
- data_type = proc_data.data_type
- dropped[data_type].append((proc_data.study.title, len(samples),
- ', '.join(samples)))
- share_access = (self.current_user.id in analysis.shared_with or
- self.current_user.id == analysis.owner)
-
- self.render("analysis_results.html", analysis_id=analysis_id,
- jobres=jobres, aname=analysis.name, dropped=dropped,
- basefolder=get_db_files_base_dir(),
- share_access=share_access)
-
- @authenticated
- @execute_as_transaction
- def post(self, analysis_id):
- analysis_id = int(analysis_id.split("/")[0])
- analysis_id_sent = int(self.get_argument('analysis_id'))
- action = self.get_argument('action')
-
- if analysis_id != analysis_id_sent or action != 'delete_analysis':
- raise QiitaPetAuthorizationError(
- self.current_user.id,
- 'analysis/results/%d-delete' % analysis_id)
-
- analysis = Analysis(analysis_id)
- analysis_name = analysis.name
- check_analysis_access(self.current_user, analysis)
-
- try:
- Analysis.delete(analysis_id)
- msg = ("Analysis %s has been deleted." % (
- analysis_name))
- level = "success"
- except Exception as e:
- e = str(e)
- msg = ("Couldn't remove %s analysis: %s" % (
- analysis_name, e))
- level = "danger"
- LogEntry.create('Runtime', "Couldn't remove analysis ID %d: %s" %
- (analysis_id, e))
-
- self.redirect(u"%s/analysis/show/?level=%s&message=%s"
- % (qiita_config.portal_dir, level, msg))
-
-
-class ShowAnalysesHandler(BaseHandler):
- """Shows the user's analyses"""
- @authenticated
- @execute_as_transaction
- def get(self):
- message = self.get_argument('message', '')
- level = self.get_argument('level', '')
- user = self.current_user
-
- analyses = user.shared_analyses | user.private_analyses
-
- is_local_request = is_localhost(self.request.headers['host'])
- gfi = partial(get_filepath_id, 'analysis')
- dlop = partial(download_link_or_path, is_local_request)
- mappings = {}
- bioms = {}
- tgzs = {}
- for analysis in analyses:
- _id = analysis.id
- # getting mapping file
- mapping = analysis.mapping_file
- if mapping is not None:
- mappings[_id] = dlop(mapping, gfi(mapping), 'mapping file')
- else:
- mappings[_id] = ''
- # getting biom tables
- links = [dlop(f, gfi(f), l)
- for l, f in viewitems(analysis.biom_tables)]
- bioms[_id] = '\n'.join(links)
- # getting tgz file
- tgz = analysis.tgz
- if tgz is not None:
- tgzs[_id] = dlop(tgz, gfi(tgz), 'tgz file')
- else:
- tgzs[_id] = ''
-
- self.render("show_analyses.html", analyses=analyses, message=message,
- level=level, is_local_request=is_local_request,
- mappings=mappings, bioms=bioms, tgzs=tgzs)
-
- @authenticated
- @execute_as_transaction
- def post(self):
- analysis_id = int(self.get_argument('analysis_id'))
- analysis = Analysis(analysis_id)
- analysis_name = analysis.name.decode('utf-8')
-
- check_analysis_access(self.current_user, analysis)
-
- try:
- Analysis.delete(analysis_id)
- msg = ("Analysis %s has been deleted." % (
- analysis_name))
- level = "success"
- except Exception as e:
- e = str(e)
- msg = ("Couldn't remove %s analysis: %s" % (
- analysis_name, e))
- level = "danger"
- LogEntry.create('Runtime', "Couldn't remove analysis ID %d: %s" %
- (analysis_id, e))
-
- self.redirect(u"%s/analysis/show/?level=%s&message=%s"
- % (qiita_config.portal_dir, level, msg))
-
-
-class ResultsHandler(StaticFileHandler, BaseHandler):
- @execute_as_transaction
- def validate_absolute_path(self, root, absolute_path):
- """Overrides StaticFileHandler's method to include authentication
- """
- # Get the filename (or the base directory) of the result
- if root[-1] != '/':
- root = "%s/" % root
- len_prefix = len(commonprefix([root, absolute_path]))
- base_requested_fp = absolute_path[len_prefix:].split(sep, 1)[0]
-
- current_user = self.current_user
-
- # If the user is an admin, then allow access
- if current_user.level == 'admin':
- return super(ResultsHandler, self).validate_absolute_path(
- root, absolute_path)
-
- # otherwise, we have to check if they have access to the requested
- # resource
- user_id = current_user.id
- accessible_filepaths = check_access_to_analysis_result(
- user_id, base_requested_fp)
-
- # Turn these filepath IDs into absolute paths
- db_files_base_dir = get_db_files_base_dir()
- relpaths = filepath_ids_to_rel_paths(accessible_filepaths)
-
- accessible_filepaths = {join(db_files_base_dir, relpath)
- for relpath in relpaths.values()}
-
- # check if the requested resource is a file (or is in a directory) that
- # the user has access to
- if join(root, base_requested_fp) in accessible_filepaths:
- return super(ResultsHandler, self).validate_absolute_path(
- root, absolute_path)
- else:
- raise QiitaPetAuthorizationError(user_id, absolute_path)
-
-
-class SelectedSamplesHandler(BaseHandler):
- @authenticated
- @execute_as_transaction
- def get(self):
- # Format sel_data to get study IDs for the processed data
- sel_data = defaultdict(dict)
- proc_data_info = {}
- sel_samps = self.current_user.default_analysis.samples
- for aid, samples in viewitems(sel_samps):
- a = Artifact(aid)
- sel_data[a.study][aid] = samples
- # Also get processed data info
- processing_parameters = a.processing_parameters
- if processing_parameters is None:
- params = None
- algorithm = None
- else:
- cmd = processing_parameters.command
- params = processing_parameters.values
- if 'reference' in params:
- ref = Reference(params['reference'])
- del params['reference']
-
- params['reference_name'] = ref.name
- params['reference_version'] = ref.version
- algorithm = '%s (%s)' % (cmd.software.name, cmd.name)
-
- proc_data_info[aid] = {
- 'processed_date': str(a.timestamp),
- 'algorithm': algorithm,
- 'data_type': a.data_type,
- 'params': params
- }
-
- self.render("analysis_selected.html", sel_data=sel_data,
- proc_info=proc_data_info)
-
-
-class AnalysisSummaryAJAX(BaseHandler):
- @authenticated
- @execute_as_transaction
- def get(self):
- info = self.current_user.default_analysis.summary_data()
- self.write(dumps(info))
-
-
-class ShareAnalysisAJAX(BaseHandler):
- @execute_as_transaction
- def _get_shared_for_study(self, analysis, callback):
- shared_links = get_shared_links(analysis)
- users = [u.email for u in analysis.shared_with]
- callback((users, shared_links))
-
- @execute_as_transaction
- def _share(self, analysis, user, callback):
- user = User(user)
- add_message('Analysis \'%s\' '
- 'has been shared with you.' %
- (qiita_config.portal_dir, analysis.id, analysis.name),
- [user])
- callback(analysis.share(user))
-
- @execute_as_transaction
- def _unshare(self, analysis, user, callback):
- user = User(user)
- add_message('Analysis \'%s\' has been unshared from you.' %
- analysis.name, [user])
- callback(analysis.unshare(user))
-
- @authenticated
- @coroutine
- @execute_as_transaction
- def get(self):
- analysis_id = int(self.get_argument('id'))
- analysis = Analysis(analysis_id)
- if self.current_user != analysis.owner:
- raise HTTPError(403, 'User %s does not have permissions to share '
- 'analysis %s' % (
- self.current_user.id, analysis.id))
-
- selected = self.get_argument('selected', None)
- deselected = self.get_argument('deselected', None)
-
- if selected is not None:
- yield Task(self._share, analysis, selected)
- if deselected is not None:
- yield Task(self._unshare, analysis, deselected)
-
- users, links = yield Task(self._get_shared_for_study, analysis)
-
- self.write(dumps({'users': users, 'links': links}))
diff --git a/qiita_pet/handlers/analysis_handlers/tests/test_base_handlers.py b/qiita_pet/handlers/analysis_handlers/tests/test_base_handlers.py
index 81810a045..f68128e78 100644
--- a/qiita_pet/handlers/analysis_handlers/tests/test_base_handlers.py
+++ b/qiita_pet/handlers/analysis_handlers/tests/test_base_handlers.py
@@ -13,6 +13,7 @@
from moi import r_client
from qiita_core.util import qiita_test_checker
+from qiita_core.testing import wait_for_processing_job
from qiita_db.user import User
from qiita_db.analysis import Analysis
from qiita_pet.test.tornado_test_base import TestHandlerBase
@@ -102,6 +103,13 @@ def test_post_create_analysis_handler(self):
r"http://localhost:\d+/analysis/description/\d+/")
self.assertEqual(response.code, 200)
+ # The new analysis id is located at the -2 position (see regex above)
+ new_id = response.effective_url.split('/')[-2]
+ a = Analysis(new_id)
+        # Make sure that all jobs have completed before this test exits
+ for j in a.jobs:
+ wait_for_processing_job(j.id)
+
def test_get_analysis_description_handler(self):
response = self.get('/analysis/description/1/')
self.assertEqual(response.code, 200)
diff --git a/qiita_pet/handlers/analysis_handlers/tests/test_listing_handlers.py b/qiita_pet/handlers/analysis_handlers/tests/test_listing_handlers.py
index f4e5742b5..739333dc9 100644
--- a/qiita_pet/handlers/analysis_handlers/tests/test_listing_handlers.py
+++ b/qiita_pet/handlers/analysis_handlers/tests/test_listing_handlers.py
@@ -28,5 +28,6 @@ def test_get_selected_samples_handler(self):
# Make sure page response loaded sucessfully
self.assertEqual(response.code, 200)
+
if __name__ == '__main__':
main()
diff --git a/qiita_pet/handlers/api_proxy/tests/test_artifact.py b/qiita_pet/handlers/api_proxy/tests/test_artifact.py
index 77a9dd09e..564bdd429 100644
--- a/qiita_pet/handlers/api_proxy/tests/test_artifact.py
+++ b/qiita_pet/handlers/api_proxy/tests/test_artifact.py
@@ -20,7 +20,7 @@
from qiita_db.artifact import Artifact
from qiita_db.metadata_template.prep_template import PrepTemplate
from qiita_db.study import Study
-from qiita_db.util import get_count, get_mountpoint
+from qiita_db.util import get_mountpoint
from qiita_db.processing_job import ProcessingJob
from qiita_db.user import User
from qiita_db.software import Command, Parameters, DefaultParameters
@@ -107,7 +107,11 @@ def test_artifact_types_get_req(self):
['FASTA_Sanger', None],
['FASTQ', None],
['SFF', None],
- ['per_sample_FASTQ', None]]}
+ ['per_sample_FASTQ', None],
+ ['distance_matrix', 'Distance matrix holding pairwise'
+ ' distance between samples'],
+ ['rarefaction_curves', 'Rarefaction curves'],
+ ['taxa_summary', 'Taxa summary plots']]}
self.assertEqual(obs['message'], exp['message'])
self.assertEqual(obs['status'], exp['status'])
@@ -447,7 +451,6 @@ def test_artifact_post_req(self):
pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
self._files_to_remove.extend([fp for _, fp in pt.get_filepaths()])
- new_artifact_id = get_count('qiita.artifact') + 1
obs = artifact_post_req(
'test@foo.bar', {}, 'Demultiplexed', 'New Test Artifact 2',
pt.id, 3)
@@ -458,7 +461,7 @@ def test_artifact_post_req(self):
wait_for_prep_information_job(pt.id)
# Instantiate the artifact to make sure it was made and
# to clean the environment
- a = Artifact(new_artifact_id)
+ a = Artifact(pt.artifact.id)
self._files_to_remove.extend([fp for _, fp, _ in a.filepaths])
def test_artifact_post_req_error(self):
diff --git a/qiita_pet/handlers/api_proxy/tests/test_prep_template.py b/qiita_pet/handlers/api_proxy/tests/test_prep_template.py
index 48133cdea..c7477b18b 100644
--- a/qiita_pet/handlers/api_proxy/tests/test_prep_template.py
+++ b/qiita_pet/handlers/api_proxy/tests/test_prep_template.py
@@ -52,7 +52,7 @@ def test_new_prep_template_get_req(self):
'status': 'success',
'prep_files': ['uploaded_file.txt'],
'data_types': ['16S', '18S', 'ITS', 'Metabolomic', 'Metagenomic',
- 'Proteomic'],
+ 'Multiomic', 'Proteomic'],
'ontology': {
'ENA': ['Cancer Genomics', 'Epigenetics', 'Exome Sequencing',
'Forensic or Paleo-genomics', 'Gene Regulation Study',
diff --git a/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py b/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py
index 337b02614..da3beaeee 100644
--- a/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py
+++ b/qiita_pet/handlers/artifact_handlers/tests/test_base_handlers.py
@@ -10,11 +10,14 @@
from tempfile import mkstemp
from os import close, remove
from os.path import basename, exists, relpath
+from json import loads
from tornado.web import HTTPError
+from moi import r_client
+from qiita_core.qiita_settings import qiita_config
+from qiita_core.testing import wait_for_prep_information_job
from qiita_core.util import qiita_test_checker
-from qiita_db.util import get_db_files_base_dir
from qiita_db.user import User
from qiita_db.artifact import Artifact
from qiita_db.processing_job import ProcessingJob
@@ -23,13 +26,15 @@
from qiita_pet.test.tornado_test_base import TestHandlerBase
from qiita_pet.handlers.artifact_handlers.base_handlers import (
check_artifact_access, artifact_summary_get_request,
- artifact_summary_post_request, artifact_patch_request)
+ artifact_summary_post_request, artifact_patch_request,
+ artifact_post_req)
@qiita_test_checker()
class TestBaseHandlersUtils(TestCase):
def setUp(self):
self._files_to_remove = []
+ self.maxDiff = None
def tearDown(self):
for fp in self._files_to_remove:
@@ -63,6 +68,16 @@ def test_check_artifact_access(self):
a.visibility = 'public'
check_artifact_access(demo_u, a)
+ def _assert_summary_equal(self, obs, exp):
+ "Utility function for testing the artifact summary get request"
+ obs_files = obs.pop('files')
+ exp_files = exp.pop('files')
+ self.assertItemsEqual(obs_files, exp_files)
+ obs_jobs = obs.pop('processing_jobs')
+        exp_jobs = exp.pop('processing_jobs')
+ self.assertItemsEqual(obs_jobs, exp_jobs)
+ self.assertEqual(obs, exp)
+
def test_artifact_summary_get_request(self):
user = User('test@foo.bar')
# Artifact w/o summary
@@ -136,7 +151,7 @@ def test_artifact_summary_get_request(self):
(a.html_summary_fp[0],
'%s (html summary)' % basename(a.html_summary_fp[1])))
exp_summary_path = relpath(
- a.html_summary_fp[1], get_db_files_base_dir())
+ a.html_summary_fp[1], qiita_config.base_data_dir)
obs = artifact_summary_get_request(user, 1)
exp = {'name': 'Raw data 1',
'artifact_id': 1,
@@ -181,7 +196,7 @@ def test_artifact_summary_get_request(self):
self.assertEqual(obs, exp)
# returnig to private
- a.visibility = 'sandbox'
+ a.visibility = 'private'
# admin gets buttons
obs = artifact_summary_get_request(User('admin@foo.bar'), 2)
@@ -253,6 +268,18 @@ def test_artifact_summary_post_request(self):
exp = {'job': [job.id, 'queued', None]}
self.assertEqual(obs, exp)
+ def test_artifact_post_request(self):
+ # No access
+ with self.assertRaises(QiitaHTTPError):
+ artifact_post_req(User('demo@microbio.me'), 1)
+
+ artifact_post_req(User('test@foo.bar'), 2)
+ # Wait until the job is completed
+ wait_for_prep_information_job(1)
+        # Check that the delete function was actually called; deleting
+        # artifact 2 is expected to fail, so its job log records the error
+ obs = r_client.get(loads(r_client.get('prep_template_1'))['job_id'])
+ self.assertIn('Cannot delete artifact 2', obs)
+
def test_artifact_patch_request(self):
a = Artifact(1)
test_user = User('test@foo.bar')
@@ -326,6 +353,11 @@ def test_get_artifact_summary_ajax_handler(self):
response = self.get('/artifact/1/summary/')
self.assertEqual(response.code, 200)
+ def test_post_artifact_ajax_handler(self):
+ response = self.post('/artifact/2/', {})
+ self.assertEqual(response.code, 200)
+ wait_for_prep_information_job(1)
+
def test_patch_artifact_ajax_handler(self):
a = Artifact(1)
self.assertEqual(a.name, 'Raw data 1')
@@ -346,7 +378,7 @@ def test_get_artifact_summary_handler(self):
a.html_summary_fp = fp
self._files_to_remove.extend([fp, a.html_summary_fp[1]])
- summary = relpath(a.html_summary_fp[1], get_db_files_base_dir())
+ summary = relpath(a.html_summary_fp[1], qiita_config.base_data_dir)
response = self.get('/artifact/html_summary/%s' % summary)
self.assertEqual(response.code, 200)
self.assertEqual(response.body, 'HTML TEST - not important\n')
diff --git a/qiita_pet/handlers/artifact_handlers/tests/test_process_handlers.py b/qiita_pet/handlers/artifact_handlers/tests/test_process_handlers.py
index 088a03dfa..6337f35cd 100644
--- a/qiita_pet/handlers/artifact_handlers/tests/test_process_handlers.py
+++ b/qiita_pet/handlers/artifact_handlers/tests/test_process_handlers.py
@@ -49,5 +49,6 @@ def test_get_process_artifact_handler(self):
self.assertNotEqual(response.body, "")
self.assertIn('load_artifact_type(params.nodes, true);', response.body)
+
if __name__ == '__main__':
main()
diff --git a/qiita_pet/handlers/study_handlers/__init__.py b/qiita_pet/handlers/study_handlers/__init__.py
index 423d6a66f..0cd6011d3 100644
--- a/qiita_pet/handlers/study_handlers/__init__.py
+++ b/qiita_pet/handlers/study_handlers/__init__.py
@@ -21,7 +21,7 @@
ListOptionsHandler, WorkflowHandler,
WorkflowRunHandler, JobAJAX)
from .artifact import (ArtifactGraphAJAX, NewArtifactHandler,
- ArtifactAdminAJAX, ArtifactAJAX, ArtifactSummaryAJAX,
+ ArtifactAdminAJAX, ArtifactSummaryAJAX,
ArtifactGetSamples)
from .sample_template import SampleTemplateAJAX, SampleAJAX
@@ -32,7 +32,7 @@
'StudyBaseInfoAJAX', 'SampleTemplateAJAX', 'PrepTemplateAJAX',
'NewArtifactHandler', 'PrepFilesHandler', 'ProcessArtifactHandler',
'ListCommandsHandler', 'ListOptionsHandler', 'SampleAJAX',
- 'StudyDeleteAjax', 'ArtifactAJAX', 'NewPrepTemplateAjax',
+ 'StudyDeleteAjax', 'NewPrepTemplateAjax',
'DataTypesMenuAJAX', 'StudyFilesAJAX', 'PrepTemplateSummaryAJAX',
'ArtifactSummaryAJAX', 'WorkflowHandler', 'WorkflowRunHandler',
'JobAJAX', 'AutocompleteHandler', 'StudyGetTags', 'StudyTags',
diff --git a/qiita_pet/handlers/study_handlers/artifact.py b/qiita_pet/handlers/study_handlers/artifact.py
index 3f1e3ccfa..9501c65fe 100644
--- a/qiita_pet/handlers/study_handlers/artifact.py
+++ b/qiita_pet/handlers/study_handlers/artifact.py
@@ -14,9 +14,9 @@
from qiita_pet.handlers.base_handlers import BaseHandler
from qiita_pet.handlers.api_proxy import (
artifact_graph_get_req, artifact_types_get_req, artifact_post_req,
- artifact_status_put_req, artifact_get_req, artifact_delete_req,
+ artifact_status_put_req, artifact_get_req,
artifact_summary_get_request, artifact_summary_post_request,
- artifact_patch_request, artifact_get_prep_req)
+ artifact_get_prep_req)
from qiita_core.util import execute_as_transaction
from qiita_core.qiita_settings import qiita_config
@@ -77,36 +77,6 @@ def post(self):
self.write(res)
-class ArtifactAJAX(BaseHandler):
- @authenticated
- def get(self):
- artifact_id = to_int(self.get_argument('artifact_id'))
- name = artifact_get_req(self.current_user.id, artifact_id)['name']
- self.write(name)
-
- @authenticated
- def post(self):
- artifact_id = to_int(self.get_argument('artifact_id'))
- self.write(artifact_delete_req(artifact_id, self.current_user.id))
-
- @authenticated
- def patch(self):
- """Patches a prep template in the system
-
- Follows the JSON PATCH specification:
- https://tools.ietf.org/html/rfc6902
- """
- req_op = self.get_argument('op')
- req_path = self.get_argument('path')
- req_value = self.get_argument('value', None)
- req_from = self.get_argument('from', None)
-
- response = artifact_patch_request(
- self.current_user.id, req_op, req_path, req_value, req_from)
-
- self.write(response)
-
-
class ArtifactGetSamples(BaseHandler):
@authenticated
def get(self):
diff --git a/qiita_pet/handlers/study_handlers/tests/test_artifact.py b/qiita_pet/handlers/study_handlers/tests/test_artifact.py
index aaa9531d3..880699d04 100644
--- a/qiita_pet/handlers/study_handlers/tests/test_artifact.py
+++ b/qiita_pet/handlers/study_handlers/tests/test_artifact.py
@@ -128,18 +128,6 @@ def test_post_artifact(self):
wait_for_prep_information_job(self.prep.id)
-class ArtifactAJAXTests(TestHandlerBase):
-
- def test_delete_artifact(self):
- response = self.post('/artifact/',
- {'artifact_id': 2})
- self.assertEqual(response.code, 200)
- # This is needed so the clean up works - this is a distributed system
- # so we need to make sure that all processes are done before we reset
- # the test database
- wait_for_prep_information_job(1)
-
-
class ArtifactGetSamplesTest(TestHandlerBase):
def test_get(self):
response = self.get('/artifact/samples/', {'ids[]': [4, 5]})
diff --git a/qiita_pet/test/test_analysis_handlers.py b/qiita_pet/test/test_analysis_handlers.py
deleted file mode 100644
index 8484e3335..000000000
--- a/qiita_pet/test/test_analysis_handlers.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from unittest import main
-from json import loads
-
-from qiita_pet.test.tornado_test_base import TestHandlerBase
-from qiita_db.analysis import Analysis
-from qiita_db.user import User
-from qiita_db.util import get_count
-
-
-class TestSelectCommandsHandler(TestHandlerBase):
-
- def test_get(self):
- response = self.get('/analysis/3', {'aid': 1})
- # Make sure page response loaded sucessfully
- self.assertEqual(response.code, 200)
-
- def test_post(self):
- new_aid = get_count('qiita.analysis') + 1
- post_args = {
- 'name': 'post-test',
- 'description': "test of posting"}
- response = self.post('/analysis/3', post_args)
- # Make sure page response loaded sucessfully
- self.assertEqual(response.code, 200)
- # make sure analysis created
- analysis = Analysis(new_aid)
- self.assertEqual(analysis.name, 'post-test')
-
-
-class TestAnalysisWaitHandler(TestHandlerBase):
-
- def test_get_exists(self):
- response = self.get('/analysis/wait/1')
- # Make sure page response loaded sucessfully
- self.assertEqual(response.code, 200)
-
- def test_get_no_exists(self):
- response = self.get('/analysis/wait/237')
- # Make sure page response loaded with 404, not 500
- self.assertEqual(response.code, 404)
-
- def test_post(self):
- post_args = {
- 'rarefaction-depth': 100,
- 'commands': ['16S#command']
- }
- response = self.post('/analysis/wait/1', post_args)
- # Make sure page response loaded sucessfully
- self.assertEqual(response.code, 200)
-
- def test_post_other_params(self):
- post_args = {
- 'rarefaction-depth': '',
- 'merge-duplicated-sample-ids': 'on',
- 'commands': ['16S#command']
- }
- response = self.post('/analysis/wait/1', post_args)
- # Make sure page response loaded sucessfully
- self.assertEqual(response.code, 200)
-
-
-class TestAnalysisResultsHandler(TestHandlerBase):
-
- def test_get(self):
- # TODO: add proper test for this once figure out how. Issue 567
- # need to figure out biom table to test this with
- pass
-
-
-class TestShowAnalysesHandler(TestHandlerBase):
- def test_get(self):
- response = self.get('/analysis/show/')
- # Make sure page response loaded sucessfully
- self.assertEqual(response.code, 200)
-
-
-class TestSelectedSamplesHandler(TestHandlerBase):
- def test_get(self):
- response = self.get('/analysis/selected/')
- # Make sure page response loaded sucessfully
- self.assertEqual(response.code, 200)
-
-
-class TestShareAnalysisAjax(TestHandlerBase):
-
- def test_get_deselected(self):
- a = Analysis(1)
- u = User('shared@foo.bar')
- args = {'deselected': u.id, 'id': a.id}
- self.assertEqual(a.shared_with, [u])
- response = self.get('/analysis/sharing/', args)
- self.assertEqual(response.code, 200)
- exp = {'users': [], 'links': ''}
- self.assertEqual(loads(response.body), exp)
- self.assertEqual(a.shared_with, [])
-
- # Make sure unshared message added to the system
- self.assertEqual('Analysis \'SomeAnalysis\' has been unshared from '
- 'you.', u.messages()[0][1])
- # Share the analysis back with the user
- a.share(u)
-
- def test_get_selected(self):
- s = Analysis(1)
- u = User('admin@foo.bar')
- args = {'selected': u.id, 'id': s.id}
- response = self.get('/analysis/sharing/', args)
- self.assertEqual(response.code, 200)
- exp = {
- 'users': ['shared@foo.bar', u.id],
- 'links':
- ('Shared, '
- 'Admin')}
- self.assertEqual(loads(response.body), exp)
- self.assertEqual(s.shared_with, [User('shared@foo.bar'), u])
-
- # Make sure shared message added to the system
- self.assertEqual('Analysis '
- '\'SomeAnalysis\' has been shared with you.',
- u.messages()[0][1])
-
- def test_get_no_access(self):
- s = Analysis(2)
- u = User('admin@foo.bar')
- args = {'selected': u.id, 'id': 2}
- response = self.get('/analysis/sharing/', args)
- self.assertEqual(response.code, 403)
- self.assertEqual(s.shared_with, [])
-
-
-if __name__ == "__main__":
- main()
diff --git a/qiita_ware/wrapper.py b/qiita_ware/wrapper.py
index e7fd83b6e..a7c4ab937 100644
--- a/qiita_ware/wrapper.py
+++ b/qiita_ware/wrapper.py
@@ -4,37 +4,8 @@
from os import remove
from sys import stderr
-from skbio.util import flatten
import networkx as nx
-from moi.job import system_call, submit, ctxs, ctx_default
-
-from qiita_db.job import Job
-
-
-def system_call_from_job(job_id, **kwargs):
- """Executes a system call described by a Job
-
- Parameters
- ----------
- job_id : int
- The job object ID
- """
- job = Job(job_id)
- name, command = job.command
- options = job.options
-
- cmd = [command]
- cmd.extend(flatten(options.items()))
- cmd_fmt = ' '.join((str(i) for i in cmd))
-
- try:
- so, se, status = system_call(cmd_fmt)
- except Exception as e:
- job.set_error(str(e))
- raise
-
- # FIX THIS add_results should not be hard coded Issue #269
- job.add_results([(job.options["--output_dir"], "directory")])
+from moi.job import submit, ctxs, ctx_default
class ParallelWrapper(object):
diff --git a/scripts/qiita b/scripts/qiita
index a035dfbba..ac0053413 100755
--- a/scripts/qiita
+++ b/scripts/qiita
@@ -16,6 +16,7 @@ from os.path import join, abspath, dirname, basename
from future.utils import viewitems
from glob import glob
from time import sleep
+from multiprocessing import active_children
import click
import tornado.httpserver
@@ -414,6 +415,7 @@ def start(port, master):
# into an entirely different script.
from qiita_pet.webserver import Application
from tornado.options import options, parse_command_line
+ from tornado.ioloop import PeriodicCallback
if master:
# Deactivate all the plugins and only activate those that are currently
@@ -458,6 +460,12 @@ def start(port, master):
sleep(0.5)
ioloop.add_timeout(ioloop.time() + 0.5, callback_function)
+
+    # Set a PeriodicCallback to clean up finished child processes every 10 seconds
+ # To understand why this is working as expected, check the multiprocessing
+ # documentation https://docs.python.org/2/library/multiprocessing.html
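+    # Calling active_children() has the side effect of joining (reaping) any
+    # child processes that have already finished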
+ PeriodicCallback(lambda: active_children(), 10000).start()
+
ioloop.start()
# #############################################################################