From 99ee51a1b800bc2bdbf4e1eee3d8579417185631 Mon Sep 17 00:00:00 2001
From: John Chilton
Date: Mon, 23 Nov 2015 09:31:22 +0000
Subject: [PATCH] Add test_reports command for rebuilding reports from structured JSON.

Refactor Galaxy testing abstractions toward better exception handling
(print more, fail harder, try more: if one report type fails, still try
the others but raise an exception at the end) and toward greater reuse
without requiring a full test context (i.e. be able to do more from just
the JSON output).
---
 planemo/commands/cmd_share_test.py   |  14 +-
 planemo/commands/cmd_test.py         |   2 +-
 planemo/commands/cmd_test_reports.py |  24 ++++
 planemo/galaxy_test/__init__.py      |   5 +-
 planemo/galaxy_test/actions.py       |  71 ++++++---
 planemo/galaxy_test/structures.py    |  27 +++-
 planemo/options.py                   |  34 ++++-
 tests/data/issue381.json             | 206 +++++++++++++++++++++++++++
 tests/test_test_report.py            |  11 ++
 9 files changed, 346 insertions(+), 48 deletions(-)
 create mode 100644 planemo/commands/cmd_test_reports.py
 create mode 100644 tests/data/issue381.json
 create mode 100644 tests/test_test_report.py

diff --git a/planemo/commands/cmd_share_test.py b/planemo/commands/cmd_share_test.py
index eb179fd6d..60f06d18c 100644
--- a/planemo/commands/cmd_share_test.py
+++ b/planemo/commands/cmd_share_test.py
@@ -3,6 +3,7 @@
 import click
 
 from planemo.cli import pass_context
+from planemo import options
 from planemo.io import info
 from planemo import github_util
 
@@ -11,20 +12,9 @@
     "?test_data_url=%s"
 )
 
-target_path = click.Path(
-    file_okay=True,
-    dir_okay=False,
-    resolve_path=True,
-)
-

 @click.command("share_test")
-@click.argument(
-    'path',
-    metavar="FILE_PATH",
-    type=target_path,
-    default="tool_test_output.json",
-)
+@options.tool_test_json()
 @pass_context
 def cli(ctx, path, **kwds):
     """Publish JSON test results to Github Gist and produce sharable URL.
diff --git a/planemo/commands/cmd_test.py b/planemo/commands/cmd_test.py
index 37e55ef20..7410bd17b 100644
--- a/planemo/commands/cmd_test.py
+++ b/planemo/commands/cmd_test.py
@@ -24,7 +24,7 @@
 )
 @options.galaxy_target_options()
 @options.galaxy_config_options()
-@options.test_options()
+@options.test_report_options()
 @pass_context
 def cli(ctx, paths, **kwds):
     """Run the tests in the specified tool tests in a Galaxy instance.
diff --git a/planemo/commands/cmd_test_reports.py b/planemo/commands/cmd_test_reports.py
new file mode 100644
index 000000000..ab9daf608
--- /dev/null
+++ b/planemo/commands/cmd_test_reports.py
@@ -0,0 +1,24 @@
+import os
+
+import click
+
+from planemo.cli import pass_context
+from planemo import io
+from planemo import options
+from planemo.galaxy_test import StructuredData, handle_test_reports
+
+
+@click.command('test_reports')
+@options.tool_test_json()
+@options.test_report_options()
+@pass_context
+def cli(ctx, path, **kwds):
+    """Generate various tool test reports (HTML, text, markdown) from
+    structured output from tests (tool_test_output.json).
+    """
+    if not os.path.exists(path):
+        io.error("Failed to find tool test json file at %s" % path)
+        return 1
+
+    test_data = StructuredData(path)
+    handle_test_reports(ctx, test_data, **kwds)
diff --git a/planemo/galaxy_test/__init__.py b/planemo/galaxy_test/__init__.py
index 29a4c77bf..f17c7d397 100644
--- a/planemo/galaxy_test/__init__.py
+++ b/planemo/galaxy_test/__init__.py
@@ -1,3 +1,6 @@
 from .actions import run_in_config
+from .structures import StructuredData
+from .actions import handle_test_reports

-__all__ = ["run_in_config"]
+
+__all__ = ["run_in_config", "StructuredData", "handle_test_reports"]
diff --git a/planemo/galaxy_test/actions.py b/planemo/galaxy_test/actions.py
index 60f357953..ceb5565e6 100644
--- a/planemo/galaxy_test/actions.py
+++ b/planemo/galaxy_test/actions.py
@@ -83,27 +83,8 @@ def run_in_config(ctx, config, **kwds):
         return_code,
     )

-    try:
-        test_data = test_results.structured_data
-
-        if 'test_output' in kwds:
-            output_path = kwds['test_output']
-            if output_path is not None:
-                with open(output_path, 'w') as handle:
-                    handle.write(build_report.build_report(test_data))
-
-        for kw_name in ('markdown', 'text'):
-            if 'test_output_%s' % kw_name in kwds:
-                output_path = kwds['test_output_%s' % kw_name]
-                if output_path is None:
-                    continue
-
-                with open(output_path, 'w') as handle:
-                    handle.write(build_report.build_report(test_data, report_type=kw_name))
-
-    except Exception:
-        ctx.vlog("Problem producing test output.", exception=True)
-
+    test_data = test_results.structured_data
+    handle_test_reports(ctx, test_data, **kwds)
     __handle_summary(
         test_results,
         **kwds
@@ -112,6 +93,54 @@ def run_in_config(ctx, config, **kwds):
     return return_code


+def handle_test_reports(ctx, test_data, **kwds):
+    exceptions = []
+    for report_type in ["html", "markdown", "text"]:
+        try:
+            _handle_test_output_file(
+                ctx, report_type, test_data, **kwds
+            )
+        except Exception as e:
+            exceptions.append(e)
+            continue
+
+    if len(exceptions) > 0:
+        raise exceptions[0]
+
+
+def _handle_test_output_file(ctx, report_type, test_data, **kwds):
+    kwd_name = "test_output"
+    if report_type != "html":
+        kwd_name = "test_output_%s" % report_type
+
+    path = kwds.get(kwd_name, None)
+    if path is None:
+        message = "No file specified for %s, skipping test output." % kwd_name
+        ctx.vlog(message)
+        return
+
+    try:
+        contents = build_report.build_report(
+            test_data, report_type=report_type
+        )
+    except Exception:
+        message = "Problem producing report file %s for %s" % (
+            path, kwd_name
+        )
+        ctx.vlog(message, exception=True)
+        raise
+
+    try:
+        with open(path, 'w') as handle:
+            handle.write(contents)
+    except Exception:
+        message = "Problem writing output file %s for %s" % (
+            path, kwd_name
+        )
+        ctx.vlog(message, exception=True)
+        raise
+
+
 def __handle_summary(
         test_results,
         **kwds
diff --git a/planemo/galaxy_test/structures.py b/planemo/galaxy_test/structures.py
index 6128b9877..2390187ee 100644
--- a/planemo/galaxy_test/structures.py
+++ b/planemo/galaxy_test/structures.py
@@ -76,12 +76,15 @@ def __init__(self, json_path):
         self.structured_data_by_id = structured_data_by_id
         self.has_details = "summary" in structured_data
         if self.has_details:
-            self._read_summary()
+            self.read_summary()

     def update(self):
         with open(self.json_path, "w") as out_f:
             json.dump(self.structured_data, out_f)

+    def set_exit_code(self, exit_code):
+        self.structured_data["exit_code"] = exit_code
+
     def merge_xunit(self, xunit_root):
         self.has_details = True
         xunit_attrib = xunit_root.attrib
@@ -97,8 +100,6 @@ def merge_xunit(self, xunit_root):
         )
         self.structured_data["summary"] = summary

-        self.num_tests = num_tests
-        self.num_problems = num_skips + num_errors + num_failures

         for testcase_el in xunit_t_elements_from_root(xunit_root):
             test = case_id(testcase_el)
@@ -118,9 +119,17 @@ def merge_xunit(self, xunit_root):
                 status = "success"
             test_data["status"] = status

-    def _read_summary(self):
-        # TODO: read relevant data out of summary object.
-        pass
+    def read_summary(self):
+        summary = self.structured_data["summary"]
+        num_tests = summary["num_tests"]
+        num_failures = summary["num_failures"]
+        num_skips = summary["num_skips"]
+        num_errors = summary["num_errors"]
+
+        self.num_tests = num_tests
+        self.num_problems = num_skips + num_errors + num_failures
+
+        self.exit_code = self.structured_data.get("exit_code", None)

     @property
     def failed_ids(self):
@@ -159,8 +168,14 @@ def __init__(
             sd.merge_xunit(self._xunit_root)
         else:
             self.xunit_tree = ET.fromstring("")
+        self.sd.set_exit_code(exit_code)
+        self.sd.read_summary()
         self.sd.update()

+    @property
+    def exit_code(self):
+        return self.sd.exit_code
+
     @property
     def has_details(self):
         return self.sd.has_details
diff --git a/planemo/options.py b/planemo/options.py
index f79998ca2..178018006 100644
--- a/planemo/options.py
+++ b/planemo/options.py
@@ -616,14 +616,22 @@ def recursive_option(help="Recursively perform command for subdirectories."):
     )


-def test_options():
+def tool_test_json():
+    target_path = click.Path(
+        file_okay=True,
+        dir_okay=False,
+        resolve_path=True,
+    )
+    return click.argument(
+        'path',
+        metavar="FILE_PATH",
+        type=target_path,
+        default="tool_test_output.json",
+    )
+
+
+def test_report_options():
     return _compose(
-        click.option(
-            "--update_test_data",
-            is_flag=True,
-            help="Update test-data directory with job outputs (normally"
-            " written to directory --job_output_files if specified.)"
-        ),
         click.option(
             "--test_output",
             type=click.Path(file_okay=True, resolve_path=True),
@@ -648,6 +656,18 @@ def test_options():
             "computers)"),
         default=None,
     ),
+    )
+
+
+def test_options():
+    return _compose(
+        click.option(
+            "--update_test_data",
+            is_flag=True,
+            help="Update test-data directory with job outputs (normally"
+            " written to directory --job_output_files if specified.)"
+        ),
+        test_report_options(),
+        click.option(
+            "--test_output_xunit",
type=click.Path(file_okay=True, resolve_path=True), diff --git a/tests/data/issue381.json b/tests/data/issue381.json new file mode 100644 index 000000000..388585172 --- /dev/null +++ b/tests/data/issue381.json @@ -0,0 +1,206 @@ +{ + "exit_code": 1, + "summary" : { + "num_errors" : 1, + "num_skips" : 0, + "num_tests" : 4, + "num_failures" : 2 + }, + "version" : "0.1", + "tests" : [ + { + "id" : "functional.test_toolbox.TestForTool_bcftools_concat.test_tool_000000", + "has_data" : true, + "data" : { + "status" : "success", + "inputs" : { + "input_file2" : { + "id" : "5729865256bc2525", + "src" : "hda" + }, + "input_file1" : { + "id" : "2891970512fa2d5a", + "src" : "hda" + } + }, + "job" : { + "outputs" : { + "output_file" : { + "id" : "54f2a3a23292eb07", + "uuid" : "b02f5eb2-eece-440e-a8bb-9a08a8e8f22d", + "src" : "hda" + } + }, + "state" : "ok", + "model_class" : "Job", + "id" : "54f2a3a23292eb07", + "user_email" : "test@bx.psu.edu", + "tool_id" : "bcftools_concat", + "stdout" : "", + "params" : { + "sec_default" : "{\"allow_overlaps\": \"False\", \"remove_duplicates\": \"False\", \"ligate\": \"False\", \"min_PQ\": \"\", \"select_output_type\": \"v\", \"regions_file\": null, \"invert_regions_file\": \"False\"}", + "dbkey" : "\"hg17\"", + "chromInfo" : "\"/tmp/tmp5pjfin/galaxy-dev/tool-data/shared/ucsc/chrom/hg17.len\"" + }, + "stderr" : "", + "job_metrics" : [], + "update_time" : "2015-11-22T18:43:36.655580", + "create_time" : "2015-11-22T18:43:33.244091", + "exit_code" : 0, + "command_line" : "bcftools concat --output-type \"v\" /tmp/tmp5pjfin/files/000/dataset_1.dat /tmp/tmp5pjfin/files/000/dataset_2.dat > /tmp/tmp5pjfin/files/000/dataset_3.dat", + "external_id" : "11230", + "inputs" : { + "input_file2" : { + "id" : "5729865256bc2525", + "src" : "hda", + "uuid" : "e6aa6d21-f8b3-4b4a-a698-5b8a673cdf73" + }, + "input_file1" : { + "id" : "2891970512fa2d5a", + "src" : "hda", + "uuid" : "3ad7718d-1e9c-4ec0-8efd-494acbaa3fd5" + } + } + } + } + }, + { + "id" : "functional.test_toolbox.TestForTool_bcftools_concat.test_tool_000001", + "has_data" : true, + "data" : { + "problem_type" : "functional.test_toolbox.JobOutputsError", + "status" : "failure", + "inputs" : { + "input_file1" : { + "src" : "hda", + "id" : "8155e4b4bf1581ff" + }, + "input_file2" : { + "src" : "hda", + "id" : "7b55dbb89df8f4e5" + }, + "sec_default|allow_overlaps" : true + }, + "problem_log" : " File \"/usr/lib/python2.7/unittest/case.py\", line 329, in run\n testMethod()\n File \"/tmp/tmp5pjfin/galaxy-dev/test/functional/test_toolbox.py\", line 270, in test_tool\n self.do_it( td )\n File \"/tmp/tmp5pjfin/galaxy-dev/test/functional/test_toolbox.py\", line 67, in do_it\n raise e\n'Job in error state.\\nJob in error state.\\n-------------------- >> begin captured stdout << ---------------------\\nHistory with id 5729865256bc2525 in error - summary of datasets in error below.\\n--------------------------------------\\n| 3 - bcftools concat on data 2 and data 1 (HID - NAME) \\n| Dataset Blurb:\\n| error\\n| Dataset Info:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_4.dat: could not load index\\n| Dataset Job Standard Output:\\n| *Standard output was empty.*\\n| Dataset Job Standard Error:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_4.dat: could not load index\\n|\\n--------------------------------------\\nHistory with id 5729865256bc2525 in error - summary of datasets in error below.\\n--------------------------------------\\n| 3 - bcftools concat on 
data 2 and data 1 (HID - NAME) \\n| Dataset Blurb:\\n| error\\n| Dataset Info:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_4.dat: could not load index\\n| Dataset Job Standard Output:\\n| *Standard output was empty.*\\n| Dataset Job Standard Error:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_4.dat: could not load index\\n|\\n--------------------------------------\\n\\n--------------------- >> end captured stdout << ----------------------\\n-------------------- >> begin captured logging << --------------------\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.web.framework.webapp: INFO: Session authenticated using Galaxy master api key\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/users?key=test_key HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.web.framework.webapp: INFO: Session authenticated using Galaxy master api key\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/users/2891970512fa2d5a/api_key HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/histories HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.tools.actions.upload_common: INFO: tool upload1 created job id 4\\ngalaxy.tools.execute: DEBUG: Tool [upload1] created job [4] (602.265 ms)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/tools HTTP/1.1\" 200 None\\ngalaxy.jobs: DEBUG: (4) Working directory for job is: /tmp/tmp5pjfin/job_working_directory/000/4\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.handler: DEBUG: (4) Dispatching to local runner\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: (4) Persisting job destination (destination id: local:///)\\ngalaxy.jobs.runners: DEBUG: Job [4] queued (1379.148 ms)\\ngalaxy.jobs.handler: INFO: (4) Job dispatched\\ngalaxy.tools.deps: DEBUG: Building dependency shell command for dependency \\'samtools\\'\\ngalaxy.tools.deps: WARNING: Failed to resolve dependency on \\'samtools\\', ignoring\\ngalaxy.jobs.runners: DEBUG: (4) command is: python /tmp/tmp5pjfin/galaxy-dev/tools/data_source/upload.py /tmp/tmp5pjfin/galaxy-dev /tmp/tmp5pjfin/tmp/tmpuZo40k /tmp/tmp5pjfin/tmp/tmp8VZxv8 4:/tmp/tmp5pjfin/job_working_directory/000/4/dataset_4_files:/tmp/tmp5pjfin/files/000/dataset_4.dat; return_code=$?; python /tmp/tmp5pjfin/ +job_working_directory/000/4/set_metadata_mXQVti.py /tmp/tmp5pjfin/tmp/tmpuZo40k /tmp/tmp5pjfin/job_working_directory/000/4/galaxy.json /tmp/tmp5pjfin/job_working_directory/000/4/metadata_in_HistoryDatasetAssociation_4_T4oyhJ,/tmp/tmp5pjfin/job_working_directory/000/4/metadata_kwds_HistoryDatasetAssociation_4_k7QHE8,/tmp/tmp5pjfin/job_working_directory/000/4/metadata_out_HistoryDatasetAssociation_4_yXfnjX,/tmp/tmp5pjfin/job_working_directory/000/4/metadata_results_HistoryDatasetAssociation_4_Tz23sN,/tmp/tmp5pjfin/files/000/dataset_4.dat,/tmp/tmp5pjfin/job_working_directory/000/4/metadata_override_HistoryDatasetAssociation_4_7J3ovc 0; sh -c \"exit 
$return_code\"\\ngalaxy.jobs.runners.local: DEBUG: (4) executing job script: /tmp/tmp5pjfin/job_working_directory/000/4/galaxy_4.sh\\ngalaxy.jobs: DEBUG: (4) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.runners.local: DEBUG: execution finished: /tmp/tmp5pjfin/job_working_directory/000/4/galaxy_4.sh\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.datatypes.metadata: DEBUG: loading metadata from file for: HistoryDatasetAssociation 4\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: job 4 ended (finish() executed in (1699.326 ms))\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.tools.actions.upload_common: INFO: tool upload1 created job id 5\\ngalaxy.tools.execute: DEBUG: Tool [upload1] created job [5] (743.937 ms)\\ngalaxy.jobs: DEBUG: (5) Working directory for job is: /tmp/tmp5pjfin/job_working_directory/000/5\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/tools HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.handler: DEBUG: (5) Dispatching to local runner\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: (5) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.runners: DEBUG: Job [5] queued (534.749 ms)\\ngalaxy.jobs.handler: INFO: (5) Job dispatched\\ngalaxy.tools.deps: DEBUG: Building dependency shell command for dependency \\'samtools\\'\\ngalaxy.tools.deps: WARNING: Failed to resolve dependency on \\'samtools\\', ignoring\\ngalaxy.jobs.runners: DEBUG: (5) command is: python /tmp/tmp5pjfin/galaxy-dev/tools/data_source/upload.py /tmp/tmp5pjfin/galaxy-dev /tmp/tmp5pjfin/tmp/tmpuZo40k /tmp/tmp5pjfin/tmp/tmpEfL2nl 5:/tmp/tmp5pjfin/job_working_directory/000/5/dataset_5_files:/tmp/tmp5pjfin/files/000/dataset_5.dat; return_code=$?; python /tmp/tmp5pjfin/job_working_directory/000/5/set_metadata_BHJ9hq.py /tmp/tmp5pjfin/tmp/tmpuZo40k / +tmp/tmp5pjfin/job_working_directory/000/5/galaxy.json 
/tmp/tmp5pjfin/job_working_directory/000/5/metadata_in_HistoryDatasetAssociation_5_GEeQN7,/tmp/tmp5pjfin/job_working_directory/000/5/metadata_kwds_HistoryDatasetAssociation_5_YRQXRe,/tmp/tmp5pjfin/job_working_directory/000/5/metadata_out_HistoryDatasetAssociation_5_PqtV_Q,/tmp/tmp5pjfin/job_working_directory/000/5/metadata_results_HistoryDatasetAssociation_5_737sSK,/tmp/tmp5pjfin/files/000/dataset_5.dat,/tmp/tmp5pjfin/job_working_directory/000/5/metadata_override_HistoryDatasetAssociation_5_l0EuS5 0; sh -c \"exit $return_code\"\\ngalaxy.jobs.runners.local: DEBUG: (5) executing job script: /tmp/tmp5pjfin/job_working_directory/000/5/galaxy_5.sh\\ngalaxy.jobs: DEBUG: (5) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.runners.local: DEBUG: execution finished: /tmp/tmp5pjfin/job_working_directory/000/5/galaxy_5.sh\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.datatypes.metadata: DEBUG: loading metadata from file for: HistoryDatasetAssociation 5\\ngalaxy.jobs: DEBUG: job 5 ended (finish() executed in (754.248 ms))\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.tools.actions: INFO: Handled output (318.018 ms)\\ngalaxy.tools.actions: INFO: Verified access to datasets (22.165 ms)\\ngalaxy.tools.execute: DEBUG: Tool [bcftools_concat] created job [6] (775.622 ms)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/tools HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: (6) Working directory for job is: /tmp/tmp5pjfin/job_working_directory/000/6\\ngalaxy.jobs.handler: DEBUG: (6) Dispatching to local runner\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/fa6d20d0fb68383f?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\ngalaxy.jobs: DEBUG: (6) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.runners: DEBUG: Job [6] queued (407.617 ms)\\ngalaxy.jobs.handler: INFO: (6) Job dispatched\\ngalaxy.tools.deps: DEBUG: Building dependency shell command for dependency \\'bcftools\\'\\ngalaxy.tools.deps: WARNING: Failed to resolve dependency on \\'bcftools\\', ignoring\\ngalaxy.jobs.runners: DEBUG: (6) command is: bcftools 2>&1 | grep \\'Version:\\' > /tmp/tmp5pjfin/tmp/GALAXY_VERSION_STRING_6 2>&1; bcftools concat --allow-overlaps --output-type \"v\" /tmp/tmp5pjfin/files/000/dataset_4.dat /tmp/tmp5pjfin/files/000/dataset_5.dat > /tmp/tmp5pjfin/files/000/dataset_6.dat; return_code=$?; python /tmp/tmp5pjfin/job_working_directory/000/6/set_metadata_FTDkBM.py /tmp/tmp5pjfin/tmp/tmpuZo40k 
/tmp/tmp5pjfin/job_working_directory/000/6/galaxy.json /tmp/tmp5pjfin/job_working_directory/000/6/metadata_in_HistoryDatasetAssociation_6_ZoV8cB,/tmp/tmp5pjfin/job_working_directory/000/6/metadata_kwds_HistoryDatasetAssociation_6_bAOa5r,/tmp/tmp5pjfin/job_working_directory/000/6/metadata_out_HistoryDatasetAssociation_6_puYIxT,/tmp/tmp5pjfin/job_working_directory/000/6/metadata_results_HistoryDatasetAssociation_6_gRWWAL,/tmp/tmp5pjfin/files/000/dataset_6.dat,/tmp/tmp5pjfin/job_ +working_directory/000/6/metadata_override_HistoryDatasetAssociation_6_1JAURb 0; sh -c \"exit $return_code\"\\ngalaxy.jobs.runners.local: DEBUG: (6) executing job script: /tmp/tmp5pjfin/job_working_directory/000/6/galaxy_6.sh\\ngalaxy.jobs: DEBUG: (6) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/fa6d20d0fb68383f?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\ngalaxy.jobs.runners.local: DEBUG: execution finished: /tmp/tmp5pjfin/job_working_directory/000/6/galaxy_6.sh\\ngalaxy.jobs.output_checker: INFO: Job 6: Fatal error: Exit code 255 ()\\ngalaxy.jobs: DEBUG: setting dataset state to ERROR\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: job 6 ended (finish() executed in (485.123 ms))\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/fa6d20d0fb68383f?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/fa6d20d0fb68383f?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525/contents?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525/contents/fa6d20d0fb68383f?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525/contents/fa6d20d0fb68383f/provenance?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/fa6d20d0fb68383f?full=true&key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/fa6d20d0fb68383f?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525/contents?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525/contents/fa6d20d0fb68383f?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting 
new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/5729865256bc2525/contents/fa6d20d0fb68383f/provenance?key=d78ddba8840f79ec159f689fdef9d658 HTTP/1.1\" 200 None\\n--------------------- >> end captured logging << ---------------------'", + "job" : { + "inputs" : { + "input_file1" : { + "src" : "hda", + "uuid" : "4d470ffd-c230-40d6-a5cc-10fa4d8004a7", + "id" : "8155e4b4bf1581ff" + }, + "input_file2" : { + "src" : "hda", + "uuid" : "e5c2df91-ec50-484a-ab8d-ad4e6417f516", + "id" : "7b55dbb89df8f4e5" + } + }, + "external_id" : "11276", + "exit_code" : 255, + "command_line" : "bcftools concat --allow-overlaps --output-type \"v\" /tmp/tmp5pjfin/files/000/dataset_4.dat /tmp/tmp5pjfin/files/000/dataset_5.dat > /tmp/tmp5pjfin/files/000/dataset_6.dat", + "create_time" : "2015-11-22T18:43:52.782033", + "update_time" : "2015-11-22T18:43:57.189984", + "job_metrics" : [], + "stderr" : "Fatal error: Exit code 255 ()\nFailed to open /tmp/tmp5pjfin/files/000/dataset_4.dat: could not load index\n", + "params" : { + "sec_default" : "{\"allow_overlaps\": \"True\", \"remove_duplicates\": \"False\", \"ligate\": \"False\", \"min_PQ\": \"\", \"select_output_type\": \"v\", \"regions_file\": null, \"invert_regions_file\": \"False\"}", + "dbkey" : "\"hg17\"", + "chromInfo" : "\"/tmp/tmp5pjfin/galaxy-dev/tool-data/shared/ucsc/chrom/hg17.len\"" + }, + "stdout" : "", + "user_email" : "test@bx.psu.edu", + "tool_id" : "bcftools_concat", + "id" : "fa6d20d0fb68383f", + "state" : "error", + "model_class" : "Job", + "outputs" : { + "output_file" : { + "id" : "fa6d20d0fb68383f", + "uuid" : "48bf13ab-fe70-479e-955a-6ba20a9be374", + "src" : "hda" + } + } + }, + "output_problems" : [ + "Job in error state.", + "Job in error state." + ] + } + }, + { + "id" : "functional.test_toolbox.TestForTool_bcftools_concat.test_tool_000002", + "has_data" : false, + "data" : null + }, + { + "data" : { + "inputs" : { + "input_file2" : { + "src" : "hda", + "id" : "b842d972534ccb3e" + }, + "sec_default|allow_overlaps" : true, + "input_file1" : { + "id" : "a90a30fafe298e1e", + "src" : "hda" + }, + "sec_default|remove_duplicates" : true + }, + "problem_log" : " File \"/usr/lib/python2.7/unittest/case.py\", line 329, in run\n testMethod()\n File \"/tmp/tmp5pjfin/galaxy-dev/test/functional/test_toolbox.py\", line 270, in test_tool\n self.do_it( td )\n File \"/tmp/tmp5pjfin/galaxy-dev/test/functional/test_toolbox.py\", line 67, in do_it\n raise e\n'Job in error state.\\nJob in error state.\\n-------------------- >> begin captured stdout << ---------------------\\nHistory with id 8155e4b4bf1581ff in error - summary of datasets in error below.\\n--------------------------------------\\n| 3 - bcftools concat on data 2 and data 1 (HID - NAME) \\n| Dataset Blurb:\\n| error\\n| Dataset Info:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_8.dat: could not load index\\n| Dataset Job Standard Output:\\n| *Standard output was empty.*\\n| Dataset Job Standard Error:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_8.dat: could not load index\\n|\\n--------------------------------------\\nHistory with id 8155e4b4bf1581ff in error - summary of datasets in error below.\\n--------------------------------------\\n| 3 - bcftools concat on data 2 and data 1 (HID - NAME) \\n| Dataset Blurb:\\n| error\\n| Dataset Info:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_8.dat: could not load index\\n| 
Dataset Job Standard Output:\\n| *Standard output was empty.*\\n| Dataset Job Standard Error:\\n| Fatal error: Exit code 255 ()\\n| Failed to open /tmp/tmp5pjfin/files/000/dataset_8.dat: could not load index\\n|\\n--------------------------------------\\n\\n--------------------- >> end captured stdout << ----------------------\\n-------------------- >> begin captured logging << --------------------\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.web.framework.webapp: INFO: Session authenticated using Galaxy master api key\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/users?key=test_key HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.web.framework.webapp: INFO: Session authenticated using Galaxy master api key\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/users/2891970512fa2d5a/api_key HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/histories HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.tools.actions.upload_common: INFO: tool upload1 created job id 8\\ngalaxy.tools.execute: DEBUG: Tool [upload1] created job [8] (756.588 ms)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/tools HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: (8) Working directory for job is: /tmp/tmp5pjfin/job_working_directory/000/8\\ngalaxy.jobs.handler: DEBUG: (8) Dispatching to local runner\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: (8) Persisting job destination (destination id: local:///)\\ngalaxy.jobs.runners: DEBUG: Job [8] queued (1371.102 ms)\\ngalaxy.jobs.handler: INFO: (8) Job dispatched\\ngalaxy.tools.deps: DEBUG: Building dependency shell command for dependency \\'samtools\\'\\ngalaxy.tools.deps: WARNING: Failed to resolve dependency on \\'samtools\\', ignoring\\ngalaxy.jobs.runners: DEBUG: (8) command is: python /tmp/tmp5pjfin/galaxy-dev/tools/data_ +source/upload.py /tmp/tmp5pjfin/galaxy-dev /tmp/tmp5pjfin/tmp/tmpuZo40k /tmp/tmp5pjfin/tmp/tmpdGsL1g 8:/tmp/tmp5pjfin/job_working_directory/000/8/dataset_8_files:/tmp/tmp5pjfin/files/000/dataset_8.dat; return_code=$?; python /tmp/tmp5pjfin/job_working_directory/000/8/set_metadata_CcIolG.py /tmp/tmp5pjfin/tmp/tmpuZo40k /tmp/tmp5pjfin/job_working_directory/000/8/galaxy.json 
/tmp/tmp5pjfin/job_working_directory/000/8/metadata_in_HistoryDatasetAssociation_8_7NWAAc,/tmp/tmp5pjfin/job_working_directory/000/8/metadata_kwds_HistoryDatasetAssociation_8_DLEMeH,/tmp/tmp5pjfin/job_working_directory/000/8/metadata_out_HistoryDatasetAssociation_8_dumYsb,/tmp/tmp5pjfin/job_working_directory/000/8/metadata_results_HistoryDatasetAssociation_8_z6aYlu,/tmp/tmp5pjfin/files/000/dataset_8.dat,/tmp/tmp5pjfin/job_working_directory/000/8/metadata_override_HistoryDatasetAssociation_8_eLojbK 0; sh -c \"exit $return_code\"\\ngalaxy.jobs.runners.local: DEBUG: (8) executing job script: /tmp/tmp5pjfin/job_working_directory/000/8/galaxy_8.sh\\ngalaxy.jobs: DEBUG: (8) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.runners.local: DEBUG: execution finished: /tmp/tmp5pjfin/job_working_directory/000/8/galaxy_8.sh\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.datatypes.metadata: DEBUG: loading metadata from file for: HistoryDatasetAssociation 8\\ngalaxy.jobs: DEBUG: job 8 ended (finish() executed in (1791.218 ms))\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.tools.actions.upload_common: INFO: tool upload1 created job id 9\\ngalaxy.tools.execute: DEBUG: Tool [upload1] created job [9] (704.111 ms)\\ngalaxy.jobs: DEBUG: (9) Working directory for job is: /tmp/tmp5pjfin/job_working_directory/000/9\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/tools HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.handler: DEBUG: (9) Dispatching to local runner\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: (9) Persisting job destination (destination id: local:///)\\ngalaxy.jobs.runners: DEBUG: Job [9] queued (368.196 ms)\\ngalaxy.jobs.handler: INFO: (9) Job dispatched\\ngalaxy.tools.deps: DEBUG: Building dependency shell command for dependency \\'samtools\\'\\ngalaxy.tools.deps: WARNING: Failed to resolve dependency on \\'samtools\\', ignoring\\ngalaxy.jobs.runners: DEBUG: (9) command is: python /tmp/tmp5pjfin/galaxy-dev/tools/data_source/upload.py /tmp/tmp5pjfin/galaxy-dev /tmp/tmp5pjfin/tmp/tmpuZo40k /tmp/tmp5pjfin/tmp/tmpZxbdHX 
9:/tmp/tmp5pjfin/job_working_directory/000/9/dataset_9_files:/tmp/tmp5pjfin/files/000/dataset_9.dat; return_code=$?; python /tmp/tmp5pjfin/job_working_directory/000/9/set_metadata_XWt2fn.py /tmp/tmp5pjfin/tmp/tmpuZo40k / +tmp/tmp5pjfin/job_working_directory/000/9/galaxy.json /tmp/tmp5pjfin/job_working_directory/000/9/metadata_in_HistoryDatasetAssociation_9_tsabzV,/tmp/tmp5pjfin/job_working_directory/000/9/metadata_kwds_HistoryDatasetAssociation_9_UQgw2n,/tmp/tmp5pjfin/job_working_directory/000/9/metadata_out_HistoryDatasetAssociation_9_VDinI8,/tmp/tmp5pjfin/job_working_directory/000/9/metadata_results_HistoryDatasetAssociation_9_LFlQZF,/tmp/tmp5pjfin/files/000/dataset_9.dat,/tmp/tmp5pjfin/job_working_directory/000/9/metadata_override_HistoryDatasetAssociation_9_qYzOEG 0; sh -c \"exit $return_code\"\\ngalaxy.jobs.runners.local: DEBUG: (9) executing job script: /tmp/tmp5pjfin/job_working_directory/000/9/galaxy_9.sh\\ngalaxy.jobs: DEBUG: (9) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.runners.local: DEBUG: execution finished: /tmp/tmp5pjfin/job_working_directory/000/9/galaxy_9.sh\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.datatypes.metadata: DEBUG: loading metadata from file for: HistoryDatasetAssociation 9\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: job 9 ended (finish() executed in (756.948 ms))\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.tools.actions: INFO: Handled output (474.659 ms)\\ngalaxy.tools.actions: INFO: Verified access to datasets (19.515 ms)\\ngalaxy.tools.execute: DEBUG: Tool [bcftools_concat] created job [10] (888.455 ms)\\ngalaxy.jobs: DEBUG: (10) Working directory for job is: /tmp/tmp5pjfin/job_working_directory/000/10\\nrequests.packages.urllib3.connectionpool: DEBUG: \"POST /api/tools HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.handler: DEBUG: (10) Dispatching to local runner\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/5449172d6ff5669b?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\ngalaxy.jobs: DEBUG: (10) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs.runners: DEBUG: Job [10] queued (377.760 ms)\\ngalaxy.jobs.handler: INFO: (10) Job dispatched\\ngalaxy.tools.deps: DEBUG: Building dependency shell command for dependency \\'bcftools\\'\\ngalaxy.tools.deps: WARNING: Failed to resolve dependency on \\'bcftools\\', ignoring\\ngalaxy.jobs.runners: DEBUG: (10) command is: bcftools 2>&1 | grep \\'Version:\\' > /tmp/tmp5pjfin/tmp/GALAXY_VERSION_STRING_10 2>&1; bcftools 
concat --allow-overlaps --remove-duplicates --output-type \"v\" /tmp/tmp5pjfin/files/000/dataset_8.dat /tmp/tmp5pjfin/files/000/dataset_9.dat > /tmp/tmp5pjfin/files/000/dataset_10.dat; return_code=$?; python /tmp/tmp5pjfin/job_working_directory/000/10/set_metadata_JflQbE.py /tmp/tmp5pjfin/tmp/tmpuZo40k /tmp/tmp5pjfin/job_working_directory/000/10/galaxy.json /tmp/tmp5pjfin/job_working_directory/000/10/metadata_in_HistoryDatasetAssociation_10_ANj2bu,/tmp/tmp5pjfin/job_working_directory/000/10/metadata_kwds_HistoryDatasetAssociation_10_TDlpDa,/tmp/tmp5pjfin/job_working_directory/000/10/metadata_out_HistoryDatasetAssociation_10_muyCeq,/tmp/tmp5pjfin/job_working_directory/000/10/metadata_results_HistoryDatasetAssociation_10_lY5jdH,/tmp/tmp5pjfin/files/ +000/dataset_10.dat,/tmp/tmp5pjfin/job_working_directory/000/10/metadata_override_HistoryDatasetAssociation_10_iIZp4h 0; sh -c \"exit $return_code\"\\ngalaxy.jobs.runners.local: DEBUG: (10) executing job script: /tmp/tmp5pjfin/job_working_directory/000/10/galaxy_10.sh\\ngalaxy.jobs: DEBUG: (10) Persisting job destination (destination id: local:///)\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/5449172d6ff5669b?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\ngalaxy.jobs.runners.local: DEBUG: execution finished: /tmp/tmp5pjfin/job_working_directory/000/10/galaxy_10.sh\\ngalaxy.jobs.output_checker: INFO: Job 10: Fatal error: Exit code 255 ()\\ngalaxy.jobs: DEBUG: setting dataset state to ERROR\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\ngalaxy.jobs: DEBUG: job 10 ended (finish() executed in (572.413 ms))\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/5449172d6ff5669b?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff/contents?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff/contents/5449172d6ff5669b?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff/contents/5449172d6ff5669b/provenance?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/5449172d6ff5669b?full=true&key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/jobs/5449172d6ff5669b?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff/contents?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET 
/api/histories/8155e4b4bf1581ff/contents/5449172d6ff5669b?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\nrequests.packages.urllib3.connectionpool: INFO: Starting new HTTP connection (1): localhost\\nrequests.packages.urllib3.connectionpool: DEBUG: \"GET /api/histories/8155e4b4bf1581ff/contents/5449172d6ff5669b/provenance?key=a13df6814f42fd8f95e5af2e48632070 HTTP/1.1\" 200 None\\n--------------------- >> end captured logging << ---------------------'", + "job" : { + "update_time" : "2015-11-22T18:44:24.816000", + "job_metrics" : [], + "external_id" : "11336", + "inputs" : { + "input_file2" : { + "id" : "b842d972534ccb3e", + "uuid" : "bbd523b2-0a90-40e4-8302-e931f46cccfb", + "src" : "hda" + }, + "input_file1" : { + "id" : "a90a30fafe298e1e", + "uuid" : "7852e0a8-8b4d-405b-9398-be468f9910e5", + "src" : "hda" + } + }, + "create_time" : "2015-11-22T18:44:21.622867", + "exit_code" : 255, + "command_line" : "bcftools concat --allow-overlaps --remove-duplicates --output-type \"v\" /tmp/tmp5pjfin/files/000/dataset_8.dat /tmp/tmp5pjfin/files/000/dataset_9.dat > /tmp/tmp5pjfin/files/000/dataset_10.dat", + "id" : "5449172d6ff5669b", + "user_email" : "test@bx.psu.edu", + "tool_id" : "bcftools_concat", + "stdout" : "", + "model_class" : "Job", + "state" : "error", + "outputs" : { + "output_file" : { + "uuid" : "a11eb35e-e64e-4c32-9571-08ed6fc014ec", + "src" : "hda", + "id" : "5449172d6ff5669b" + } + }, + "stderr" : "Fatal error: Exit code 255 ()\nFailed to open /tmp/tmp5pjfin/files/000/dataset_8.dat: could not load index\n", + "params" : { + "chromInfo" : "\"/tmp/tmp5pjfin/galaxy-dev/tool-data/shared/ucsc/chrom/hg17.len\"", + "dbkey" : "\"hg17\"", + "sec_default" : "{\"allow_overlaps\": \"True\", \"remove_duplicates\": \"True\", \"ligate\": \"False\", \"min_PQ\": \"\", \"select_output_type\": \"v\", \"regions_file\": null, \"invert_regions_file\": \"False\"}" + } + }, + "output_problems" : [ + "Job in error state.", + "Job in error state." + ], + "problem_type" : "functional.test_toolbox.JobOutputsError", + "status" : "failure" + }, + "has_data" : true, + "id" : "functional.test_toolbox.TestForTool_bcftools_concat.test_tool_000003" + } + ] +} \ No newline at end of file diff --git a/tests/test_test_report.py b/tests/test_test_report.py new file mode 100644 index 000000000..11e5eb2de --- /dev/null +++ b/tests/test_test_report.py @@ -0,0 +1,11 @@ +import os + +from .test_utils import CliTestCase, TEST_DATA_DIR + + +class TestReportsTestCase(CliTestCase): + + def test_build_reports(self): + with self._isolate(): + json_path = os.path.join(TEST_DATA_DIR, "issue381.json") + self._check_exit_code(["test_reports", json_path], exit_code=0)