Fix stdout/stderr exception handling
mvdbeek committed Aug 14, 2018
1 parent fc06e78 commit dfba132
Showing 4 changed files with 19 additions and 7 deletions.
4 changes: 4 additions & 0 deletions lib/galaxy/jobs/output_checker.py
@@ -3,6 +3,7 @@
 from logging import getLogger
 
 from galaxy.tools.parser.error_level import StdioErrorLevel
+from galaxy.util import unicodify
 from galaxy.util.bunch import Bunch
 
 log = getLogger(__name__)
@@ -30,6 +31,9 @@ def check_output(tool, stdout, stderr, tool_exit_code, job):
     # has a bug but the tool was ok, and it lets a workflow continue.
     state = DETECTED_JOB_STATE.OK
 
+    stdout = unicodify(stdout)
+    stderr = unicodify(stderr)
+
     try:
         # Check exit codes and match regular expressions against stdout and
         # stderr if this tool was configured to do so.
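For context, a minimal sketch of the decoding this hunk relies on, assuming galaxy.util.unicodify decodes with errors='replace' so undecodable bytes become replacement characters instead of raising (the byte string below is illustrative, not from a real tool run):

# Sketch only, not Galaxy code.
from galaxy.util import unicodify

stderr = b"tool wrote \xff, which is not valid UTF-8\n"
stderr = unicodify(stderr)      # no UnicodeDecodeError; bad bytes are replaced
assert isinstance(stderr, str)  # now safe to match against the tool's stdio regexes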
5 changes: 3 additions & 2 deletions lib/galaxy/jobs/runners/__init__.py
@@ -643,8 +643,9 @@ def finish_job(self, job_state):
         collect_output_success = True
         while which_try < self.app.config.retry_job_output_collection + 1:
             try:
-                stdout = shrink_stream_by_size(open(job_state.output_file, "r"), DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True)
-                stderr = shrink_stream_by_size(open(job_state.error_file, "r"), DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True)
+                with open(job_state.output_file, "rb") as stdout_file, open(job_state.error_file, 'rb') as stderr_file:
+                    stdout = shrink_stream_by_size(stdout_file, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True)
+                    stderr = shrink_stream_by_size(stderr_file, DATABASE_MAX_STRING_SIZE, join_by="\n..\n", left_larger=True, beginning_on_size_error=True)
                 break
             except Exception as e:
                 if which_try == self.app.config.retry_job_output_collection:
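A hedged usage sketch of the new call pattern (the file names are made up; it assumes DATABASE_MAX_STRING_SIZE and shrink_stream_by_size are importable from galaxy.util, as this runner module does). Opening the streams in binary mode defers decoding to shrink_stream_by_size, and the with block closes both handles even if shrinking raises:

# Sketch only, not the runner itself.
from galaxy.util import DATABASE_MAX_STRING_SIZE, shrink_stream_by_size

with open("job_stdout.txt", "rb") as stdout_file, open("job_stderr.txt", "rb") as stderr_file:
    stdout = shrink_stream_by_size(stdout_file, DATABASE_MAX_STRING_SIZE, join_by="\n..\n",
                                   left_larger=True, beginning_on_size_error=True)
    stderr = shrink_stream_by_size(stderr_file, DATABASE_MAX_STRING_SIZE, join_by="\n..\n",
                                   left_larger=True, beginning_on_size_error=True)
# stdout/stderr are now unicode text, shrunk to fit the database column.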
4 changes: 2 additions & 2 deletions lib/galaxy/jobs/runners/local.py
@@ -90,8 +90,8 @@ def queue_job(self, job_wrapper):
         job_id = job_wrapper.get_id_tag()
 
         try:
-            stdout_file = tempfile.NamedTemporaryFile(mode='w+', suffix='_stdout', dir=job_wrapper.working_directory)
-            stderr_file = tempfile.NamedTemporaryFile(mode='w+', suffix='_stderr', dir=job_wrapper.working_directory)
+            stdout_file = tempfile.NamedTemporaryFile(mode='wb+', suffix='_stdout', dir=job_wrapper.working_directory)
+            stderr_file = tempfile.NamedTemporaryFile(mode='wb+', suffix='_stderr', dir=job_wrapper.working_directory)
             log.debug('(%s) executing job script: %s' % (job_id, command_line))
             proc = subprocess.Popen(args=command_line,
                                     shell=True,
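A standalone sketch of the pattern this hunk moves to (the shell command is illustrative, not a Galaxy tool): the subprocess writes raw bytes into binary-mode temp files and decoding is deferred until the output is checked, so non-UTF-8 tool output is captured without raising here:

# Sketch only; Galaxy's local runner passes the tool's real command line and working directory.
import subprocess
import tempfile

with tempfile.NamedTemporaryFile(mode='wb+', suffix='_stdout') as stdout_file, \
        tempfile.NamedTemporaryFile(mode='wb+', suffix='_stderr') as stderr_file:
    proc = subprocess.Popen("printf 'hello from the job\\n'", shell=True,
                            stdout=stdout_file, stderr=stderr_file)
    proc.wait()
    stdout_file.seek(0)
    raw = stdout_file.read()  # raw bytes; even undecodable output lands here safely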
13 changes: 10 additions & 3 deletions lib/galaxy/util/__init__.py
@@ -331,8 +331,15 @@ def get_file_size(value, default=None):
                 return default
 
 
-def shrink_stream_by_size(value, size, join_by="..", left_larger=True, beginning_on_size_error=False, end_on_size_error=False):
-    rval = ''
+def shrink_stream_by_size(value, size, join_by=b"..", left_larger=True, beginning_on_size_error=False, end_on_size_error=False):
+    """
+    Shrinks bytes read from `value` to `size`.
+
+    `value` needs to implement tell/seek, so files need to be opened in binary mode.
+    Returns unicode text with invalid characters replaced.
+    """
+    rval = b''
+    join_by = smart_str(join_by)
     if get_file_size(value) > size:
         start = value.tell()
         len_join_by = len(join_by)
@@ -363,7 +370,7 @@ def shrink_stream_by_size(value, size, join_by="..", left_larger=True, beginning_on_size_error=False, end_on_size_error=False):
             if not data:
                 break
             rval += data
-    return rval
+    return unicodify(rval)
 
 
 def shrink_string_by_size(value, size, join_by="..", left_larger=True, beginning_on_size_error=False, end_on_size_error=False):
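A round-trip sketch of the updated contract, assuming get_file_size can size a named temporary file: the stream is opened in binary mode, join_by may be str or bytes (smart_str normalizes it), and the caller gets unicode back even when the underlying bytes are not valid UTF-8:

# Sketch only; the payload is synthetic.
import tempfile

from galaxy.util import shrink_stream_by_size

with tempfile.NamedTemporaryFile(mode='wb+') as f:
    f.write(b'A' * 100 + b'\xff' + b'B' * 100)  # 201 bytes, one of them not valid UTF-8
    f.flush()
    f.seek(0)
    text = shrink_stream_by_size(f, 50, join_by=b'..', left_larger=True)
    assert isinstance(text, str)  # unicode comes back, shrunk to roughly 50 characters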
