Skip to content

Commit

Permalink
Merge branch 'dev' of git://github.com/galaxyproject/galaxy into ts_api_backbone
Browse files Browse the repository at this point in the history
  • Loading branch information
davebx committed Apr 24, 2017
2 parents ee755cb + 4395792 commit 88a3ee1
Show file tree
Hide file tree
Showing 62 changed files with 164 additions and 168 deletions.
3 changes: 1 addition & 2 deletions config/openid_conf.xml.sample
@@ -1,8 +1,7 @@
<?xml version="1.0"?>
<openid>
<provider file="google.xml" />
<provider file="genomespace.xml" />
<provider file="yahoo.xml" />
<provider file="aol.xml" />
<provider file="launchpad.xml" />
<provider file="genomespace.xml" />
</openid>
14 changes: 7 additions & 7 deletions lib/galaxy/datatypes/data.py
Expand Up @@ -129,7 +129,7 @@ def get_raw_data( self, dataset ):
try:
return open(dataset.file_name, 'rb').read(-1)
except OSError:
log.exception('%s reading a file that does not exist %s' % (self.__class__.__name__, dataset.file_name))
log.exception('%s reading a file that does not exist %s', self.__class__.__name__, dataset.file_name)
return ''

def dataset_content_needs_grooming( self, file_name ):
Expand Down Expand Up @@ -229,7 +229,7 @@ def _archive_main_file(self, archive, display_name, data_filename):
archive.add(data_filename, archname)
except IOError:
error = True
log.exception("Unable to add composite parent %s to temporary library download archive" % data_filename)
log.exception("Unable to add composite parent %s to temporary library download archive", data_filename)
msg = "Unable to create archive for download, please report this error"
messagetype = "error"
return error, msg, messagetype
Expand Down Expand Up @@ -284,7 +284,7 @@ def _archive_composite_dataset( self, trans, data=None, **kwd ):
archive.add( fpath, rpath )
except IOError:
error = True
log.exception( "Unable to add %s to temporary library download archive" % rpath)
log.exception( "Unable to add %s to temporary library download archive", rpath)
msg = "Unable to create archive for download, please report this error"
continue
if not error:
Expand Down Expand Up @@ -439,7 +439,7 @@ def remove_display_app(self, app_id):
try:
del self.supported_display_apps[app_id]
except:
log.exception('Tried to remove display app %s from datatype %s, but this display app is not declared.' % ( type, self.__class__.__name__ ) )
log.exception('Tried to remove display app %s from datatype %s, but this display app is not declared.', type, self.__class__.__name__ )

def clear_display_apps( self ):
self.supported_display_apps = {}
Expand Down Expand Up @@ -477,7 +477,7 @@ def as_display_type(self, dataset, type, **kwd):
if type in self.get_display_types():
return getattr(self, self.supported_display_apps[type]['file_function'])(dataset, **kwd)
except:
log.exception('Function %s is referred to in datatype %s for displaying as type %s, but is not accessible' % (self.supported_display_apps[type]['file_function'], self.__class__.__name__, type) )
log.exception('Function %s is referred to in datatype %s for displaying as type %s, but is not accessible', self.supported_display_apps[type]['file_function'], self.__class__.__name__, type )
return "This display type (%s) is not implemented for this datatype (%s)." % ( type, dataset.ext)

def get_display_links( self, dataset, type, app, base_url, target_frame='_blank', **kwd ):
Expand All @@ -491,8 +491,8 @@ def get_display_links( self, dataset, type, app, base_url, target_frame='_blank'
if app.config.enable_old_display_applications and type in self.get_display_types():
return target_frame, getattr( self, self.supported_display_apps[type]['links_function'] )( dataset, type, app, base_url, **kwd )
except:
log.exception( 'Function %s is referred to in datatype %s for generating links for type %s, but is not accessible'
% ( self.supported_display_apps[type]['links_function'], self.__class__.__name__, type ) )
log.exception( 'Function %s is referred to in datatype %s for generating links for type %s, but is not accessible',
self.supported_display_apps[type]['links_function'], self.__class__.__name__, type )
return target_frame, []

def get_converter_types(self, original_dataset, datatypes_registry):
Expand Down
26 changes: 13 additions & 13 deletions lib/galaxy/datatypes/registry.py
Expand Up @@ -187,7 +187,7 @@ def __import_module( full_path, datatype_module, datatype_class_name ):
datatype_module = fields[ 0 ]
datatype_class_name = fields[ 1 ]
except Exception as e:
self.log.exception( 'Error parsing datatype definition for dtype %s: %s' % ( str( dtype ), str( e ) ) )
self.log.exception( 'Error parsing datatype definition for dtype %s', str( dtype ) )
ok = False
if ok:
datatype_class = None
Expand Down Expand Up @@ -217,13 +217,13 @@ def __import_module( full_path, datatype_module, datatype_class_name ):
datatype_class = getattr( module, datatype_class_name )
self.log.debug( 'Retrieved datatype module %s:%s from the datatype registry.' % ( str( datatype_module ), datatype_class_name ) )
except Exception as e:
self.log.exception( 'Error importing datatype module %s: %s' % ( str( datatype_module ), str( e ) ) )
self.log.exception( 'Error importing datatype module %s', str( datatype_module ) )
ok = False
elif type_extension is not None:
try:
datatype_class = self.datatypes_by_extension[ type_extension ].__class__
except Exception as e:
self.log.exception( 'Error determining datatype_class for type_extension %s: %s' % ( str( type_extension ), str( e ) ) )
self.log.exception( 'Error determining datatype_class for type_extension %s', str( type_extension ) )
ok = False
if ok:
if not deactivate:
Expand Down Expand Up @@ -324,7 +324,7 @@ def _load_build_sites( self, root ):
def load_build_site( build_site_config ):
# Take in either an XML element or simple dictionary from YAML and add build site for this.
if not (build_site_config.get( 'type' ) and build_site_config.get( 'file' )):
self.log.exception( "Site is missing required 'type' and 'file' attributes: %s" )
self.log.exception( "Site is missing required 'type' and 'file' attributes" )
return

site_type = build_site_config.get( 'type' )
Expand Down Expand Up @@ -397,7 +397,7 @@ def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_d
datatype_class_name = fields[ 1 ]
module = None
except Exception as e:
self.log.exception( 'Error determining datatype class or module for dtype %s: %s' % ( str( dtype ), str( e ) ) )
self.log.exception( 'Error determining datatype class or module for dtype %s', str( dtype ) )
ok = False
if ok:
if handling_proprietary_datatypes:
Expand All @@ -413,13 +413,13 @@ def load_datatype_sniffers( self, root, deactivate=False, handling_proprietary_d
for comp in datatype_module.split( '.' )[ 1: ]:
module = getattr( module, comp )
except Exception as e:
self.log.exception( "Error importing datatype class for '%s': %s" % ( str( dtype ), str( e ) ) )
self.log.exception( "Error importing datatype class for '%s'", str( dtype ) )
ok = False
if ok:
try:
aclass = getattr( module, datatype_class_name )()
except Exception as e:
self.log.exception( 'Error calling method %s from class %s: %s', str( datatype_class_name ), str( module ), str( e ) )
self.log.exception( 'Error calling method %s from class %s', str( datatype_class_name ), str( module ) )
ok = False
if ok:
if deactivate:
Expand Down Expand Up @@ -560,11 +560,11 @@ def load_datatype_converters( self, toolbox, installed_repository_dict=None, dea
self.datatype_converters[ source_datatype ] = odict()
self.datatype_converters[ source_datatype ][ target_datatype ] = converter
self.log.debug( "Loaded converter: %s", converter.id )
except Exception as e:
except Exception:
if deactivate:
self.log.exception( "Error deactivating converter from (%s): %s" % ( converter_path, str( e ) ) )
self.log.exception( "Error deactivating converter from (%s)" % converter_path )
else:
self.log.exception( "Error loading converter (%s): %s" % ( converter_path, str( e ) ) )
self.log.exception( "Error loading converter (%s)" % converter_path )

def load_display_applications( self, app, installed_repository_dict=None, deactivate=False ):
"""
Expand Down Expand Up @@ -628,11 +628,11 @@ def load_display_applications( self, app, installed_repository_dict=None, deacti
if inherit and ( self.datatypes_by_extension[ extension ], display_app ) not in self.inherit_display_application_by_class:
self.inherit_display_application_by_class.append( ( self.datatypes_by_extension[ extension ], display_app ) )
self.log.debug( "Loaded display application '%s' for datatype '%s', inherit=%s." % ( display_app.id, extension, inherit ) )
except Exception as e:
except Exception:
if deactivate:
self.log.exception( "Error deactivating display application (%s): %s" % ( config_path, str( e ) ) )
self.log.exception( "Error deactivating display application (%s)" % config_path )
else:
self.log.exception( "Error loading display application (%s): %s" % ( config_path, str( e ) ) )
self.log.exception( "Error loading display application (%s)" % config_path )
# Handle display_application subclass inheritance.
for extension, d_type1 in self.datatypes_by_extension.iteritems():
for d_type2, display_app in self.inherit_display_application_by_class:
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/datatypes/tabular.py
Expand Up @@ -167,7 +167,7 @@ def make_html_peek_header( self, dataset, skipchars=None, column_names=None, col
out.append( '</th>' )
out.append( '</tr>' )
except Exception as exc:
log.exception( 'make_html_peek_header failed on HDA %s' % dataset.id )
log.exception( 'make_html_peek_header failed on HDA %s', dataset.id )
raise Exception( "Can't create peek header %s" % str( exc ) )
return "".join( out )

Expand Down Expand Up @@ -198,7 +198,7 @@ def make_html_peek_rows( self, dataset, skipchars=None, **kwargs ):
out.append( '<td>%s</td>' % escape( elem ) )
out.append( '</tr>' )
except Exception as exc:
log.exception( 'make_html_peek_rows failed on HDA %s' % dataset.id )
log.exception( 'make_html_peek_rows failed on HDA %s', dataset.id )
raise Exception( "Can't create peek rows %s" % str( exc ) )
return "".join( out )

Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/text.py
Expand Up @@ -150,7 +150,7 @@ def _display_data_trusted(self, trans, dataset, preview=False, filename=None, to
ofilename = '%s.html' % ofilename
except:
ofilename = dataset.file_name
log.exception( 'Command "%s" failed. Could not convert the Jupyter Notebook to HTML, defaulting to plain text.' % cmd )
log.exception( 'Command "%s" failed. Could not convert the Jupyter Notebook to HTML, defaulting to plain text.', cmd )
return open( ofilename )

def set_meta( self, dataset, **kwd ):
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/jobs/__init__.py
Expand Up @@ -631,8 +631,8 @@ def get_job_runner_plugins(self, handler_id):
try:
rval[id] = runner_class( self.app, runner[ 'workers' ], **runner.get( 'kwds', {} ) )
except TypeError:
log.exception( "Job runner '%s:%s' has not been converted to a new-style runner or encountered TypeError on load"
% ( module_name, class_name ) )
log.exception( "Job runner '%s:%s' has not been converted to a new-style runner or encountered TypeError on load",
module_name, class_name )
rval[id] = runner_class( self.app )
log.debug( "Loaded job runner '%s:%s' as '%s'" % ( module_name, class_name, id ) )
return rval
Expand Down Expand Up @@ -1443,7 +1443,7 @@ def cleanup( self, delete_files=True ):
if delete_files:
self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, obj_dir=True)
except:
log.exception( "Unable to cleanup job %d" % self.job_id )
log.exception( "Unable to cleanup job %d", self.job_id )

def _collect_extra_files(self, dataset, job_working_directory):
temp_file_path = os.path.join( job_working_directory, "dataset_%s_files" % ( dataset.id ) )
Expand Down
10 changes: 5 additions & 5 deletions lib/galaxy/jobs/deferred/__init__.py
Expand Up @@ -40,7 +40,7 @@ def _load_plugins( self ):
try:
module = __import__( module_name )
except:
log.exception( 'Deferred job plugin appears to exist but is not loadable: %s' % module_name )
log.exception( 'Deferred job plugin appears to exist but is not loadable: %s', module_name )
continue
for comp in module_name.split( "." )[1:]:
module = getattr( module, comp )
Expand Down Expand Up @@ -105,16 +105,16 @@ def __monitor_step( self ):
if job.is_check_time:
try:
job_state = self.plugins[job.plugin].check_job( job )
except Exception as e:
except Exception:
self.__fail_job( job )
log.exception( 'Set deferred job %s to error because of an exception in check_job(): %s' % ( job.id, str( e ) ) )
log.exception( 'Set deferred job %s to error because of an exception in check_job()' % job.id )
continue
if job_state == self.job_states.READY:
try:
self.plugins[job.plugin].run_job( job )
except Exception as e:
except Exception:
self.__fail_job( job )
log.exception( 'Set deferred job %s to error because of an exception in run_job(): %s' % ( job.id, str( e ) ) )
log.exception( 'Set deferred job %s to error because of an exception in run_job()' % job.id )
continue
elif job_state == self.job_states.INVALID:
self.__fail_job( job )
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/jobs/handler.py
Expand Up @@ -309,7 +309,7 @@ def __monitor_step( self ):
log.error( "(%d) Job in unknown state '%s'" % ( job.id, job_state ) )
new_waiting_jobs.append( job.id )
except Exception:
log.exception( "failure running job %d" % job.id )
log.exception( "failure running job %d", job.id )
# Update the waiting list
if not self.track_jobs_in_database:
self.waiting_jobs = new_waiting_jobs
Expand Down Expand Up @@ -784,8 +784,8 @@ def url_to_destination( self, url ):
runner_name = url.split(':', 1)[0]
try:
return self.job_runners[runner_name].url_to_destination(url)
except Exception as e:
log.exception("Unable to convert legacy job runner URL '%s' to job destination, destination will be the '%s' runner with no params: %s" % (url, runner_name, e))
except Exception:
log.exception("Unable to convert legacy job runner URL '%s' to job destination, destination will be the '%s' runner with no params", url, runner_name)
return JobDestination(runner=runner_name)

def put( self, job_wrapper ):
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/jobs/metrics/__init__.py
Expand Up @@ -94,7 +94,7 @@ def pre_execute_commands(self, job_directory):
if plugin_commands:
commands.extend(util.listify(plugin_commands))
except Exception:
log.exception("Failed to generate pre-execute commands for plugin %s" % plugin)
log.exception("Failed to generate pre-execute commands for plugin %s", plugin)
return "\n".join([ c for c in commands if c ])

def post_execute_commands(self, job_directory):
Expand All @@ -105,7 +105,7 @@ def post_execute_commands(self, job_directory):
if plugin_commands:
commands.extend(util.listify(plugin_commands))
except Exception:
log.exception("Failed to generate post-execute commands for plugin %s" % plugin)
log.exception("Failed to generate post-execute commands for plugin %s", plugin)
return "\n".join([ c for c in commands if c ])

def collect_properties(self, job_id, job_directory):
Expand All @@ -116,7 +116,7 @@ def collect_properties(self, job_id, job_directory):
if properties:
per_plugin_properites[ plugin.plugin_type ] = properties
except Exception:
log.exception("Failed to collect job properties for plugin %s" % plugin)
log.exception("Failed to collect job properties for plugin %s", plugin)
return per_plugin_properites

def __plugins_from_source(self, plugins_source):
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/runners/__init__.py
Expand Up @@ -247,7 +247,7 @@ def get_work_dir_outputs( self, job_wrapper, job_working_directory=None, tool_wo
output_pairs.append( ( source_file, destination ) )
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
log.exception( "from_work_dir specified a location not in the working directory: %s, %s", source_file, job_wrapper.working_directory )
return output_pairs

def _walk_dataset_outputs( self, job ):
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/runners/local.py
Expand Up @@ -120,7 +120,7 @@ def queue_job( self, job_wrapper ):
stderr_file.close()
log.debug('execution finished: %s' % command_line)
except Exception:
log.exception("failure running job %d" % job_wrapper.job_id)
log.exception("failure running job %d", job_wrapper.job_id)
self._fail_job_local(job_wrapper, "failure running job")
return

Expand Down

0 comments on commit 88a3ee1

Please sign in to comment.