Merge pull request #2420 from nsoranzo/log_warn
Python3: use log.warning() instead of deprecated log.warn()
martenson committed May 26, 2016
2 parents 1453dbd + 03a92ab commit 35da049
Showing 38 changed files with 68 additions and 68 deletions.
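
For background: in Python's standard logging module, Logger.warn has been documented as a deprecated alias of Logger.warning since Python 3.3, and recent Python 3 interpreters emit a DeprecationWarning when it is called (visible when warnings are enabled). A minimal standalone sketch of the difference, not taken from the Galaxy codebase:

    import logging

    logging.basicConfig(level=logging.WARNING)
    log = logging.getLogger(__name__)

    # Canonical spelling; works on both Python 2 and Python 3.
    log.warning("search returned no results")

    # Deprecated alias; emits DeprecationWarning on Python 3 when warnings are enabled.
    log.warn("search returned no results")

The change in this commit is therefore mechanical: each log.warn(...) call becomes log.warning(...), with the arguments left untouched.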
4 changes: 2 additions & 2 deletions lib/galaxy/auth/providers/ldap_ad.py
@@ -135,7 +135,7 @@ def authenticate(self, email, username, password, options):

# parse results
if suser is None or len(suser) == 0:
- log.warn('LDAP authenticate: search returned no results')
+ log.warning('LDAP authenticate: search returned no results')
return (failure_mode, '', '')
dn, attrs = suser[0]
log.debug(("LDAP authenticate: dn is %s" % dn))
@@ -169,7 +169,7 @@ def authenticate(self, email, username, password, options):
if whoami is None:
raise RuntimeError('LDAP authenticate: anonymous bind')
except Exception:
- log.warn('LDAP authenticate: bind exception', exc_info=True)
+ log.warning('LDAP authenticate: bind exception', exc_info=True)
return (failure_mode, '', '')

log.debug('LDAP authentication successful')
24 changes: 12 additions & 12 deletions lib/galaxy/datatypes/binary.py
@@ -509,7 +509,7 @@ def get_cram_version( self, filename):
header = fh.read(6)
return ord( header[4] ), ord( header[5] )
except Exception as exc:
- log.warn( '%s, get_cram_version Exception: %s', self, exc )
+ log.warning( '%s, get_cram_version Exception: %s', self, exc )
return -1, -1

def set_index_file(self, dataset, index_file):
@@ -530,10 +530,10 @@ def set_index_file(self, dataset, index_file):
return index_file.file_name
else:
os.unlink( dataset_symlink )
- log.warn( '%s, expected crai index not created for: %s', self, dataset.file_name )
+ log.warning( '%s, expected crai index not created for: %s', self, dataset.file_name )
return False
except Exception as exc:
- log.warn( '%s, set_index_file Exception: %s', self, exc )
+ log.warning( '%s, set_index_file Exception: %s', self, exc )
return False

def set_peek( self, dataset, is_multi_byte=False ):
@@ -821,18 +821,18 @@ def set_meta( self, dataset, overwrite=True, **kwd ):
cols = [col[0] for col in cur.description]
columns[table] = cols
except Exception as exc:
- log.warn( '%s, set_meta Exception: %s', self, exc )
+ log.warning( '%s, set_meta Exception: %s', self, exc )
for table in tables:
try:
row_query = "SELECT count(*) FROM %s" % table
rowcounts[table] = c.execute(row_query).fetchone()[0]
except Exception as exc:
- log.warn( '%s, set_meta Exception: %s', self, exc )
+ log.warning( '%s, set_meta Exception: %s', self, exc )
dataset.metadata.tables = tables
dataset.metadata.table_columns = columns
dataset.metadata.table_row_count = rowcounts
except Exception as exc:
- log.warn( '%s, set_meta Exception: %s', self, exc )
+ log.warning( '%s, set_meta Exception: %s', self, exc )

def sniff( self, filename ):
# The first 16 bytes of any SQLite3 database file is 'SQLite format 3\0', and the file is binary. For details
@@ -903,7 +903,7 @@ def set_meta( self, dataset, overwrite=True, **kwd ):
dataset.metadata.gemini_version = version
# TODO: Can/should we detect even more attributes, such as use of PED file, what was input annotation type, etc.
except Exception as e:
- log.warn( '%s, set_meta Exception: %s', self, e )
+ log.warning( '%s, set_meta Exception: %s', self, e )

def sniff( self, filename ):
if super( GeminiSQLite, self ).sniff( filename ):
@@ -920,7 +920,7 @@ def sniff( self, filename ):
return False
return True
except Exception as e:
- log.warn( '%s, sniff Exception: %s', self, e )
+ log.warning( '%s, sniff Exception: %s', self, e )
return False

def set_peek( self, dataset, is_multi_byte=False ):
@@ -959,7 +959,7 @@ def sniff( self, filename ):
return False
return True
except Exception as e:
- log.warn( '%s, sniff Exception: %s', self, e )
+ log.warning( '%s, sniff Exception: %s', self, e )
return False


@@ -994,7 +994,7 @@ def sniff( self, filename ):
return False
return True
except Exception as e:
- log.warn( '%s, sniff Exception: %s', self, e )
+ log.warning( '%s, sniff Exception: %s', self, e )
return False

def set_peek( self, dataset, is_multi_byte=False ):
@@ -1274,7 +1274,7 @@ def set_meta( self, dataset, overwrite=True, **kwd ):
fh.close()
tempzip.close()
except Exception as e:
- log.warn( '%s, set_meta Exception: %s', self, e )
+ log.warning( '%s, set_meta Exception: %s', self, e )

def sniff( self, filename ):
try:
@@ -1284,7 +1284,7 @@ def sniff( self, filename ):
tempzip.close()
return is_searchgui
except Exception as e:
- log.warn( '%s, sniff Exception: %s', self, e )
+ log.warning( '%s, sniff Exception: %s', self, e )
return False

def set_peek( self, dataset, is_multi_byte=False ):
4 changes: 2 additions & 2 deletions lib/galaxy/datatypes/mothur.py
@@ -301,7 +301,7 @@ def set_meta(self, dataset, overwrite=True, skip=0, **kwd):
dataset.metadata.sequence_count = int(''.join(line)) # seq count sometimes preceded by tab
break
except Exception as e:
- log.warn("DistanceMatrix set_meta %s" % e)
+ log.warning("DistanceMatrix set_meta %s" % e)


class LowerTriangleDistanceMatrix(DistanceMatrix):
@@ -902,7 +902,7 @@ def set_meta(self, dataset, overwrite=True, skip=1, max_data_lines=None, **kwd):
flow_values = int(headers[0][0])
dataset.metadata.flow_values = flow_values
except Exception as e:
- log.warn("SffFlow set_meta %s" % e)
+ log.warning("SffFlow set_meta %s" % e)

def make_html_table(self, dataset, skipchars=[]):
"""Create HTML table, used for displaying peek"""
4 changes: 2 additions & 2 deletions lib/galaxy/datatypes/registry.py
@@ -129,11 +129,11 @@ def __import_module( full_path, datatype_module, datatype_class_name ):
make_subclass = galaxy.util.string_as_bool( elem.get( 'subclass', False ) )
edam_format = elem.get( 'edam_format', None )
if edam_format and not make_subclass:
- self.log.warn("Cannot specify edam_format without setting subclass to True, skipping datatype.")
+ self.log.warning("Cannot specify edam_format without setting subclass to True, skipping datatype.")
continue
edam_data = elem.get( 'edam_data', None )
if edam_data and not make_subclass:
- self.log.warn("Cannot specify edam_data without setting subclass to True, skipping datatype.")
+ self.log.warning("Cannot specify edam_data without setting subclass to True, skipping datatype.")
continue
# Proprietary datatypes included in installed tool shed repositories will include two special attributes
# (proprietary_path and proprietary_datatype_module) if they depend on proprietary datatypes classes.
4 changes: 2 additions & 2 deletions lib/galaxy/datatypes/text.py
@@ -528,14 +528,14 @@ def set_meta( self, dataset, overwrite=True, **kwd ):
headers = lines[0].split('\t')
dataset.metadata.annotation = headers[4:]
except Exception as e:
- log.warn("set_meta fname: %s %s" % (fname, str(e)))
+ log.warning("set_meta fname: %s %s" % (fname, str(e)))
finally:
fh.close()
if fname.endswith('.tbi'):
dataset.metadata.index = fname
self.regenerate_primary_file(dataset)
except Exception as e:
- log.warn("set_meta fname: %s %s" % (dataset.file_name if dataset and dataset.file_name else 'Unkwown', str(e)))
+ log.warning("set_meta fname: %s %s" % (dataset.file_name if dataset and dataset.file_name else 'Unkwown', str(e)))

def set_peek( self, dataset, is_multi_byte=False ):
if not dataset.dataset.purged:
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/metrics/collectl/processes.py
@@ -185,7 +185,7 @@ def get_statistics( self ):
if column == "AccumT":
# Only thing that makes sense is sum
if statistic_type != "max":
- log.warn( "Only statistic max makes sense for AccumT" )
+ log.warning( "Only statistic max makes sense for AccumT" )
continue

value = sum( [ v.max for v in self.process_accum_statistics.itervalues() ] )
2 changes: 1 addition & 1 deletion lib/galaxy/jobs/runners/local.py
@@ -107,7 +107,7 @@ def queue_job( self, job_wrapper ):
try:
exit_code = int( open( exit_code_path, 'r' ).read() )
except Exception:
- log.warn( "Failed to read exit code from path %s" % exit_code_path )
+ log.warning( "Failed to read exit code from path %s" % exit_code_path )
pass
stdout_file.seek( 0 )
stderr_file.seek( 0 )
4 changes: 2 additions & 2 deletions lib/galaxy/jobs/runners/pulsar.py
@@ -377,7 +377,7 @@ def _populate_parameter_defaults( self, job_destination ):
for key, value in self.destination_defaults.iteritems():
if key in params:
if value is PARAMETER_SPECIFICATION_IGNORED:
- log.warn( "Pulsar runner in selected configuration ignores parameter %s" % key )
+ log.warning( "Pulsar runner in selected configuration ignores parameter %s" % key )
continue
# if self.runner_params.get( key, None ):
# # Let plugin define defaults for some parameters -
@@ -668,7 +668,7 @@ def __build_metadata_configuration(self, client, job_wrapper, remote_metadata, r
if PulsarJobRunner.__use_remote_datatypes_conf( client ):
remote_datatypes_config = remote_system_properties.get('galaxy_datatypes_config_file', None)
if not remote_datatypes_config:
- log.warn(NO_REMOTE_DATATYPES_CONFIG)
+ log.warning(NO_REMOTE_DATATYPES_CONFIG)
remote_datatypes_config = os.path.join(remote_galaxy_home, 'datatypes_conf.xml')
metadata_kwds['datatypes_config'] = remote_datatypes_config
else:
2 changes: 1 addition & 1 deletion lib/galaxy/managers/citations.py
@@ -65,7 +65,7 @@ def parse_citation( elem, directory, citation_manager ):
citation_type = elem.attrib.get( 'type', None )
citation_class = CITATION_CLASSES.get( citation_type, None )
if not citation_class:
- log.warn("Unknown or unspecified citation type: %s" % citation_type)
+ log.warning("Unknown or unspecified citation type: %s" % citation_type)
return None
return citation_class( elem, directory, citation_manager )

2 changes: 1 addition & 1 deletion lib/galaxy/managers/tags.py
@@ -136,7 +136,7 @@ def apply_item_tag( self, user, item, name, value=None ):
# Create tag; if None, skip the tag (and log error).
tag = self._get_or_create_tag( lc_name )
if not tag:
- log.warn( "Failed to create tag with name %s" % lc_name )
+ log.warning( "Failed to create tag with name %s" % lc_name )
return
# Create tag association based on item class.
item_tag_assoc_class = self.get_tag_assoc_class( item.__class__ )
@@ -37,7 +37,7 @@ def upgrade(migrate_engine):
try:
table.create()
except:
- log.warn( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+ log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )


def downgrade(migrate_engine):
2 changes: 1 addition & 1 deletion lib/galaxy/model/migrate/versions/0046_post_job_actions.py
@@ -37,7 +37,7 @@ def upgrade(migrate_engine):
try:
table.create()
except:
- log.warn( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+ log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )


def downgrade(migrate_engine):
2 changes: 1 addition & 1 deletion lib/galaxy/model/migrate/versions/0056_workflow_outputs.py
@@ -26,7 +26,7 @@ def upgrade(migrate_engine):
try:
table.create()
except:
- log.warn( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+ log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )


def downgrade(migrate_engine):
2 changes: 1 addition & 1 deletion lib/galaxy/model/migrate/versions/0061_tasks.py
@@ -38,7 +38,7 @@ def upgrade(migrate_engine):
try:
table.create()
except:
- log.warn( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )
+ log.warning( "Failed to create table '%s', ignoring (might result in wrong schema)" % table.name )


def downgrade(migrate_engine):
2 changes: 1 addition & 1 deletion lib/galaxy/tools/__init__.py
@@ -428,7 +428,7 @@ def parse( self, tool_source, guid=None ):
if self.profile >= 16.04 and VERSION_MAJOR < self.profile:
template = "The tool %s targets version %s of Galaxy, you should upgrade Galaxy to ensure proper functioning of this tool."
message = template % (self.id, self.profile)
- log.warn(message)
+ log.warning(message)

# Get the (user visible) name of the tool
self.name = tool_source.parse_name()
6 changes: 3 additions & 3 deletions lib/galaxy/tools/data/__init__.py
@@ -340,7 +340,7 @@ def configure_and_load( self, config_element, tool_data_path, from_shed_config=F
self._update_version()
else:
self.missing_index_file = filename
- log.warn( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
+ log.warning( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )

if filename not in self.filenames or not self.filenames[ filename ][ 'found' ]:
self.filenames[ filename ] = dict( found=found, filename=filename, from_shed_config=from_shed_config, tool_data_path=tool_data_path,
@@ -461,7 +461,7 @@ def parse_file_fields( self, reader, errors=None, here="__HERE__" ):
line_error = "Line %i in tool data table '%s' is invalid (HINT: '%s' characters must be used to separate fields):\n%s" % ( ( i + 1 ), self.name, separator_char, line )
if errors is not None:
errors.append( line_error )
- log.warn( line_error )
+ log.warning( line_error )
return rval

def get_column_name_list( self ):
@@ -590,7 +590,7 @@ def _remove_entry( self, values):
values = self._replace_field_separators( values )
self.filter_file_fields( filename, values )
else:
- log.warn( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )
+ log.warning( "Cannot find index file '%s' for tool data table '%s'" % ( filename, self.name ) )

self.reload_from_files()

6 changes: 3 additions & 3 deletions lib/galaxy/tools/deps/__init__.py
@@ -78,9 +78,9 @@ def __init__( self, default_base_path, conf_file=None, **extra_config ):
in `base_paths`. The default base path is app.config.tool_dependency_dir.
"""
if not os.path.exists( default_base_path ):
- log.warn( "Path '%s' does not exist, ignoring", default_base_path )
+ log.warning( "Path '%s' does not exist, ignoring", default_base_path )
if not os.path.isdir( default_base_path ):
- log.warn( "Path '%s' is not directory, ignoring", default_base_path )
+ log.warning( "Path '%s' is not directory, ignoring", default_base_path )
self.extra_config = extra_config
self.default_base_path = os.path.abspath( default_base_path )
self.resolver_classes = self.__resolvers_dict()
@@ -98,7 +98,7 @@ def dependency_shell_commands( self, requirements, **kwds ):
**kwds )
dependency_commands = dependency.shell_commands( requirement )
if not dependency_commands:
- log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
+ log.warning( "Failed to resolve dependency on '%s', ignoring", requirement.name )
else:
commands.append( dependency_commands )
return commands
2 changes: 1 addition & 1 deletion lib/galaxy/tools/deps/resolvers/conda.py
@@ -102,7 +102,7 @@ def resolve(self, name, version, type, **kwds):

job_directory = kwds.get("job_directory", None)
if job_directory is None:
- log.warn("Conda dependency resolver not sent job directory.")
+ log.warning("Conda dependency resolver not sent job directory.")
return INDETERMINATE_DEPENDENCY

exact = not self.versionless or version is None
2 changes: 1 addition & 1 deletion lib/galaxy/tools/deps/resolvers/galaxy_packages.py
@@ -97,7 +97,7 @@ def exact(self):
def shell_commands( self, requirement ):
base_path = self.path
if self.script is None and base_path is None:
- log.warn( "Failed to resolve dependency on '%s', ignoring", requirement.name )
+ log.warning( "Failed to resolve dependency on '%s', ignoring", requirement.name )
commands = None
elif requirement.type == 'package' and self.script is None:
commands = 'PACKAGE_BASE=%s; export PACKAGE_BASE; PATH="%s/bin:$PATH"; export PATH' % ( base_path, base_path )
2 changes: 1 addition & 1 deletion lib/galaxy/tools/deps/resolvers/modules.py
@@ -74,7 +74,7 @@ def __init__(self, module_dependency_resolver, modulepath, prefetch):
self.module_dependency_resolver = module_dependency_resolver
self.directories = modulepath.split(pathsep)
if prefetch:
- log.warn("Created module dependency resolver with prefetch enabled, but directory module checker does not support this.")
+ log.warning("Created module dependency resolver with prefetch enabled, but directory module checker does not support this.")

def has_module(self, module, version):
has_module = False
4 changes: 2 additions & 2 deletions lib/galaxy/tools/execute.py
@@ -109,7 +109,7 @@ def record_success( self, job, outputs ):
def record_error( self, error ):
self.failed_jobs += 1
message = "There was a failure executing a job for tool [%s] - %s"
- log.warn(message, self.tool.id, error)
+ log.warning(message, self.tool.id, error)
self.execution_errors.append( error )

def create_output_collections( self, trans, history, params ):
@@ -137,7 +137,7 @@ def create_output_collections( self, trans, history, params ):
if not len( structure ) == len( outputs ):
# Output does not have the same structure, if all jobs were
# successfully submitted this shouldn't have happened.
- log.warn( "Problem matching up datasets while attempting to create implicit dataset collections")
+ log.warning( "Problem matching up datasets while attempting to create implicit dataset collections")
continue
output = self.tool.outputs[ output_name ]
element_identifiers = structure.element_identifiers_for_outputs( trans, outputs )
2 changes: 1 addition & 1 deletion lib/galaxy/tools/loader_directory.py
@@ -26,7 +26,7 @@

def load_exception_handler(path, exc_info):
"""Default exception handler for use by load_tool_elements_from_path."""
- log.warn(LOAD_FAILURE_ERROR % path, exc_info=exc_info)
+ log.warning(LOAD_FAILURE_ERROR % path, exc_info=exc_info)


def find_possible_tools_from_path(