Commit 65e2e0b

Merge remote-tracking branch 'origin/release_18.01' into dev
natefoo committed Feb 7, 2018
2 parents 12f7241 + 62540a9 commit 65e2e0b
Showing 4 changed files with 30 additions and 45 deletions.
67 changes: 25 additions & 42 deletions lib/galaxy/tools/deps/__init__.py
@@ -109,42 +109,38 @@ def get_app_option(self, key, default=None):

def dependency_shell_commands(self, requirements, **kwds):
requirements_to_dependencies = self.requirements_to_dependencies(requirements, **kwds)
shell_commands = []
for dependencies in requirements_to_dependencies:
ordered_dependencies = OrderedSet(dependencies.values())
try:
shell_commands = [dependency.shell_commands() for dependency in ordered_dependencies]
if 'tool_instance' in kwds:
# We log the dependencies on the tool instance,
# which subsequently is used to log the used dependencies for this job.
kwds['tool_instance'].dependencies = [dep.to_dict() for dep in ordered_dependencies]
except Exception as e:
log.exception(e)
return shell_commands
ordered_dependencies = OrderedSet(requirements_to_dependencies.values())
return [dependency.shell_commands() for dependency in ordered_dependencies]
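The rewritten dependency_shell_commands flattens a single requirement-to-dependency mapping, deduplicates dependencies shared between requirements, and returns one activation command per unique dependency. A minimal sketch of that pattern, using a hypothetical FakeDependency stand-in and OrderedDict.fromkeys in place of Galaxy's OrderedSet:

```python
from collections import OrderedDict

# Hypothetical stand-in for a resolved dependency; Galaxy's real Dependency
# objects expose shell_commands() in the same way.
class FakeDependency(object):
    def __init__(self, name):
        self.name = name

    def shell_commands(self):
        return ". /deps/%s/env.sh" % self.name

    def __hash__(self):
        return hash(self.name)

    def __eq__(self, other):
        return self.name == other.name

# Two requirements resolving to the same underlying dependency.
requirement_to_dependency = OrderedDict([
    ("samtools", FakeDependency("samtools")),
    ("htslib", FakeDependency("samtools")),
])

# Deduplicate while preserving resolution order (what OrderedSet does above),
# then emit one activation command per unique dependency.
ordered_dependencies = list(OrderedDict.fromkeys(requirement_to_dependency.values()))
shell_commands = [dep.shell_commands() for dep in ordered_dependencies]
print(shell_commands)  # ['. /deps/samtools/env.sh']
```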

def requirements_to_dependencies(self, requirements, **kwds):
"""
Takes a list of requirements and returns a list of dictionaries
with requirements as key and dependencies as value.
Each dictionary corresponds to one dependency resolver.
Takes a list of requirements and returns a dictionary
with requirements as key and dependencies as value caching
these on the tool instance if supplied.
"""
resolver_requirements = []
requirement_to_dependency = self._requirements_to_dependencies_dict(requirements, **kwds)

if 'tool_instance' in kwds:
kwds['tool_instance'].dependencies = [dep.to_dict() for dep in requirement_to_dependency.values()]

return requirement_to_dependency

def _requirements_to_dependencies_dict(self, requirements, **kwds):
"""Build simple requirements to dependencies dict for resolution."""
requirement_to_dependency = OrderedDict()
index = kwds.get('index', None)
require_exact = kwds.get('exact', False)
return_null_dependencies = kwds.get('return_null', False)

resolvable_requirements = requirements.resolvable

for i, resolver in enumerate(self.dependency_resolvers):
requirement_to_dependency = OrderedDict()

if index is not None and i != index:
continue

if len(requirement_to_dependency) == len(resolvable_requirements):
# Shortcut - resolution complete.
resolver_requirements.append(requirement_to_dependency)
continue
break

# Check requirements all at once
all_unmet = len(requirement_to_dependency) == 0
@@ -157,12 +153,8 @@ def requirements_to_dependencies(self, requirements, **kwds):
log.debug(dependency.resolver_msg)
requirement_to_dependency[requirement] = dependency

# Shortcut - resolution complete for this resolver.
resolver_requirements.append(requirement_to_dependency)
# We have a complete set of dependencies, don't install
# via lower-ranked resolvers
kwds['install'] = False
continue
# Shortcut - resolution complete.
break

# Check individual requirements
for requirement in resolvable_requirements:
@@ -179,27 +171,18 @@ def requirements_to_dependencies(self, requirements, **kwds):
elif return_null_dependencies and (resolver == self.dependency_resolvers[-1] or i == index):
log.debug(dependency.resolver_msg)
requirement_to_dependency[requirement] = dependency
if requirement_to_dependency:
resolver_requirements.append(requirement_to_dependency)
if (len(requirement_to_dependency) == len(resolvable_requirements) and
all(True for d in requirement_to_dependency if not isinstance(d, NullDependency))):
# We resolved all individual requirements, no need to further install dependencies
# resolved by subsequent resolvers.
kwds['install'] = False
if not resolver_requirements:
# Have at least a single empty dict
resolver_requirements.append(OrderedDict())
return resolver_requirements

return requirement_to_dependency
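The consolidated _requirements_to_dependencies_dict walks the configured resolvers in rank order and stops as soon as every resolvable requirement has a dependency, instead of collecting one dict per resolver. A toy illustration of that early-exit loop (the resolvers here are plain dicts mapping requirement names to paths, not Galaxy resolver objects):

```python
from collections import OrderedDict

# Made-up resolvers, highest rank first; each knows a subset of the requirements.
resolvers = [
    {"samtools": "/conda/envs/__samtools@1.3.1"},
    {"samtools": "/tool_shed/samtools", "bwa": "/tool_shed/bwa"},
]
resolvable_requirements = ["samtools", "bwa"]

requirement_to_dependency = OrderedDict()
for resolver in resolvers:
    for name in resolvable_requirements:
        if name in requirement_to_dependency:
            continue  # already resolved by a higher-ranked resolver
        if name in resolver:
            requirement_to_dependency[name] = resolver[name]
    if len(requirement_to_dependency) == len(resolvable_requirements):
        break  # shortcut - resolution complete, skip lower-ranked resolvers

print(requirement_to_dependency)
# OrderedDict([('samtools', '/conda/envs/__samtools@1.3.1'), ('bwa', '/tool_shed/bwa')])
```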

def uses_tool_shed_dependencies(self):
return any(map(lambda r: isinstance(r, ToolShedPackageDependencyResolver), self.dependency_resolvers))

def find_dep(self, name, version=None, type='package', **kwds):
log.debug('Find dependency %s version %s' % (name, version))
requirements = ToolRequirements([ToolRequirement(name=name, version=version, type=type)])
dependencies = self.requirements_to_dependencies(requirements, **kwds)[0]
if len(dependencies) > 0:
return dependencies.values()[0]
dep_dict = self._requirements_to_dependencies_dict(requirements, **kwds)
if len(dep_dict) > 0:
return dep_dict.values()[0]
else:
return NullDependency(name=name, version=version)

@@ -238,7 +221,7 @@ def __init__(self, default_base_path, conf_file=None, app_config={}, tool_depend
self.tool_dependency_cache_dir = self.get_app_option("tool_dependency_cache_dir")

def build_cache(self, requirements, **kwds):
resolved_dependencies = self.requirements_to_dependencies(requirements, **kwds)[0]
resolved_dependencies = self.requirements_to_dependencies(requirements, **kwds)
cacheable_dependencies = [dep for dep in resolved_dependencies.values() if dep.cacheable]
hashed_dependencies_dir = self.get_hashed_dependencies_path(cacheable_dependencies)
if os.path.exists(hashed_dependencies_dir):
@@ -261,7 +244,7 @@ def dependency_shell_commands(self, requirements, **kwds):
If cached environment exists or is successfully created, will generate
commands to activate it.
"""
resolved_dependencies = self.requirements_to_dependencies(requirements, **kwds)[0]
resolved_dependencies = self.requirements_to_dependencies(requirements, **kwds)
cacheable_dependencies = [dep for dep in resolved_dependencies.values() if dep.cacheable]
hashed_dependencies_dir = self.get_hashed_dependencies_path(cacheable_dependencies)
if not os.path.exists(hashed_dependencies_dir) and self.get_app_option("precache_dependencies", False):
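In CachedDependencyManager, both build_cache and dependency_shell_commands now receive the single requirement-to-dependency dict directly (no [0] indexing) and key the cache directory off the cacheable dependencies. The sketch below only illustrates the idea of deriving a stable cache path from those dependencies; Galaxy's actual scheme lives in get_hashed_dependencies_path and may differ.

```python
import hashlib
import json
import os

def hashed_dependencies_path(cache_root, cacheable_dependencies):
    # Serialize the cacheable dependency descriptions deterministically and
    # hash them, so the same set of dependencies always maps to the same
    # cache directory (illustrative only).
    payload = json.dumps(cacheable_dependencies, sort_keys=True).encode("utf-8")
    return os.path.join(cache_root, hashlib.sha256(payload).hexdigest()[:16])

deps = [{"name": "samtools", "version": "1.3.1", "type": "package"}]
print(hashed_dependencies_path("/galaxy/database/dependency_cache", deps))
```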
4 changes: 2 additions & 2 deletions lib/galaxy/tools/deps/views.py
@@ -52,7 +52,7 @@ def show_dependencies(self, tool_requirements_d, installed_tool_dependencies=Non
kwds = {'install': False,
'return_null': True,
'installed_tool_dependencies': installed_tool_dependencies}
dependencies_per_tool = {tool: self._dependency_manager.requirements_to_dependencies(requirements, **kwds)[0] for tool, requirements in tool_requirements_d.items()}
dependencies_per_tool = {tool: self._dependency_manager.requirements_to_dependencies(requirements, **kwds) for tool, requirements in tool_requirements_d.items()}
return dependencies_per_tool

def uninstall_dependencies(self, index=None, **payload):
@@ -101,7 +101,7 @@ def remove_unused_dependency_paths(self, envs):
return list(removed_environments)

def install_dependencies(self, requirements):
return self._dependency_manager.requirements_to_dependencies(requirements, **{'install': True})
return self._dependency_manager._requirements_to_dependencies_dict(requirements, **{'install': True})

def install_dependency(self, index=None, **payload):
"""
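With requirements_to_dependencies returning a single mapping, show_dependencies now yields one requirement-to-dependency dict per tool rather than a list of per-resolver dicts, hence the dropped [0] above. An illustrative look at the resulting shape (string paths and None stand in for Galaxy's Dependency and NullDependency objects; the tool id is made up):

```python
from collections import OrderedDict

# Hypothetical return value of show_dependencies for a single tool.
dependencies_per_tool = {
    "toolshed.g2.bx.psu.edu/repos/devteam/samtools_flagstat/2.0": OrderedDict([
        ("samtools", "/deps/_conda/envs/__samtools@1.3.1"),  # resolved
        ("htslib", None),                                    # would be a NullDependency
    ]),
}

for tool, requirement_to_dependency in dependencies_per_tool.items():
    for requirement, dependency in requirement_to_dependency.items():
        print("%s: %s -> %s" % (tool, requirement, dependency or "unresolved"))
```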
2 changes: 2 additions & 0 deletions lib/galaxy/tools/parser/xml.py
@@ -857,6 +857,8 @@ def parse_error_level(self, err_level):
return_level = StdioErrorLevel.LOG
elif (re.search("warning", err_level, re.IGNORECASE)):
return_level = StdioErrorLevel.WARNING
elif (re.search("fatal_oom", err_level, re.IGNORECASE)):
return_level = StdioErrorLevel.FATAL_OOM
elif (re.search("fatal", err_level, re.IGNORECASE)):
return_level = StdioErrorLevel.FATAL
else:
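The new fatal_oom branch has to sit above the existing fatal branch: re.search("fatal", ...) also matches the string "fatal_oom", so reversing the order would silently downgrade out-of-memory errors to plain fatal. A self-contained sketch of the lookup (the constants are stand-ins for Galaxy's StdioErrorLevel):

```python
import re

# Stand-in constants for Galaxy's StdioErrorLevel values.
LOG, WARNING, FATAL_OOM, FATAL = "log", "warning", "fatal_oom", "fatal"

def parse_error_level(err_level):
    # Order matters: "fatal_oom" is tested before "fatal" because the
    # "fatal" pattern would also match "fatal_oom".
    if re.search("log", err_level, re.IGNORECASE):
        return LOG
    elif re.search("warning", err_level, re.IGNORECASE):
        return WARNING
    elif re.search("fatal_oom", err_level, re.IGNORECASE):
        return FATAL_OOM
    elif re.search("fatal", err_level, re.IGNORECASE):
        return FATAL
    return LOG  # fall back to a default level

print(parse_error_level("FATAL_OOM"))  # fatal_oom
print(parse_error_level("fatal"))      # fatal
```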
2 changes: 1 addition & 1 deletion static/style/blue/base.css

Large diffs are not rendered by default.
