Skip to content

Commit

Permalink
Merge branch 'dev' into remove_samples_000
Browse files Browse the repository at this point in the history
  • Loading branch information
guerler committed Sep 21, 2017
2 parents ebbe5a1 + 98f2c10 commit 1494207
Show file tree
Hide file tree
Showing 50 changed files with 671 additions and 1,526 deletions.
2 changes: 2 additions & 0 deletions client/galaxy/style/less/upload.less
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,8 @@
resize: none;
background: inherit;
color: @black;
white-space: nowrap;
overflow: scroll;
}
.upload-text-info {
&:extend(.text-primary);
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -1019,7 +1019,7 @@ def _configure_tool_shed_registry(self):

# Set up the tool sheds registry
if os.path.isfile(self.config.tool_sheds_config_file):
self.tool_shed_registry = tool_shed.tool_shed_registry.Registry(self.config.root, self.config.tool_sheds_config_file)
self.tool_shed_registry = tool_shed.tool_shed_registry.Registry(self.config.tool_sheds_config_file)
else:
self.tool_shed_registry = None

Expand Down
5 changes: 5 additions & 0 deletions lib/galaxy/dependencies/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,11 @@ def check_azure_storage(self):
def check_kamaki(self):
return 'pithos' in self.object_stores

def check_watchdog(self):
    """Return True when the ``watchdog`` package should be installed.

    It is needed whenever tool-file or tool-data-directory watching is
    enabled through the ``watch_tools`` / ``watch_tool_data_dir`` options.
    """
    enabled_values = ('auto', 'True', 'true', 'polling')
    return any(
        self.config[option] in enabled_values
        for option in ('watch_tools', 'watch_tool_data_dir')
    )


def optional(config_file):
rval = []
Expand Down
2 changes: 2 additions & 0 deletions lib/galaxy/dependencies/conditional-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,5 @@ chronos-python==0.38.0

# Synnefo / Pithos+ object store client
kamaki

watchdog
1 change: 1 addition & 0 deletions lib/galaxy/dependencies/dev-requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ NoseHTML
twill==0.9.1
mock
selenium
watchdog

# For release process
pygithub3
Expand Down
32 changes: 19 additions & 13 deletions lib/galaxy/managers/libraries.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,11 +108,17 @@ def list(self, trans, deleted=False):
:type deleted: boolean (optional)
:returns: query that will emit all accessible libraries
:rtype: sqlalchemy query
:rtype: sqlalchemy query
:returns: set of library ids that have restricted access (not public)
:rtype: set
"""
is_admin = trans.user_is_admin()
query = trans.sa_session.query(trans.app.model.Library)

library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
restricted_library_ids = {lp.library_id for lp in (
trans.sa_session.query(trans.model.LibraryPermissions).filter(
trans.model.LibraryPermissions.table.c.action == library_access_action
).distinct())}
if is_admin:
if deleted is None:
# Flag is not specified, do not filter on it.
Expand All @@ -124,11 +130,6 @@ def list(self, trans, deleted=False):
else:
# Nonadmins can't see deleted libraries
current_user_role_ids = [role.id for role in trans.get_current_user_roles()]
library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
restricted_library_ids = [lp.library_id for lp in (
trans.sa_session.query(trans.model.LibraryPermissions).filter(
trans.model.LibraryPermissions.table.c.action == library_access_action
).distinct())]
accessible_restricted_library_ids = [lp.library_id for lp in (
trans.sa_session.query(trans.model.LibraryPermissions).filter(
and_(
Expand All @@ -139,7 +140,7 @@ def list(self, trans, deleted=False):
not_(trans.model.Library.table.c.id.in_(restricted_library_ids)),
trans.model.Library.table.c.id.in_(accessible_restricted_library_ids)
))
return query
return query, restricted_library_ids

def secure(self, trans, library, check_accessible=True):
"""
Expand Down Expand Up @@ -171,22 +172,27 @@ def check_accessible(self, trans, library):
else:
return library

def get_library_dict(self, trans, library):
def get_library_dict(self, trans, library, restricted_library_ids=None):
"""
Return library data in the form of a dictionary.
:param library: library
:type library: galaxy.model.Library
:param library: library
:type library: galaxy.model.Library
:param restricted_library_ids: ids of restricted libraries to speed up the
detection of public libraries
:type restricted_library_ids: list of ints
:returns: dict with data about the library
:rtype: dictionary
"""
library_dict = library.to_dict(view='element', value_mapper={'id': trans.security.encode_id, 'root_folder_id': trans.security.encode_id})
if trans.app.security_agent.library_is_public(library, contents=False):
if restricted_library_ids and library.id in restricted_library_ids:
library_dict['public'] = False
else:
library_dict['public'] = True
library_dict['create_time_pretty'] = pretty_print_time_interval(library.create_time, precise=True)
current_user_roles = trans.get_current_user_roles()
if not trans.user_is_admin():
current_user_roles = trans.get_current_user_roles()
library_dict['can_user_add'] = trans.app.security_agent.can_add_library_item(current_user_roles, library)
library_dict['can_user_modify'] = trans.app.security_agent.can_modify_library_item(current_user_roles, library)
library_dict['can_user_manage'] = trans.app.security_agent.can_manage_library_item(current_user_roles, library)
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1441,7 +1441,7 @@ def disk_size(cls):
HistoryDatasetAssociation.table.c.dataset_id == Dataset.table.c.id)
distinct_datasets = (
select([
# use labels here to better accrss from the query above
# use labels here to better access from the query above
HistoryDatasetAssociation.table.c.history_id.label('history_id'),
Dataset.total_size.label('dataset_size'),
Dataset.id.label('dataset_id')
Expand Down
2 changes: 2 additions & 0 deletions lib/galaxy/queue_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,8 @@ def reload_data_managers(app, **kwargs):
app.data_managers._reload_count = reload_count + 1
if hasattr(app, 'tool_cache'):
app.tool_cache.reset_status()
if hasattr(app, 'watchers'):
app.watchers.update_watch_data_table_paths()
log.debug("Data managers reloaded %s", reload_timer)


Expand Down
2 changes: 2 additions & 0 deletions lib/galaxy/tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -720,6 +720,8 @@ def parse(self, tool_source, guid=None):
# Determine if this tool can be used in workflows
self.is_workflow_compatible = self.check_workflow_compatible(tool_source)
self.__parse_trackster_conf(tool_source)
# Record macro paths so we can reload a tool if any of its macro has changes
self._macro_paths = tool_source._macro_paths

def __parse_legacy_features(self, tool_source):
self.code_namespace = dict()
Expand Down
17 changes: 17 additions & 0 deletions lib/galaxy/tools/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ def __init__(self):
self._hash_by_tool_paths = {}
self._tools_by_path = {}
self._tool_paths_by_id = {}
self._macro_paths_by_id = {}
self._tool_ids_by_macro_paths = {}
self._mod_time_by_path = {}
self._new_tool_ids = set()
self._removed_tool_ids = set()
Expand Down Expand Up @@ -54,6 +56,11 @@ def _should_cleanup(self, config_filename):
if self._mod_time_by_path.get(config_filename) < new_mtime:
if md5_hash_file(config_filename) != self._hash_by_tool_paths.get(config_filename):
return True
tool = self._tools_by_path[config_filename]
for macro_path in tool._macro_paths:
new_mtime = os.path.getmtime(macro_path)
if self._mod_time_by_path.get(macro_path) < new_mtime:
return True
return False

def get_tool(self, config_filename):
Expand Down Expand Up @@ -82,6 +89,16 @@ def cache_tool(self, config_filename, tool):
self._tool_paths_by_id[tool_id] = config_filename
self._tools_by_path[config_filename] = tool
self._new_tool_ids.add(tool_id)
for macro_path in tool._macro_paths:
self._mod_time_by_path[macro_path] = os.path.getmtime(macro_path)
if tool_id not in self._macro_paths_by_id:
self._macro_paths_by_id[tool_id] = {macro_path}
else:
self._macro_paths_by_id[tool_id].add(macro_path)
if macro_path not in self._macro_paths_by_id:
self._tool_ids_by_macro_paths[macro_path] = {tool_id}
else:
self._tool_ids_by_macro_paths[macro_path].add(tool_id)

def reset_status(self):
"""Reset self._new_tool_ids and self._removed_tool_ids once
Expand Down
13 changes: 11 additions & 2 deletions lib/galaxy/tools/loader.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,19 @@
"""Compatibility shims mapping tool-loading names onto galaxy.util.xml_macros."""
from galaxy.util.xml_macros import (
    imported_macro_paths,
    load,
    load_with_refereces,
    raw_xml_tree,
    template_macro_params,
)

load_tool = load
# NOTE: "refereces" is a historical misspelling that is part of the public
# API (e.g. galaxy.tools.parser.factory imports it); keep it for
# backward compatibility and expose a correctly spelled alias as well.
load_tool_with_refereces = load_with_refereces
load_tool_with_references = load_with_refereces
raw_tool_xml_tree = raw_xml_tree

__all__ = (
    "imported_macro_paths",
    "load_tool",
    "load_tool_with_refereces",
    "load_tool_with_references",
    "raw_tool_xml_tree",
    "template_macro_params",
)
51 changes: 31 additions & 20 deletions lib/galaxy/tools/parameters/dynamic_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -493,29 +493,16 @@ def load_from_parameter(from_parameter, transform_lines=None):
self.missing_index_file = None
dataset_file = elem.get('from_dataset', None)
from_parameter = elem.get('from_parameter', None)
tool_data_table_name = elem.get('from_data_table', None)
self.tool_data_table_name = elem.get('from_data_table', None)
# Options are defined from a data table loaded by the app
self.tool_data_table = None
self.missing_tool_data_table_name = None
if tool_data_table_name:
app = tool_param.tool.app
if tool_data_table_name in app.tool_data_tables:
self.tool_data_table = app.tool_data_tables[tool_data_table_name]
# Column definitions are optional, but if provided override those from the table
if elem.find("column") is not None:
self.parse_column_definitions(elem)
else:
self.columns = self.tool_data_table.columns
# Set self.missing_index_file if the index file to
# which the tool_data_table refers does not exist.
if self.tool_data_table.missing_index_file:
self.missing_index_file = self.tool_data_table.missing_index_file
else:
self.missing_tool_data_table_name = tool_data_table_name
log.warning("Data table named '%s' is required by tool but not configured" % tool_data_table_name)
self._tool_data_table = None
self.elem = elem
self.column_elem = elem.find("column")
self.tool_data_table # Need to touch tool data table once to populate self.columns

# Options are defined by parsing tabular text data from a data file
# on disk, a dataset, or the value of another parameter
elif data_file is not None or dataset_file is not None or from_parameter is not None:
if not self.tool_data_table_name and (data_file is not None or dataset_file is not None or from_parameter is not None):
self.parse_column_definitions(elem)
if data_file is not None:
data_file = data_file.strip()
Expand Down Expand Up @@ -545,6 +532,30 @@ def load_from_parameter(from_parameter, transform_lines=None):
if self.dataset_ref_name:
tool_param.data_ref = self.dataset_ref_name

@property
def tool_data_table(self):
    """Resolve the referenced tool data table from the app registry.

    Looked up lazily on every access so a table registered after the
    tool was parsed is still found.  As a side effect, refreshes
    ``self.columns`` (from the table unless the tool supplied its own
    ``<column>`` definitions) and ``self.missing_index_file``.
    """
    if not self.tool_data_table_name:
        return None
    table = self.tool_param.tool.app.tool_data_tables.get(self.tool_data_table_name, None)
    if not table:
        return None
    # Column definitions are optional, but if provided override those from the table
    if self.column_elem is not None:
        self.parse_column_definitions(self.elem)
    else:
        self.columns = table.columns
    # Set self.missing_index_file if the index file to
    # which the tool_data_table refers does not exist.
    if table.missing_index_file:
        self.missing_index_file = table.missing_index_file
    return table

@property
def missing_tool_data_table_name(self):
    """Name of a required-but-unconfigured data table, or None.

    Only warns and returns a name when a table was actually requested
    (``from_data_table`` set a name) but could not be resolved.  The
    previous version warned unconditionally whenever ``tool_data_table``
    was falsy, emitting a spurious "Data table named 'None'" warning for
    options that never referenced a data table.
    """
    if self.tool_data_table_name and not self.tool_data_table:
        log.warning("Data table named '%s' is required by tool but not configured" % self.tool_data_table_name)
        return self.tool_data_table_name
    return None

def parse_column_definitions(self, elem):
for column_elem in elem.findall('column'):
name = column_elem.get('name', None)
Expand Down
12 changes: 6 additions & 6 deletions lib/galaxy/tools/parser/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import yaml

from galaxy.tools.loader import load_tool as load_tool_xml
from galaxy.tools.loader import load_tool_with_refereces
from galaxy.util.odict import odict

from .cwl import CwlToolSource
Expand All @@ -22,7 +22,7 @@ def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True, t
"""Return a ToolSource object corresponding to supplied source.
The supplied source may be specified as a file path (using the config_file
parameter) or as an XML object loaded with load_tool_xml.
parameter) or as an XML object loaded with load_tool_with_refereces.
"""
if xml_tree is not None:
return XmlToolSource(xml_tree, source_path=config_file)
Expand All @@ -34,8 +34,8 @@ def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True, t

config_file = tool_location_fetcher.to_tool_path(config_file)
if not enable_beta_formats:
tree = load_tool_xml(config_file)
return XmlToolSource(tree, source_path=config_file)
tree, macro_paths = load_tool_with_refereces(config_file)
return XmlToolSource(tree, source_path=config_file, macro_paths=macro_paths)

if config_file.endswith(".yml"):
log.info("Loading tool from YAML - this is experimental - tool will not function in future.")
Expand All @@ -46,8 +46,8 @@ def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True, t
log.info("Loading CWL tool - this is experimental - tool likely will not function in future at least in same way.")
return CwlToolSource(config_file)
else:
tree = load_tool_xml(config_file)
return XmlToolSource(tree, source_path=config_file)
tree, macro_paths = load_tool_with_refereces(config_file)
return XmlToolSource(tree, source_path=config_file, macro_paths=macro_paths)


def ordered_load(stream):
Expand Down
3 changes: 2 additions & 1 deletion lib/galaxy/tools/parser/xml.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,10 +40,11 @@ class XmlToolSource(ToolSource):
""" Responsible for parsing a tool from classic Galaxy representation.
"""

def __init__(self, xml_tree, source_path=None):
def __init__(self, xml_tree, source_path=None, macro_paths=None):
    """Wrap a parsed classic-Galaxy tool XML tree.

    :param xml_tree: ElementTree for the tool's XML document
    :param source_path: path the XML was loaded from, if known
    :param macro_paths: paths of macro files imported by the tool
        (recorded so callers can watch them for changes — see the
        toolbox watcher wiring; TODO confirm against callers)
    """
    self.xml_tree = xml_tree
    self.root = xml_tree.getroot()
    self._source_path = source_path
    # Normalize None to an empty list so consumers can iterate safely.
    self._macro_paths = macro_paths or []
    # parse_profile() reads self.root, so this must come after root is set.
    self.legacy_defaults = self.parse_profile() == "16.01"

def parse_version(self):
Expand Down
1 change: 1 addition & 0 deletions lib/galaxy/tools/parser/yaml.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ class YamlToolSource(ToolSource):
def __init__(self, root_dict, source_path=None):
    """Build a tool source backed by an already-parsed YAML dictionary."""
    self._source_path = source_path
    # YAML tool sources track no macro files; always start empty.
    self._macro_paths = []
    self.root_dict = root_dict

def parse_id(self):
    """Return the tool id declared in the YAML document, or None if absent."""
    root = self.root_dict
    return root.get("id")
Expand Down
11 changes: 8 additions & 3 deletions lib/galaxy/tools/toolbox/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,11 @@ def __init__(self, config_filenames, tool_root_dir, app):
self.app = app
if hasattr(self.app, 'watchers'):
self._tool_watcher = self.app.watchers.tool_watcher
self._tool_config_watcher = self.app.watchers.tool_config_watcher
else:
# Toolbox is loaded but not used during toolshed tests
self._tool_watcher = None
self._tool_config_watcher = None
self._filter_factory = FilterFactory(self)
self._tool_tag_manager = tool_tag_manager(app)
self._init_tools_from_configs(config_filenames)
Expand Down Expand Up @@ -741,14 +743,17 @@ def load_tool(self, config_file, guid=None, repository_id=None, use_cached=False
tool = None
if use_cached:
tool = self.load_tool_from_cache(config_file)
if not tool:
if not tool or guid and guid != tool.guid:
tool = self.create_tool(config_file=config_file, repository_id=repository_id, guid=guid, **kwds)
if tool.tool_shed_repository or not guid:
self.add_tool_to_cache(tool, config_file)
if not tool.id.startswith("__") and self._tool_watcher:
if not tool.id.startswith("__"):
# do not monitor special tools written to tmp directory - no reason
# to monitor such a large directory.
self._tool_watcher.watch_file(config_file, tool.id)
if self._tool_watcher:
self._tool_watcher.watch_file(config_file, tool.id)
if self._tool_config_watcher:
[self._tool_config_watcher.watch_file(macro_path) for macro_path in tool._macro_paths]
return tool

def add_tool_to_cache(self, tool, config_file):
Expand Down

0 comments on commit 1494207

Please sign in to comment.