Merge branch 'release_18.01' into release_18.05
martenson committed May 7, 2018
2 parents 400bcf1 + 8255d1c commit 0cfac51
Showing 4 changed files with 33 additions and 26 deletions.

lib/galaxy/tools/__init__.py (2 changes: 1 addition & 1 deletion)

@@ -1871,6 +1871,7 @@ def to_json(self, trans, kwd={}, job=None, workflow_building_mode=False):
         if self.input_translator:
             self.input_translator.translate(params)
 
+        set_dataset_matcher_factory(request_context, self)
         # create tool state
         state_inputs = {}
         state_errors = {}
@@ -1879,7 +1880,6 @@ def to_json(self, trans, kwd={}, job=None, workflow_building_mode=False):
         # create tool model
         tool_model = self.to_dict(request_context)
         tool_model['inputs'] = []
-        set_dataset_matcher_factory(request_context, self, state_inputs)
         self.populate_model(request_context, self.inputs, state_inputs, tool_model['inputs'])
         unset_dataset_matcher_factory(request_context)
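
Taken together with the next file, this hunk moves the matcher-factory setup ahead of tool-state creation and drops its dependency on `state_inputs`. A minimal sketch of the resulting install/tear-down pattern, assuming a simple `trans`-like carrier object (`FakeTrans` and the string "factory" below are illustrative stand-ins, not Galaxy's real types):

```python
class FakeTrans(object):
    """Hypothetical stand-in for Galaxy's request context."""
    dataset_matcher_factory = None

def set_dataset_matcher_factory(trans, tool):
    # After this commit the factory needs only the tool, not param values,
    # so it can be installed before any tool state is computed.
    trans.dataset_matcher_factory = "factory(%s)" % tool

def unset_dataset_matcher_factory(trans):
    trans.dataset_matcher_factory = None

trans = FakeTrans()
set_dataset_matcher_factory(trans, "cat1")
try:
    # ... build tool state and populate the model here; everything on
    # this path can reach trans.dataset_matcher_factory ...
    assert trans.dataset_matcher_factory is not None
finally:
    unset_dataset_matcher_factory(trans)
```

The diff itself calls `unset_dataset_matcher_factory` unconditionally rather than via `try`/`finally`; the sketch adds the `finally` only to make the set/unset pairing explicit.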

lib/galaxy/tools/parameters/dataset_matcher.py (31 changes: 17 additions & 14 deletions)

@@ -5,8 +5,8 @@
 log = getLogger(__name__)
 
 
-def set_dataset_matcher_factory(trans, tool, param_values):
-    trans.dataset_matcher_factory = DatasetMatcherFactory(trans, tool, param_values)
+def set_dataset_matcher_factory(trans, tool):
+    trans.dataset_matcher_factory = DatasetMatcherFactory(trans, tool)
 
 
 def unset_dataset_matcher_factory(trans):
@@ -21,7 +21,7 @@ def get_dataset_matcher_factory(trans):
 class DatasetMatcherFactory(object):
     """"""
 
-    def __init__(self, trans, tool=None, param_values=None):
+    def __init__(self, trans, tool=None):
         self._trans = trans
         self._tool = tool
         self._data_inputs = []
@@ -32,8 +32,9 @@ def __init__(self, trans, tool=None):
         valid_input_states = galaxy.model.Dataset.valid_input_states
         self.valid_input_states = valid_input_states
         can_process_summary = False
-        if tool is not None and param_values is not None:
-            self._collect_data_inputs(tool, param_values)
+        if tool is not None:
+            for input in tool.inputs.values():
+                self._collect_data_inputs(input)
 
         require_public = self._tool and self._tool.tool_type == 'data_destination'
         if not require_public and self._data_inputs:
@@ -62,15 +63,17 @@ def matches_format(self, hda_extension, format):
 
         return formats[format]
 
-    def _collect_data_inputs(self, tool, param_values):
-        def visitor(input, value, prefix, parent=None, **kwargs):
-            type_name = type(input).__name__
-            if "DataToolParameter" in type_name:
-                self._data_inputs.append(input)
-            elif "DataCollectionToolParameter" in type_name:
-                self._data_inputs.append(input)
-
-        tool.visit_inputs(param_values, visitor)
+    def _collect_data_inputs(self, input):
+        type_name = input.type
+        if type_name == "repeat" or type_name == "upload_dataset" or type_name == "section":
+            for child_input in input.inputs.values():
+                self._collect_data_inputs(child_input)
+        elif type_name == "conditional":
+            for case in input.cases:
+                for child_input in case.inputs.values():
+                    self._collect_data_inputs(child_input)
+        elif type_name == "data" or type_name == "data_collection":
+            self._data_inputs.append(input)
 
     def dataset_matcher(self, param, other_values):
         return DatasetMatcher(self, self._trans, param, other_values)
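
The rewritten `_collect_data_inputs` recurses over the static input tree by parameter `type` instead of visiting concrete parameter values, which is what allows the factory to be constructed before any tool state exists. A self-contained sketch of that recursive descent, with a hypothetical `Param` class standing in for Galaxy's input parameter objects:

```python
class Param(object):
    """Hypothetical stand-in for a Galaxy tool input parameter."""
    def __init__(self, type, inputs=None, cases=None):
        self.type = type
        self.inputs = inputs or {}   # children of repeat/section containers
        self.cases = cases or []     # branches of a conditional

def collect_data_inputs(input, found):
    # Container types recurse; leaf data params are collected.
    if input.type in ("repeat", "upload_dataset", "section"):
        for child in input.inputs.values():
            collect_data_inputs(child, found)
    elif input.type == "conditional":
        for case in input.cases:
            for child in case.inputs.values():
                collect_data_inputs(child, found)
    elif input.type in ("data", "data_collection"):
        found.append(input)

# A section wrapping a conditional whose only case holds one data input:
case = Param("when", inputs={"d": Param("data")})
tree = Param("section", inputs={"c": Param("conditional", cases=[case])})

found = []
collect_data_inputs(tree, found)
assert [p.type for p in found] == ["data"]
```

Note that every case of a conditional is scanned, not just the selected one, so the collected inputs are a superset of what any single parameter state would reference.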

lib/galaxy/util/permutations.py (3 changes: 2 additions & 1 deletion)

@@ -83,7 +83,8 @@ def __extend_with_matched_combos(single_inputs, multi_inputs):
         if multi_input_key == first_multi_input_key:
             continue
         if len(multi_input_values) != len(first_multi_value):
-            raise InputMatchedException()
+            raise InputMatchedException("Received %d inputs for '%s' and %d inputs for '%s', these should be of equal length" %
+                                        (len(multi_input_values), multi_input_key, len(first_multi_value), first_multi_input_key))
 
         for index, value in enumerate(multi_input_values):
             matched_multi_inputs[index][multi_input_key] = value
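
The single functional change here is a more informative exception. A small self-contained sketch of the failure mode it describes, using a local `InputMatchedException` stand-in:

```python
class InputMatchedException(Exception):
    """Local stand-in for Galaxy's exception of the same name."""

def check_matched_lengths(multi_inputs):
    # "Matched" combination links the i-th value of every multi-input
    # together, so all of the value lists must have equal length.
    items = list(multi_inputs.items())
    first_key, first_values = items[0]
    for key, values in items[1:]:
        if len(values) != len(first_values):
            raise InputMatchedException(
                "Received %d inputs for '%s' and %d inputs for '%s', these should be of equal length"
                % (len(values), key, len(first_values), first_key))

try:
    check_matched_lengths({"a": [1, 2, 3], "b": [4, 5]})
except InputMatchedException as err:
    print(err)  # Received 2 inputs for 'b' and 3 inputs for 'a', ...
```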

scripts/grt/export.py (23 changes: 13 additions & 10 deletions)

@@ -69,7 +69,7 @@ def __init__(self, sanitization_config, model, sa_session):
             self.sanitization_config['tool_params'] = {}
 
     def blacklisted_tree(self, path):
-        if path.lstrip('.') in self.sanitization_config['tool_params'][self.tool_id]:
+        if self.tool_id in self.sanitization_config['tool_params'] and path.lstrip('.') in self.sanitization_config['tool_params'][self.tool_id]:
             return True
         return False
 
@@ -79,21 +79,23 @@ def sanitize_data(self, tool_id, key, value):
             return 'null'
         # Thus, all tools below here are not blacklisted at the top level.
 
-        # If it isn't in tool_params, there are no keys being sanitized for
-        # this tool so we can return quickly without parsing.
-        if tool_id not in self.sanitization_config['tool_params']:
-            return value
-
        # If the key is listed precisely (not a sub-tree), we can also return slightly more quickly.
-        if key in self.sanitization_config['tool_params'][tool_id]:
+        if tool_id in self.sanitization_config['tool_params'] and key in self.sanitization_config['tool_params'][tool_id]:
             return 'null'
 
         # If the key isn't a prefix for any of the keys being sanitized, then this is safe.
-        if not any(san_key.startswith(key) for san_key in self.sanitization_config['tool_params'][tool_id]):
+        if tool_id in self.sanitization_config['tool_params'] and not any(san_key.startswith(key) for san_key in self.sanitization_config['tool_params'][tool_id]):
             return value
 
         # Slow path.
-        unsanitized = {key: json.loads(value)}
+        if isinstance(value, str):
+            try:
+                unsanitized = {key: json.loads(value)}
+            except ValueError:
+                unsanitized = {key: value}
+        else:
+            unsanitized = {key: value}
 
         self.tool_id = tool_id
         return json.dumps(self._sanitize_value(unsanitized))
@@ -117,7 +119,8 @@ def _file_dict(self, data):
             self.filesize_cache[data['id']] = data
             return data
         else:
-            raise Exception("Cannot handle {src} yet".format(data))
+            logging.warning("Cannot handle {src} yet".format(data))
+            return data
 
     def _sanitize_dict(self, unsanitized_dict, path=""):
         # if it is a file dictionary, handle specially.
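
Two defensive patterns recur in this file: every `tool_params` lookup is now guarded by a membership test, and the slow path only calls `json.loads()` on strings, falling back to the raw value when decoding fails. A compressed sketch of that slow-path behaviour (the helper name `build_unsanitized` is illustrative, not from the source):

```python
import json

def build_unsanitized(key, value):
    # Only attempt JSON decoding on strings; anything else, or any string
    # that is not valid JSON, is kept as-is instead of raising.
    if isinstance(value, str):
        try:
            return {key: json.loads(value)}
        except ValueError:
            return {key: value}
    return {key: value}

print(build_unsanitized("p", '{"a": 1}'))  # parsed: {'p': {'a': 1}}
print(build_unsanitized("p", "not json"))  # fallback: {'p': 'not json'}
print(build_unsanitized("p", 42))          # non-string passes through
```

One caveat in the final hunk: as committed, `"Cannot handle {src} yet".format(data)` will itself raise `KeyError: 'src'`, because `data` is passed positionally; `.format(**data)` (or `format(src=data['src'])`) would be needed to interpolate the `src` field.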
