Skip to content

Commit

Permalink
Consolidate hiding HDAs in DatabaseOperation tools
Browse files Browse the repository at this point in the history
This fixes datasets being added to histories when the source
HDA is visible.
  • Loading branch information
mvdbeek committed Mar 8, 2019
1 parent 54adb6e commit 199468e
Showing 1 changed file with 3 additions and 10 deletions.
13 changes: 3 additions & 10 deletions lib/galaxy/tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2423,10 +2423,11 @@ def check_dataset_instance(input_dataset):

map(check_dataset_instance, input_dataset_collection.dataset_instances)

def _add_datasets_to_history(self, history, elements):
def _add_datasets_to_history(self, history, elements, datasets_visible=False):
datasets = []
for element_object in elements:
if getattr(element_object, "history_content_type", None) == "dataset":
element_object.visible = datasets_visible
datasets.append(element_object)

if datasets:
Expand Down Expand Up @@ -2518,8 +2519,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
raise Exception("Invalid tool parameters.")
extracted = extracted_element.element_object
extracted_o = extracted.copy(copy_tags=tags, new_name=extracted_element.element_identifier)
extracted_o.visible = True
self._add_datasets_to_history(history, [extracted_o])
self._add_datasets_to_history(history, [extracted_o], datasets_visible=True)

out_data["output"] = extracted_o

Expand Down Expand Up @@ -2598,7 +2598,6 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
for key, value in new_element_structure.items():
if getattr(value, "history_content_type", None) == "dataset":
copied_value = value.copy(force_flush=False)
copied_value.visible = False
else:
copied_value = value.copy()
new_elements[key] = copied_value
Expand All @@ -2617,7 +2616,6 @@ def _get_new_elements(self, history, elements_to_copy):
element_identifier = dce.element_identifier
if getattr(dce.element_object, "history_content_type", None) == "dataset":
copied_value = dce.element_object.copy(force_flush=False)
copied_value.visible = False
else:
copied_value = dce.element_object.copy()
new_elements[element_identifier] = copied_value
Expand Down Expand Up @@ -2693,7 +2691,6 @@ def add_elements(collection, prefix=""):
add_elements(dce_object, prefix=identifier)
else:
copied_dataset = dce_object.copy(force_flush=False)
copied_dataset.visible = False
new_elements[identifier] = copied_dataset
copied_datasets.append(copied_dataset)

Expand Down Expand Up @@ -2739,7 +2736,6 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
for dce in sorted_elements:
dce_object = dce.element_object
copied_dataset = dce_object.copy(force_flush=False)
copied_dataset.visible = False
new_elements[dce.element_identifier] = copied_dataset

self._add_datasets_to_history(history, itervalues(new_elements))
Expand All @@ -2764,7 +2760,6 @@ def add_copied_value_to_new_elements(new_label, dce_object):
raise Exception("New identifier [%s] appears twice in resulting collection, these values must be unique." % new_label)
if getattr(dce_object, "history_content_type", None) == "dataset":
copied_value = dce_object.copy(force_flush=False)
copied_value.visible = False
else:
copied_value = dce_object.copy()
new_elements[new_label] = copied_value
Expand Down Expand Up @@ -2810,7 +2805,6 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history

def copy_dataset(dataset):
copied_dataset = dataset.copy(force_flush=False)
copied_dataset.visible = False
copied_datasets.append(copied_dataset)
return copied_dataset

Expand Down Expand Up @@ -2907,7 +2901,6 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history

if getattr(dce_object, "history_content_type", None) == "dataset":
copied_value = dce_object.copy(force_flush=False)
copied_value.visible = False
else:
copied_value = dce_object.copy()

Expand Down

0 comments on commit 199468e

Please sign in to comment.