Skip to content

Commit

Permalink
Fix collection operations so that copied datasets are added to the target history.
Browse files Browse the repository at this point in the history
Fixes #3437.
  • Loading branch information
jmchilton committed Mar 21, 2017
1 parent a5b0e76 commit f70ec38
Show file tree
Hide file tree
Showing 4 changed files with 40 additions and 11 deletions.
17 changes: 13 additions & 4 deletions lib/galaxy/tools/__init__.py
Expand Up @@ -2352,6 +2352,8 @@ def produce_outputs( self, trans, out_data, output_collections, incoming, histor
new_elements = odict()
new_elements["forward"] = forward
new_elements["reverse"] = reverse
history.add_dataset( forward, set_hid=False )
history.add_dataset( reverse, set_hid=False )

output_collections.create_collection(
next(iter(self.outputs.values())), "output", elements=new_elements
Expand Down Expand Up @@ -2430,7 +2432,10 @@ def produce_outputs( self, trans, out_data, output_collections, incoming, histor
# Don't copy until we know everything is fine and we have the structure of the list ready to go.
new_elements = odict()
for key, value in new_element_structure.items():
new_elements[key] = value.copy()
copied_value = value.copy()
if getattr(copied_value, "history_content_type", None) == "dataset":
history.add_dataset(copied_value, set_hid=False)
new_elements[key] = copied_value

output_collections.create_collection(
next(iter(self.outputs.values())), "output", elements=new_elements
Expand Down Expand Up @@ -2465,7 +2470,10 @@ def produce_outputs( self, trans, out_data, output_collections, incoming, histor

if valid:
element_identifier = dce.element_identifier
new_elements[element_identifier] = element.copy()
copied_value = element.copy()
if getattr(copied_value, "history_content_type", None) == "dataset":
history.add_dataset(copied_value, set_hid=False)
new_elements[element_identifier] = copied_value

output_collections.create_collection(
next(iter(self.outputs.values())), "output", elements=new_elements
Expand All @@ -2488,8 +2496,9 @@ def add_elements(collection, prefix=""):
if dce.is_collection:
add_elements(dce_object, prefix=identifier)
else:
new_elements[identifier] = dce_object.copy()

copied_dataset = dce_object.copy()
history.add_dataset(copied_dataset, set_hid=False)
new_elements[identifier] = copied_dataset
add_elements(hdca.collection)
output_collections.create_collection(
next(iter(self.outputs.values())), "output", elements=new_elements
Expand Down
10 changes: 10 additions & 0 deletions lib/galaxy/tools/actions/__init__.py
Expand Up @@ -679,6 +679,16 @@ def create_collection(self, output, name, **element_kwds):

collection_type = input_collections[collection_type_source].collection.collection_type

if "elements" in element_kwds:
elements = element_kwds["elements"]
for key, value in elements.items():
# Either a HDA (if) or a DatasetCollection (the else)
if getattr(value, "history_content_type", None) == "dataset":
assert value.history is not None
else:
for dataset in value.dataset_instances:
assert dataset.history is not None

if self.mapping_over_collection:
dc = collections_manager.create_dataset_collection(
self.trans,
Expand Down
6 changes: 6 additions & 0 deletions test/api/test_tools.py
Expand Up @@ -166,6 +166,9 @@ def test_zip_inputs( self ):
response = self._run( "__ZIP_COLLECTION__", history_id, inputs, assert_ok=True )
output_collections = response[ "output_collections" ]
self.assertEquals( len(output_collections), 1 )
self.dataset_populator.wait_for_job( response["jobs"][0]["id"], assert_ok=True )
zipped_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=output_collections[0]["hid"])
assert zipped_hdca["collection_type"] == "paired"

def test_zip_list_inputs( self ):
history_id = self.dataset_populator.new_history()
Expand All @@ -179,6 +182,9 @@ def test_zip_list_inputs( self ):
response = self._run( "__ZIP_COLLECTION__", history_id, inputs, assert_ok=True )
implicit_collections = response[ "implicit_collections" ]
self.assertEquals( len(implicit_collections), 1 )
self.dataset_populator.wait_for_job( response["jobs"][0]["id"], assert_ok=True )
zipped_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=implicit_collections[0]["hid"])
assert zipped_hdca["collection_type"] == "list:paired"

def test_filter_failed( self ):
history_id = self.dataset_populator.new_history()
Expand Down
18 changes: 11 additions & 7 deletions test/base/populators.py
Expand Up @@ -173,19 +173,23 @@ def __history_content_id( self, history_id, wait=True, **kwds ):
# kwds should contain a 'dataset' object response, a 'dataset_id' or
# the last dataset in the history will be fetched.
if "dataset_id" in kwds:
dataset_id = kwds[ "dataset_id" ]
history_content_id = kwds[ "dataset_id" ]
elif "dataset" in kwds:
dataset_id = kwds[ "dataset" ][ "id" ]
history_content_id = kwds[ "dataset" ][ "id" ]
else:
hid = kwds.get( "hid", None ) # If not hid, just grab last dataset
history_contents = self.__get_contents_request( history_id ).json()
if hid:
index = hid - 1
history_content_id = None
for history_item in history_contents:
if history_item["hid"] == hid:
history_content_id = history_item["id"]
if history_content_id is None:
raise Exception("Could not find content with HID [%s] in [%s]" % (hid, history_contents))
else:
# No hid specified - just grab most recent element.
index = -1
dataset_contents = self.__get_contents_request( history_id ).json()
dataset_id = dataset_contents[ index ][ "id" ]
return dataset_id
history_content_id = history_contents[-1]["id"]
return history_content_id

def __get_contents_request( self, history_id, suffix=""):
url = "histories/%s/contents" % history_id
Expand Down

0 comments on commit f70ec38

Please sign in to comment.