Merge pull request galaxyproject#5418 from jmchilton/collection_pja
[18.01] Implement more intuitive collection PJA
mvdbeek committed Feb 3, 2018
2 parents fbfbdd5 + 8e32e93 commit 5aed347
Showing 4 changed files with 231 additions and 11 deletions.
51 changes: 43 additions & 8 deletions lib/galaxy/jobs/actions/post.py
@@ -86,7 +86,7 @@ class RenameDatasetAction(DefaultJobAction):
verbose_name = "Rename Dataset"

@classmethod
def execute_on_mapped_over(cls, app, sa_session, action, step_inputs, step_outputs, replacement_dict):
def execute_on_mapped_over(cls, trans, sa_session, action, step_inputs, step_outputs, replacement_dict):
# Prevent renaming a dataset to the empty string.
input_names = {}
# Loop through inputs to find one with "to_be_replaced" input
@@ -226,6 +226,16 @@ def execute(cls, app, sa_session, action, job, replacement_dict):
if dataset_assoc.dataset.state != dataset_assoc.dataset.states.ERROR and (action.output_name == '' or dataset_assoc.name == action.output_name):
dataset_assoc.dataset.visible = False

for dataset_collection_assoc in job.output_dataset_collection_instances:
if action.output_name == '' or dataset_collection_assoc.name == action.output_name:
dataset_collection_assoc.dataset_collection_instance.visible = False

@classmethod
def execute_on_mapped_over(cls, trans, sa_session, action, step_inputs, step_outputs, replacement_dict):
for name, step_output in step_outputs.items():
if action.output_name == '' or name == action.output_name:
step_output.visible = False

@classmethod
def get_short_str(cls, pja):
return "Hide output '%s'." % escape(pja.output_name)
@@ -242,6 +252,16 @@ def execute(cls, app, sa_session, action, job, replacement_dict):
if action.output_name == '' or dataset_assoc.name == action.output_name:
dataset_assoc.dataset.deleted = True

for dataset_collection_assoc in job.output_dataset_collection_instances:
if action.output_name == '' or dataset_collection_assoc.name == action.output_name:
dataset_collection_assoc.dataset_collection_instance.deleted = True

@classmethod
def execute_on_mapped_over(cls, trans, sa_session, action, step_inputs, step_outputs, replacement_dict):
for name, step_output in step_outputs.items():
if action.output_name == '' or name == action.output_name:
step_output.deleted = True

@classmethod
def get_short_str(cls, pja):
return "Delete this dataset after creation."
@@ -357,6 +377,15 @@ class TagDatasetAction(DefaultJobAction):
action = "Add"
direction = "to"

@classmethod
def execute_on_mapped_over(cls, trans, sa_session, action, step_inputs, step_outputs, replacement_dict):
if action.action_arguments:
tags = [t.replace('#', 'name:') if t.startswith('#') else t for t in [t.strip() for t in action.action_arguments.get('tags', '').split(',') if t.strip()]]
if tags:
for name, step_output in step_outputs.items():
if action.output_name == '' or name == action.output_name:
cls._execute(trans.app, trans.user, step_output, tags)

@classmethod
def execute(cls, app, sa_session, action, job, replacement_dict):
if action.action_arguments:
@@ -365,11 +394,16 @@ def execute(cls, app, sa_session, action, job, replacement_dict):
for dataset_assoc in job.output_datasets:
if action.output_name == '' or dataset_assoc.name == action.output_name:
cls._execute(app, job.user, dataset_assoc.dataset, tags)

for dataset_collection_assoc in job.output_dataset_collection_instances:
if action.output_name == '' or dataset_collection_assoc.name == action.output_name:
cls._execute(app, job.user, dataset_collection_assoc.dataset_collection_instance, tags)

sa_session.flush()

@classmethod
def _execute(cls, app, user, dataset, tags):
app.tag_handler.add_tags_from_list(user, dataset, tags)
def _execute(cls, app, user, output, tags):
app.tag_handler.add_tags_from_list(user, output, tags)

@classmethod
def get_short_str(cls, pja):
@@ -389,8 +423,8 @@ class RemoveTagDatasetAction(TagDatasetAction):
direction = "from"

@classmethod
def _execute(cls, app, user, dataset, tags):
app.tag_handler.remove_tags_from_list(user, dataset, tags)
def _execute(cls, app, user, output, tags):
app.tag_handler.remove_tags_from_list(user, output, tags)


class ActionBox(object):
@@ -413,7 +447,8 @@ class ActionBox(object):
'TagDatasetAction', 'RemoveTagDatasetAction']
# Actions that will be applied to implicit mapped over collection outputs and not
# just individual outputs when steps include mapped over tools and implicit collection outputs.
mapped_over_output_actions = ['RenameDatasetAction']
mapped_over_output_actions = ['RenameDatasetAction', 'HideDatasetAction',
'TagDatasetAction', 'RemoveTagDatasetAction']

@classmethod
def get_short_str(cls, action):
@@ -445,9 +480,9 @@ def handle_incoming(cls, incoming):
return npd

@classmethod
def execute_on_mapped_over(cls, app, sa_session, pja, step_inputs, step_outputs, replacement_dict=None):
def execute_on_mapped_over(cls, trans, sa_session, pja, step_inputs, step_outputs, replacement_dict=None):
if pja.action_type in ActionBox.actions:
ActionBox.actions[pja.action_type].execute_on_mapped_over(app, sa_session, pja, step_inputs, step_outputs, replacement_dict)
ActionBox.actions[pja.action_type].execute_on_mapped_over(trans, sa_session, pja, step_inputs, step_outputs, replacement_dict)

@classmethod
def execute(cls, app, sa_session, pja, job, replacement_dict=None):
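The tag-normalization one-liner in TagDatasetAction.execute_on_mapped_over above is fairly dense. Here is a minimal sketch of the same transformation, using a hypothetical helper name that is not part of this commit:

def normalize_tags(raw):
    # Split a comma-separated tag string, drop blank entries, and rewrite the
    # '#label' shorthand to the canonical 'name:label' form, mirroring the
    # list comprehension in TagDatasetAction.execute_on_mapped_over.
    stripped = [t.strip() for t in raw.split(',') if t.strip()]
    return [t.replace('#', 'name:') if t.startswith('#') else t for t in stripped]

# '#foo, ,name:bar,  baz ' -> ['name:foo', 'name:bar', 'baz']
assert normalize_tags('#foo, ,name:bar,  baz ') == ['name:foo', 'name:bar', 'baz']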
2 changes: 1 addition & 1 deletion lib/galaxy/workflow/modules.py
@@ -986,7 +986,7 @@ def _handle_mapped_over_post_job_actions(self, step, step_inputs, step_outputs,
effective_post_job_actions = self._effective_post_job_actions(step)
for pja in effective_post_job_actions:
if pja.action_type in ActionBox.mapped_over_output_actions:
ActionBox.execute_on_mapped_over(self.trans.app, self.trans.sa_session, pja, step_inputs, step_outputs, replacement_dict)
ActionBox.execute_on_mapped_over(self.trans, self.trans.sa_session, pja, step_inputs, step_outputs, replacement_dict)

def _handle_post_job_actions(self, step, job, replacement_dict):
# Create new PJA associations with the created job, to be run on completion.
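A minimal sketch of the dispatch pattern this call site completes, using simplified stand-ins rather than Galaxy's real models: the module filters post-job actions by ActionBox.mapped_over_output_actions and now hands the whole trans object through, since TagDatasetAction's mapped-over handler needs both trans.app (for the tag handler) and trans.user, not just the app.

from collections import namedtuple

# Simplified stand-ins for illustration only; Galaxy's real objects carry far
# more state than these.
Trans = namedtuple("Trans", ["app", "user"])
PJA = namedtuple("PJA", ["action_type", "output_name"])

class FakeOutput(object):
    # Looks like an HDA or an HDCA for the one attribute HideDatasetAction touches.
    def __init__(self):
        self.visible = True

MAPPED_OVER_OUTPUT_ACTIONS = {"HideDatasetAction"}  # subset, for illustration

def hide_on_mapped_over(trans, action, step_outputs):
    # Mirrors HideDatasetAction.execute_on_mapped_over from the diff above.
    for name, step_output in step_outputs.items():
        if action.output_name == '' or name == action.output_name:
            step_output.visible = False

def handle_mapped_over_pjas(trans, pjas, step_outputs):
    # Mirrors the filtering done by _handle_mapped_over_post_job_actions in modules.py.
    for pja in pjas:
        if pja.action_type in MAPPED_OVER_OUTPUT_ACTIONS:
            hide_on_mapped_over(trans, pja, step_outputs)

outputs = {"out_file1": FakeOutput()}
handle_mapped_over_pjas(Trans(app=None, user=None),
                        [PJA("HideDatasetAction", "out_file1")],
                        outputs)
assert outputs["out_file1"].visible is False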
167 changes: 165 additions & 2 deletions test/api/test_workflows.py
@@ -2000,7 +2000,7 @@ def test_optional_workflow_output(self):
assert okay_dataset["state"] == "ok"

@skip_without_tool("cat")
def test_run_rename_on_mapped_over_dataset(self):
def test_run_rename_on_mapped_over_collection(self):
history_id = self.dataset_populator.new_history()
self._run_jobs("""
class: GalaxyWorkflow
@@ -2036,7 +2036,7 @@ def test_run_rename_on_mapped_over_dataset(self):
assert name == "my new name", name

@skip_without_tool("cat")
def test_run_rename_based_on_inputs_on_mapped_over_dataset(self):
def test_run_rename_based_on_inputs_on_mapped_over_collection(self):
history_id = self.dataset_populator.new_history()
self._run_jobs("""
class: GalaxyWorkflow
@@ -2283,6 +2283,169 @@ def test_run_rename_based_on_input_collection(self):
name = content["name"]
assert name == "the_dataset_pair suffix", name

@skip_without_tool("collection_creates_pair")
def test_run_hide_on_collection_output(self):
with self.dataset_populator.test_history() as history_id:
self._run_jobs("""
class: GalaxyWorkflow
inputs:
- id: input1
steps:
- tool_id: collection_creates_pair
state:
input1:
$link: input1
outputs:
paired_output:
hide: true
test_data:
input1:
value: 1.fasta
type: File
name: fasta1
""", history_id=history_id)
details1 = self.dataset_populator.get_history_collection_details(history_id, hid=4, wait=True, assert_ok=True)

assert details1["history_content_type"] == "dataset_collection"
assert not details1["visible"], details1

@skip_without_tool("cat")
def test_run_hide_on_mapped_over_collection(self):
history_id = self.dataset_populator.new_history()
self._run_jobs("""
class: GalaxyWorkflow
inputs:
- id: input1
type: data_collection_input
collection_type: list
steps:
- tool_id: cat
label: first_cat
state:
input1:
$link: input1
outputs:
out_file1:
hide: true
test_data:
input1:
type: list
name: the_dataset_list
elements:
- identifier: el1
value: 1.fastq
type: File
""", history_id=history_id)

content = self.dataset_populator.get_history_dataset_details(history_id, hid=4, wait=True, assert_ok=True)
assert content["history_content_type"] == "dataset"
assert content["visible"] is False

content = self.dataset_populator.get_history_collection_details(history_id, hid=3, wait=True, assert_ok=True)
assert content["history_content_type"] == "dataset_collection", content
assert content["visible"] is False

@skip_without_tool("collection_creates_pair")
def test_run_add_tag_on_collection_output(self):
with self.dataset_populator.test_history() as history_id:
self._run_jobs("""
class: GalaxyWorkflow
inputs:
- id: input1
steps:
- tool_id: collection_creates_pair
state:
input1:
$link: input1
outputs:
paired_output:
add_tags:
- "name:foo"
test_data:
input1:
value: 1.fasta
type: File
name: fasta1
""", history_id=history_id)
details1 = self.dataset_populator.get_history_collection_details(history_id, hid=4, wait=True, assert_ok=True)

assert details1["history_content_type"] == "dataset_collection"
assert details1["tags"][0] == "name:foo", details1

@skip_without_tool("collection_creates_pair")
def test_run_add_tag_on_mapped_over_collection(self):
with self.dataset_populator.test_history() as history_id:
self._run_jobs("""
class: GalaxyWorkflow
inputs:
- id: input1
type: data_collection_input
collection_type: list
steps:
- tool_id: cat
label: first_cat
state:
input1:
$link: input1
outputs:
out_file1:
add_tags:
- "name:foo"
test_data:
input1:
type: list
name: the_dataset_list
elements:
- identifier: el1
value: 1.fastq
type: File
""", history_id=history_id)
details1 = self.dataset_populator.get_history_collection_details(history_id, hid=3, wait=True, assert_ok=True)

assert details1["history_content_type"] == "dataset_collection"
assert details1["tags"][0] == "name:foo", details1

@skip_without_tool("collection_creates_pair")
@skip_without_tool("cat")
def test_run_remove_tag_on_collection_output(self):
with self.dataset_populator.test_history() as history_id:
self._run_jobs("""
class: GalaxyWorkflow
inputs:
- id: input1
steps:
- tool_id: cat
label: first_cat
state:
input1:
$link: input1
outputs:
out_file1:
add_tags:
- "name:foo"
- tool_id: collection_creates_pair
state:
input1:
$link: first_cat#out_file1
outputs:
paired_output:
remove_tags:
- "name:foo"
test_data:
input1:
value: 1.fasta
type: File
name: fasta1
""", history_id=history_id)
details_dataset_with_tag = self.dataset_populator.get_history_dataset_details(history_id, hid=2, wait=True, assert_ok=True)

assert details_dataset_with_tag["history_content_type"] == "dataset", details_dataset_with_tag
assert details_dataset_with_tag["tags"][0] == "name:foo", details_dataset_with_tag

details_collection_without_tag = self.dataset_populator.get_history_collection_details(history_id, hid=5, wait=True, assert_ok=True)
assert details_collection_without_tag["history_content_type"] == "dataset_collection", details_collection_without_tag
assert len(details_collection_without_tag["tags"]) == 0, details_collection_without_tag

@skip_without_tool("cat1")
def test_run_with_runtime_pja(self):
workflow = self.workflow_populator.load_workflow(name="test_for_pja_runtime")
22 changes: 22 additions & 0 deletions test/base/workflows_format_2/converter.py
@@ -390,6 +390,28 @@ def replace_links(value, key=""):
)
post_job_actions[action_name] = action

add_tags = output.get("add_tags", [])
if add_tags:
action_name = "TagDatasetAction%s" % name
arguments = dict(tags=",".join(add_tags))
action = _action(
"TagDatasetAction",
name,
arguments
)
post_job_actions[action_name] = action

remove_tags = output.get("remove_tags", [])
if remove_tags:
action_name = "RemoveTagDatasetAction%s" % name
arguments = dict(tags=",".join(remove_tags))
action = _action(
"RemoveTagDatasetAction",
name,
arguments
)
post_job_actions[action_name] = action

del step["outputs"]


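For reference, a rough sketch of the post_job_actions entries these new branches would produce for an output named paired_output carrying add_tags of "name:foo" and remove_tags of "name:bar", assuming the _action helper builds the usual mapping of action_type, output_name and action_arguments that native (.ga) workflows use:

# Hedged sketch only; the exact shape depends on the _action helper, which is
# not shown in this diff.
post_job_actions = {
    "TagDatasetActionpaired_output": {
        "action_type": "TagDatasetAction",
        "output_name": "paired_output",
        "action_arguments": {"tags": "name:foo"},        # ",".join(add_tags)
    },
    "RemoveTagDatasetActionpaired_output": {
        "action_type": "RemoveTagDatasetAction",
        "output_name": "paired_output",
        "action_arguments": {"tags": "name:bar"},        # ",".join(remove_tags)
    },
}

At run time, TagDatasetAction splits the comma-joined string back into a list (and normalizes any '#label' shorthand to 'name:label') before passing it to the tag handler.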
