Fill in needed state defaults when importing manually curated workflows.
jmchilton committed Sep 24, 2018
1 parent 50019e5 commit 3e77471
Showing 8 changed files with 86 additions and 13 deletions.
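
The change threads a new `fill_defaults` option from the workflow import API down through the managers to the tool modules, so hand-written workflow definitions whose steps carry only partial `tool_state` get the remaining parameters populated from each tool's defaults. As a minimal sketch (not part of this commit), a client could opt in when posting a workflow for import; the endpoint and payload keys below assume the standard Galaxy workflow API, and GALAXY_URL, API_KEY and the file name are placeholders.

# Minimal sketch, not from this commit: import a hand-curated native-format (.ga)
# workflow with the new fill_defaults flag. Assumes POST /api/workflows accepts the
# workflow dict under the "workflow" key; GALAXY_URL, API_KEY and my_workflow.ga are
# placeholders.
import json

import requests

GALAXY_URL = "https://galaxy.example.org"
API_KEY = "secret-api-key"

with open("my_workflow.ga") as fh:
    workflow_dict = json.load(fh)  # steps may contain only partial tool_state

payload = {
    "workflow": workflow_dict,
    "fill_defaults": True,   # the flag introduced by this commit (API default stays False)
    "exact_tools": True,     # keep the existing behaviour of not upgrading tool versions
}
response = requests.post(
    "%s/api/workflows" % GALAXY_URL,
    json=payload,
    params={"key": API_KEY},
)
response.raise_for_status()
print(response.json())
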
5 changes: 4 additions & 1 deletion lib/galaxy/managers/workflows.py
@@ -244,6 +244,7 @@ def build_workflow_from_dict(
publish=False,
create_stored_workflow=True,
exact_tools=True,
fill_defaults=False,
):
# Put parameters in workflow mode
trans.workflow_building_mode = workflow_building_modes.ENABLED
@@ -257,6 +258,7 @@ def build_workflow_from_dict(
data,
name=name,
exact_tools=exact_tools,
fill_defaults=fill_defaults,
)
if 'uuid' in data:
workflow.uuid = data['uuid']
@@ -298,14 +300,15 @@ def build_workflow_from_dict(
missing_tools=missing_tool_tups
)

def update_workflow_from_dict(self, trans, stored_workflow, workflow_data):
def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, **kwds):
# Put parameters in workflow mode
trans.workflow_building_mode = workflow_building_modes.ENABLED

workflow, missing_tool_tups = self._workflow_from_dict(
trans,
workflow_data,
name=stored_workflow.name,
**kwds
)

if missing_tool_tups:
3 changes: 2 additions & 1 deletion lib/galaxy/web/base/controller.py
@@ -1237,7 +1237,7 @@ def _import_shared_workflow(self, trans, stored):
session.flush()
return imported_stored

def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publish=False, exact_tools=True):
def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publish=False, exact_tools=True, fill_defaults=False):
"""
Creates a workflow from a dict. Created workflow is stored in the database and returned.
"""
@@ -1250,6 +1250,7 @@ def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publi
add_to_menu=add_to_menu,
publish=publish,
exact_tools=exact_tools,
fill_defaults=fill_defaults,
)
return created_workflow.stored_workflow, created_workflow.missing_tools

29 changes: 22 additions & 7 deletions lib/galaxy/webapps/galaxy/api/workflows.py
@@ -515,10 +515,12 @@ def update(self, trans, id, payload, **kwds):

if 'steps' in workflow_dict:
try:
from_dict_kwds = self.__import_or_update_kwds(payload)
workflow, errors = self.workflow_contents_manager.update_workflow_from_dict(
trans,
stored_workflow,
workflow_dict,
**from_dict_kwds
)
except workflows.MissingToolsException:
raise exceptions.MessageException("This workflow contains missing tools. It cannot be saved until they have been removed from the workflow or installed.")
@@ -578,18 +580,17 @@ def __api_import_new_workflow(self, trans, payload, **kwd):
import_tools = util.string_as_bool(payload.get("import_tools", False))
if import_tools and not trans.user_is_admin():
raise exceptions.AdminRequiredException()

from_dict_kwds = self.__import_or_update_kwds(payload)

publish = util.string_as_bool(payload.get("publish", False))
# If 'publish' set, default to importable.
importable = util.string_as_bool(payload.get("importable", publish))
# Galaxy will try to upgrade tool versions that don't match exactly during import,
# this prevents that.
exact_tools = util.string_as_bool(payload.get("exact_tools", True))

if publish and not importable:
raise exceptions.RequestParameterInvalidException("Published workflow must be importable.")
from_dict_kwds = dict(
publish=publish,
exact_tools=exact_tools,
)

from_dict_kwds["publish"] = publish
workflow, missing_tool_tups = self._workflow_from_dict(trans, data, **from_dict_kwds)
if importable:
self._make_item_accessible(trans.sa_session, workflow)
@@ -629,6 +630,20 @@ def __api_import_new_workflow(self, trans, payload, **kwd):
payload)
return item

def __import_or_update_kwds(self, payload):
# Galaxy will try to upgrade tool versions that don't match exactly during import;
# this prevents that.
exact_tools = util.string_as_bool(payload.get("exact_tools", True))

# Fill in missing tool state for hand-built workflows so the workflow can run; the
# default for this should probably become True at some point in the future.
fill_defaults = util.string_as_bool(payload.get("fill_defaults", False))

return {
'exact_tools': exact_tools,
'fill_defaults': fill_defaults,
}

@expose_api
def import_shared_workflow_deprecated(self, trans, payload, **kwd):
"""
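
The same flags now also flow through the update path shown above: `update()` collects them with `__import_or_update_kwds` and forwards them to `update_workflow_from_dict`. Below is a hedged sketch of re-saving an existing workflow with defaults filled in, assuming the PUT route accepts the revised definition under the same "workflow" key; GALAXY_URL, API_KEY and the workflow id are placeholders reused from the sketch near the top.

# Hedged sketch only, not from this commit: update an existing workflow's steps
# and let fill_defaults complete any missing tool state.
import json

import requests

GALAXY_URL = "https://galaxy.example.org"
API_KEY = "secret-api-key"
workflow_id = "f2db41e1fa331b3e"  # placeholder encoded workflow id

with open("my_workflow_revised.ga") as fh:
    revised = json.load(fh)  # must contain a "steps" section to hit the update branch

response = requests.put(
    "%s/api/workflows/%s" % (GALAXY_URL, workflow_id),
    json={"workflow": revised, "fill_defaults": True, "exact_tools": True},
    params={"key": API_KEY},
)
response.raise_for_status()
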
14 changes: 13 additions & 1 deletion lib/galaxy/workflow/modules.py
@@ -77,7 +77,7 @@ def __init__(self, trans, content_id=None, **kwds):
@classmethod
def from_dict(Class, trans, d, **kwds):
module = Class(trans, **kwds)
module.recover_state(d.get("tool_state"))
module.recover_state(d.get("tool_state"), **kwds)
module.label = d.get("label")
return module

@@ -848,6 +848,18 @@ def get_post_job_actions(self, incoming):

# ---- Run time ---------------------------------------------------------

def recover_state(self, state, **kwds):
""" Recover state `dict` from simple dictionary describing configuration
state (potentially from persisted step state).
Sub-classes should supply a `default_state` method which contains the
initial state `dict` with key, value pairs for all available attributes.
"""
super(ToolModule, self).recover_state(state, **kwds)
if kwds.get("fill_defaults", False) and self.tool:
self.compute_runtime_state(self.trans, step_updates=None)
self.tool.check_and_update_param_values(self.state.inputs, self.trans, workflow_building_mode=True)

def get_runtime_state(self):
state = DefaultToolState()
state.inputs = self.state.inputs
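
Conceptually, the `recover_state` override above computes the step's runtime state and then lets the tool validate and complete whatever the curated `tool_state` left out. The snippet below is purely illustrative, not Galaxy code: a hypothetical `fill_missing` helper applied to a partial state, with default values chosen to mirror the expectations of the new `test_conditional_ints` test further down.

# Illustrative only: what "filling defaults" means for a partial, hand-written
# tool state. fill_missing and DEFAULTS are hypothetical stand-ins, not Galaxy APIs.
DEFAULTS = {"p1": {"p1v": 7}, "p2": {"p2v": 7}, "p3": {"p3v": 4, "use": False}}


def fill_missing(partial, defaults):
    """Recursively complete ``partial`` with default values for absent keys."""
    filled = dict(defaults)
    for key, value in partial.items():
        if isinstance(value, dict) and isinstance(defaults.get(key), dict):
            filled[key] = fill_missing(value, defaults[key])
        else:
            filled[key] = value
    return filled


# A curated step that only sets p3.use; every other parameter comes from defaults.
print(fill_missing({"p3": {"use": True}}, DEFAULTS))
# -> {'p1': {'p1v': 7}, 'p2': {'p2v': 7}, 'p3': {'p3v': 4, 'use': True}}
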
34 changes: 34 additions & 0 deletions test/api/test_workflows_from_yaml.py
@@ -3,6 +3,7 @@
import json
import os

from base.populators import uses_test_history
from base.workflow_fixtures import (
WORKFLOW_RUNTIME_PARAMETER_SIMPLE,
)
@@ -300,6 +301,39 @@ def test_implicit_connections(self):
workflow = self._get("workflows/%s/download" % workflow_id).json()
print(workflow)

@uses_test_history()
def test_conditional_ints(self, history_id):
self._run_jobs("""
class: GalaxyWorkflow
steps:
- label: test_input
tool_id: disambiguate_cond
state:
p3:
use: true
files:
attach_files: false
""", test_data={}, history_id=history_id)
content = self.dataset_populator.get_history_dataset_content(history_id)
assert "no file specified" in content
assert "7 7 4" in content

self._run_jobs("""
class: GalaxyWorkflow
steps:
- label: test_input
tool_id: disambiguate_cond
state:
p3:
use: true
p3v: 5
files:
attach_files: false
""", test_data={}, history_id=history_id)
content = self.dataset_populator.get_history_dataset_content(history_id)
assert "no file specified" in content
assert "7 7 5" in content

def _steps_by_label(self, workflow_as_dict):
by_label = {}
assert "steps" in workflow_as_dict, workflow_as_dict
2 changes: 1 addition & 1 deletion test/base/populators.py
@@ -570,7 +570,7 @@ def invoke_workflow(self, history_id, workflow_id, inputs={}, request={}, assert
if inputs:
request["inputs"] = json.dumps(inputs)
request["inputs_by"] = 'step_index'
invocation_response = self.invoke_workflow_raw(request)
invocation_response = self.invoke_workflow_raw(workflow_id, request)
if assert_ok:
api_asserts.assert_status_code_is(invocation_response, 200)
invocation_id = invocation_response.json()["id"]
5 changes: 4 additions & 1 deletion test/base/workflows_format_2/main.py
@@ -35,7 +35,10 @@ def convert_and_import_workflow(has_workflow, **kwds):
workflow["name"] = name
publish = kwds.get("publish", False)
exact_tools = kwds.get("exact_tools", False)
import_kwds = {}
fill_defaults = kwds.get("fill_defaults", True)
import_kwds = {
"fill_defaults": fill_defaults
}
if publish:
import_kwds["publish"] = True
if exact_tools:
7 changes: 6 additions & 1 deletion test/functional/tools/disambiguate_cond.xml
@@ -1,6 +1,11 @@
<tool id="disambiguate_cond" name="disambiguate_cond">
<command>
echo "$p1.p1v $p2.p2v $p3.p3v" > $out_file1; cat "$files.p4.file" >> $out_file1;
echo "$p1.p1v $p2.p2v $p3.p3v" > $out_file1;
#if $files.attach_files
cat "$files.p4.file" >> $out_file1;
#else
echo "no file specified" >> $out_file1;
#end if
</command>
<inputs>
<conditional name="p1">
