Merge pull request #6746 from jmchilton/workflow_state_touch_1
Fill in needed state defaults when importing manually crafted workflows.
mvdbeek committed Oct 6, 2018
2 parents 56ca1cd + 3e77471 commit 10e984c
Showing 9 changed files with 155 additions and 73 deletions.
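In practice, the new flag rides along in the import payload. Below is a minimal sketch of importing a sparse, hand-written workflow with the flag enabled; the endpoint path follows Galaxy's workflow API conventions, and the server URL, API key, tool id, and response shape are hypothetical placeholders, not confirmed by this diff.

import requests

GALAXY_URL = "https://galaxy.example.org"  # hypothetical server
API_KEY = "0123456789abcdef"               # hypothetical API key

# A deliberately sparse, hand-crafted workflow: tool_state is left empty so
# Galaxy has to fill in the defaults during import.
workflow = {
    "a_galaxy_workflow": "true",
    "name": "hand-crafted example",
    "annotation": "",
    "steps": {
        "0": {
            "type": "tool",
            "tool_id": "cat1",   # hypothetical tool id
            "tool_state": "{}",  # missing state; fill_defaults supplies it
            "input_connections": {},
        },
    },
}

payload = {
    "workflow": workflow,
    "fill_defaults": True,  # the flag added in this commit; defaults to False
    "exact_tools": True,    # don't upgrade tool versions during import
}

response = requests.post(
    "%s/api/workflows" % GALAXY_URL,
    json=payload,
    params={"key": API_KEY},
)
response.raise_for_status()
print(response.json())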
5 changes: 4 additions & 1 deletion lib/galaxy/managers/workflows.py
@@ -244,6 +244,7 @@ def build_workflow_from_dict(
         publish=False,
         create_stored_workflow=True,
         exact_tools=True,
+        fill_defaults=False,
     ):
         # Put parameters in workflow mode
         trans.workflow_building_mode = workflow_building_modes.ENABLED
@@ -257,6 +258,7 @@ def build_workflow_from_dict(
             data,
             name=name,
             exact_tools=exact_tools,
+            fill_defaults=fill_defaults,
         )
         if 'uuid' in data:
             workflow.uuid = data['uuid']
@@ -298,14 +300,15 @@ def build_workflow_from_dict(
             missing_tools=missing_tool_tups
         )

-    def update_workflow_from_dict(self, trans, stored_workflow, workflow_data):
+    def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, **kwds):
         # Put parameters in workflow mode
         trans.workflow_building_mode = workflow_building_modes.ENABLED

         workflow, missing_tool_tups = self._workflow_from_dict(
             trans,
             workflow_data,
             name=stored_workflow.name,
+            **kwds
         )

         if missing_tool_tups:
3 changes: 2 additions & 1 deletion lib/galaxy/web/base/controller.py
@@ -1237,7 +1237,7 @@ def _import_shared_workflow(self, trans, stored):
         session.flush()
         return imported_stored

-    def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publish=False, exact_tools=True):
+    def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publish=False, exact_tools=True, fill_defaults=False):
         """
         Creates a workflow from a dict. Created workflow is stored in the database and returned.
         """
@@ -1250,6 +1250,7 @@ def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publi
             add_to_menu=add_to_menu,
             publish=publish,
             exact_tools=exact_tools,
+            fill_defaults=fill_defaults,
         )
         return created_workflow.stored_workflow, created_workflow.missing_tools
29 changes: 22 additions & 7 deletions lib/galaxy/webapps/galaxy/api/workflows.py
@@ -522,10 +522,12 @@ def update(self, trans, id, payload, **kwds):

         if 'steps' in workflow_dict:
             try:
+                from_dict_kwds = self.__import_or_update_kwds(payload)
                 workflow, errors = self.workflow_contents_manager.update_workflow_from_dict(
                     trans,
                     stored_workflow,
                     workflow_dict,
+                    **from_dict_kwds
                 )
             except workflows.MissingToolsException:
                 raise exceptions.MessageException("This workflow contains missing tools. It cannot be saved until they have been removed from the workflow or installed.")
@@ -585,18 +587,17 @@ def __api_import_new_workflow(self, trans, payload, **kwd):
         import_tools = util.string_as_bool(payload.get("import_tools", False))
         if import_tools and not trans.user_is_admin():
             raise exceptions.AdminRequiredException()
+
+        from_dict_kwds = self.__import_or_update_kwds(payload)
+
         publish = util.string_as_bool(payload.get("publish", False))
         # If 'publish' set, default to importable.
         importable = util.string_as_bool(payload.get("importable", publish))
-        # Galaxy will try to upgrade tool versions that don't match exactly during import,
-        # this prevents that.
-        exact_tools = util.string_as_bool(payload.get("exact_tools", True))
+
         if publish and not importable:
             raise exceptions.RequestParameterInvalidException("Published workflow must be importable.")
-        from_dict_kwds = dict(
-            publish=publish,
-            exact_tools=exact_tools,
-        )
+
+        from_dict_kwds["publish"] = publish
         workflow, missing_tool_tups = self._workflow_from_dict(trans, data, **from_dict_kwds)
         if importable:
             self._make_item_accessible(trans.sa_session, workflow)
@@ -636,6 +637,20 @@ def __api_import_new_workflow(self, trans, payload, **kwd):
                                payload)
         return item

+    def __import_or_update_kwds(self, payload):
+        # Galaxy will try to upgrade tool versions that don't match exactly during import,
+        # this prevents that.
+        exact_tools = util.string_as_bool(payload.get("exact_tools", True))
+
+        # Fill in missing tool state for hand-built workflows so the workflow can run;
+        # the default should probably become True at some point in the future.
+        fill_defaults = util.string_as_bool(payload.get("fill_defaults", False))
+
+        return {
+            'exact_tools': exact_tools,
+            'fill_defaults': fill_defaults,
+        }
+
     @expose_api
     def import_shared_workflow_deprecated(self, trans, payload, **kwd):
         """
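For readers outside the Galaxy codebase, here is a self-contained approximation of what __import_or_update_kwds does with a request payload. Galaxy's util.string_as_bool is replaced by a local stand-in, so the accepted spellings may differ from the real helper.

def _as_bool(value):
    # Local stand-in for galaxy.util.string_as_bool.
    if isinstance(value, bool):
        return value
    return str(value).lower() in ("true", "yes", "on", "1")

def import_or_update_kwds(payload):
    # Mirrors the keyword extraction above: both flags arrive as strings
    # in API payloads and are normalized to booleans.
    return {
        "exact_tools": _as_bool(payload.get("exact_tools", True)),
        "fill_defaults": _as_bool(payload.get("fill_defaults", False)),
    }

assert import_or_update_kwds({}) == {"exact_tools": True, "fill_defaults": False}
assert import_or_update_kwds({"fill_defaults": "true"})["fill_defaults"] is True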
14 changes: 13 additions & 1 deletion lib/galaxy/workflow/modules.py
@@ -78,7 +78,7 @@ def __init__(self, trans, content_id=None, **kwds):
     @classmethod
     def from_dict(Class, trans, d, **kwds):
         module = Class(trans, **kwds)
-        module.recover_state(d.get("tool_state"))
+        module.recover_state(d.get("tool_state"), **kwds)
         module.label = d.get("label")
         return module

@@ -857,6 +857,18 @@ def get_post_job_actions(self, incoming):

     # ---- Run time ---------------------------------------------------------

+    def recover_state(self, state, **kwds):
+        """ Recover state `dict` from a simple dictionary describing configuration
+        state (potentially from persisted step state).
+
+        Sub-classes should supply a `default_state` method which contains the
+        initial state `dict` with key, value pairs for all available attributes.
+        """
+        super(ToolModule, self).recover_state(state, **kwds)
+        if kwds.get("fill_defaults", False) and self.tool:
+            self.compute_runtime_state(self.trans, step_updates=None)
+            self.tool.check_and_update_param_values(self.state.inputs, self.trans, workflow_building_mode=True)
+
     def get_runtime_state(self):
         state = DefaultToolState()
         state.inputs = self.state.inputs
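The pattern here is the heart of the change: from_dict forwards its keyword arguments into recover_state, and ToolModule opts into default-filling when fill_defaults is set. The following is a toy sketch of that flow with stand-in classes, not Galaxy's; the real ToolModule derives defaults from the tool's parameter definitions via compute_runtime_state rather than a class-level dict.

class Module(object):
    label = None

    @classmethod
    def from_dict(cls, d, **kwds):
        module = cls()
        # **kwds (e.g. fill_defaults=True) is threaded into recover_state.
        module.recover_state(d.get("tool_state") or {}, **kwds)
        module.label = d.get("label")
        return module

    def recover_state(self, state, **kwds):
        self.state = dict(state)

class ToolModule(Module):
    # Stand-in for defaults the real module computes from the tool itself.
    DEFAULT_STATE = {"p3": {"use": False, "p3v": 4}}

    def recover_state(self, state, **kwds):
        super(ToolModule, self).recover_state(state, **kwds)
        if kwds.get("fill_defaults", False):
            for key, default in self.DEFAULT_STATE.items():
                self.state.setdefault(key, default)

module = ToolModule.from_dict({"tool_state": {}}, fill_defaults=True)
assert module.state == {"p3": {"use": False, "p3v": 4}}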
61 changes: 3 additions & 58 deletions test/api/test_workflows.py
@@ -2,19 +2,16 @@

 import json
 import time
-from collections import namedtuple
-from json import dumps
 from uuid import uuid4

 import yaml
 from requests import delete, put

 from base import api  # noqa: I100,I202
 from base import rules_test_data  # noqa: I100
 from base.populators import (  # noqa: I100
     DatasetCollectionPopulator,
     DatasetPopulator,
-    load_data_dict,
     skip_without_tool,
     wait_on,
     WorkflowPopulator
@@ -31,7 +28,6 @@
     WORKFLOW_WITH_RULES_1,
 )
 from galaxy.exceptions import error_codes  # noqa: I201
-from galaxy.tools.verify.test_data import TestDataResolver


 SIMPLE_NESTED_WORKFLOW_YAML = """
@@ -208,59 +204,11 @@ def _invocation_details(self, workflow_id, invocation_id, **kwds):
         invocation_details = invocation_details_response.json()
         return invocation_details

-    def _run_jobs(self, has_workflow, history_id=None, wait=True, source_type=None, jobs_descriptions=None, expected_response=200, assert_ok=True):
-        def read_test_data(test_dict):
-            test_data_resolver = TestDataResolver()
-            filename = test_data_resolver.get_filename(test_dict["value"])
-            content = open(filename, "r").read()
-            return content
-
+    def _run_jobs(self, has_workflow, history_id=None, **kwds):
         if history_id is None:
             history_id = self.history_id
-        workflow_id = self._upload_yaml_workflow(
-            has_workflow, source_type=source_type
-        )
-        if jobs_descriptions is None:
-            assert source_type != "path"
-            jobs_descriptions = yaml.safe_load(has_workflow)
-
-        test_data = jobs_descriptions.get("test_data", {})
-        parameters = test_data.pop('step_parameters', {})
-        replacement_parameters = test_data.pop("replacement_parameters", {})
-        inputs, label_map, has_uploads = load_data_dict(history_id, test_data, self.dataset_populator, self.dataset_collection_populator)
-        workflow_request = dict(
-            history="hist_id=%s" % history_id,
-            workflow_id=workflow_id,
-        )
-        workflow_request["inputs"] = dumps(label_map)
-        workflow_request["inputs_by"] = 'name'
-        if parameters:
-            workflow_request["parameters"] = dumps(parameters)
-            workflow_request["parameters_normalized"] = True
-        if replacement_parameters:
-            workflow_request["replacement_params"] = dumps(replacement_parameters)
-        if has_uploads:
-            self.dataset_populator.wait_for_history(history_id, assert_ok=True)
-        url = "workflows/%s/usage" % (workflow_id)
-        invocation_response = self._post(url, data=workflow_request)
-        self._assert_status_code_is(invocation_response, expected_response)
-        invocation = invocation_response.json()
-        invocation_id = invocation.get('id')
-        if invocation_id:
-            # Wait for workflow to become fully scheduled and then for all jobs
-            # complete.
-            if wait:
-                self.workflow_populator.wait_for_workflow(workflow_id, invocation_id, history_id, assert_ok=assert_ok)
-        jobs = self._history_jobs(history_id)
-        return RunJobsSummary(
-            history_id=history_id,
-            workflow_id=workflow_id,
-            invocation_id=invocation_id,
-            inputs=inputs,
-            jobs=jobs,
-            invocation=invocation,
-            workflow_request=workflow_request
-        )
-
+        return self.workflow_populator.run_workflow(has_workflow, history_id=history_id, **kwds)

     def _history_jobs(self, history_id):
         return self._get("jobs", {"history_id": history_id, "order_by": "create_time"}).json()
@@ -3383,6 +3331,3 @@ def _all_user_invocation_ids(self):
         self._assert_status_code_is(all_invocations_for_user, 200)
         invocation_ids = [i["id"] for i in all_invocations_for_user.json()]
         return invocation_ids
-
-
-RunJobsSummary = namedtuple('RunJobsSummary', ['history_id', 'workflow_id', 'invocation_id', 'inputs', 'jobs', 'invocation', 'workflow_request'])
34 changes: 34 additions & 0 deletions test/api/test_workflows_from_yaml.py
@@ -3,6 +3,7 @@
 import json
 import os

+from base.populators import uses_test_history
 from base.workflow_fixtures import (
     WORKFLOW_RUNTIME_PARAMETER_SIMPLE,
 )
@@ -300,6 +301,39 @@ def test_implicit_connections(self):
         workflow = self._get("workflows/%s/download" % workflow_id).json()
         print(workflow)

+    @uses_test_history()
+    def test_conditional_ints(self, history_id):
+        self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+- label: test_input
+  tool_id: disambiguate_cond
+  state:
+    p3:
+      use: true
+    files:
+      attach_files: false
+""", test_data={}, history_id=history_id)
+        content = self.dataset_populator.get_history_dataset_content(history_id)
+        assert "no file specified" in content
+        assert "7 7 4" in content
+
+        self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+- label: test_input
+  tool_id: disambiguate_cond
+  state:
+    p3:
+      use: true
+      p3v: 5
+    files:
+      attach_files: false
+""", test_data={}, history_id=history_id)
+        content = self.dataset_populator.get_history_dataset_content(history_id)
+        assert "no file specified" in content
+        assert "7 7 5" in content
+
     def _steps_by_label(self, workflow_as_dict):
         by_label = {}
         assert "steps" in workflow_as_dict, workflow_as_dict
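The two runs above pin down the semantics: defaults are filled only where the hand-written state is silent, so p3v resolves to the tool default in the first run and to the explicit 5 in the second. Below is a self-contained illustration of that merge, not Galaxy's implementation; the default values are inferred from the expected "7 7 4" output.

def fill_state_defaults(sparse, defaults):
    # Recursively take hand-written values where present, defaults elsewhere.
    filled = {}
    for key, default in defaults.items():
        value = sparse.get(key, default)
        if isinstance(default, dict) and isinstance(value, dict):
            value = fill_state_defaults(value, default)
        filled[key] = value
    return filled

# Hypothetical defaults for the disambiguate_cond test tool.
defaults = {"p3": {"use": False, "p3v": 4}, "files": {"attach_files": True}}

run_1 = fill_state_defaults({"p3": {"use": True}, "files": {"attach_files": False}}, defaults)
assert run_1["p3"] == {"use": True, "p3v": 4}      # -> "7 7 4"

run_2 = fill_state_defaults({"p3": {"use": True, "p3v": 5}, "files": {"attach_files": False}}, defaults)
assert run_2["p3"] == {"use": True, "p3v": 5}      # -> "7 7 5"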
