Merge pull request #6148 from jmchilton/cwl-small-stuff-2
Small fixes and small refactorings toward reuse in CWL branch (retry)
martenson committed Jun 12, 2018
2 parents d514ebc + b50ae94 commit 5a27b0c
Showing 7 changed files with 42 additions and 23 deletions.
1 change: 0 additions & 1 deletion lib/galaxy/model/mapping.py
@@ -886,7 +886,6 @@
     Column("subworkflow_id", Integer, ForeignKey("workflow.id"), index=True, nullable=True),
     Column("type", String(64)),
     Column("tool_id", TEXT),
-    # Reserved for future
     Column("tool_version", TEXT),
     Column("tool_inputs", JSONType),
     Column("tool_errors", JSONType),
2 changes: 1 addition & 1 deletion lib/galaxy/tools/__init__.py
@@ -754,7 +754,7 @@ def parse(self, tool_source, guid=None):
         self.is_workflow_compatible = self.check_workflow_compatible(tool_source)
         self.__parse_trackster_conf(tool_source)
         # Record macro paths so we can reload a tool if any of its macros has changed
-        self._macro_paths = tool_source._macro_paths
+        self._macro_paths = tool_source.macro_paths()

     def __parse_legacy_features(self, tool_source):
         self.code_namespace = dict()
3 changes: 3 additions & 0 deletions lib/galaxy/tools/parser/interface.py
@@ -211,6 +211,9 @@ def parse_profile(self):
         """ Return tool profile version as Galaxy major e.g. 16.01 or 16.04.
         """

+    def macro_paths(self):
+        return []
+
     def parse_tests_to_dict(self):
         return {'tests': []}

3 changes: 3 additions & 0 deletions lib/galaxy/tools/parser/xml.py
@@ -382,6 +382,9 @@ def parse_help(self):
         help_elem = self.root.find('help')
         return help_elem.text if help_elem is not None else None

+    def macro_paths(self):
+        return self._macro_paths
+
     def parse_tests_to_dict(self):
         tests_elem = self.root.find("tests")
         tests = []
10 changes: 9 additions & 1 deletion lib/galaxy/workflow/modules.py
@@ -52,7 +52,15 @@
 # actions (i.e. PJA specified at runtime on top of the workflow-wide defined
 # ones).
 RUNTIME_POST_JOB_ACTIONS_KEY = "__POST_JOB_ACTIONS__"
-NO_REPLACEMENT = object()
+
+
+class NoReplacement(object):
+
+    def __str__(self):
+        return "NO_REPLACEMENT singleton"
+
+
+NO_REPLACEMENT = NoReplacement()


 class WorkflowModule(object):
35 changes: 18 additions & 17 deletions test/api/test_workflows.py
@@ -174,6 +174,24 @@ def _assert_history_job_count(self, history_id, n):
         jobs = self._history_jobs(history_id)
         self.assertEqual(len(jobs), n)

+    def _download_workflow(self, workflow_id, style=None):
+        params = {}
+        if style:
+            params = {"style": style}
+        download_response = self._get("workflows/%s/download" % workflow_id, params)
+        self._assert_status_code_is(download_response, 200)
+        downloaded_workflow = download_response.json()
+        return downloaded_workflow
+
+    def wait_for_invocation_and_jobs(self, history_id, workflow_id, invocation_id, assert_ok=True):
+        state = self.workflow_populator.wait_for_invocation(workflow_id, invocation_id)
+        if assert_ok:
+            assert state == "scheduled", state
+        self.workflow_populator.wait_for_invocation(workflow_id, invocation_id)
+        time.sleep(.5)
+        self.dataset_populator.wait_for_history_jobs(history_id, assert_ok=assert_ok)
+        time.sleep(.5)
+

 # Workflow API TODO:
 # - Allow history_id as param to workflow run action. (hist_id)
@@ -1730,14 +1748,6 @@ def test_run_with_text_connection(self):
         content = self.dataset_populator.get_history_dataset_content(history_id)
         self.assertEqual("chr5\t131424298\t131424460\tCCDS4149.1_cds_0_0_chr5_131424299_f\t0\t+\n", content)

-    def wait_for_invocation_and_jobs(self, history_id, workflow_id, invocation_id, assert_ok=True):
-        state = self.workflow_populator.wait_for_invocation(workflow_id, invocation_id)
-        if assert_ok:
-            assert state == "scheduled", state
-        time.sleep(.5)
-        self.dataset_populator.wait_for_history_jobs(history_id, assert_ok=assert_ok)
-        time.sleep(.5)
-
     @flakey
     @skip_without_tool('cat1')
     def test_workflow_rerun_with_use_cached_job(self):
@@ -2927,15 +2937,6 @@ def __import_workflow(self, workflow_id, deprecated_route=False):
         )
         return self._post(route, import_data)

-    def _download_workflow(self, workflow_id, style=None):
-        params = {}
-        if style:
-            params = {"style": style}
-        download_response = self._get("workflows/%s/download" % workflow_id, params)
-        self._assert_status_code_is(download_response, 200)
-        downloaded_workflow = download_response.json()
-        return downloaded_workflow
-
     def _show_workflow(self, workflow_id):
         show_response = self._get("workflows/%s" % workflow_id)
         self._assert_status_code_is(show_response, 200)
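
Hoisting _download_workflow and wait_for_invocation_and_jobs out of individual test classes into the shared base class at the top of the module makes them available to every suite in the file. A hypothetical test body using the promoted helpers (workflow_id, history_id, and invocation_id are assumed to come from the usual test setup; this is not a test from the diff):

def test_example(self):
    # Export the stored workflow; style="run" requests the run-ready form.
    workflow = self._download_workflow(workflow_id)
    run_form = self._download_workflow(workflow_id, style="run")
    # Block until the invocation schedules and all spawned jobs finish.
    self.wait_for_invocation_and_jobs(history_id, workflow_id, invocation_id)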
11 changes: 8 additions & 3 deletions test/base/populators.py
@@ -277,9 +277,14 @@ def get_remote_files(self, target="ftp"):
         return self._get("remote_files", data={"target": target}).json()

     def run_tool_payload(self, tool_id, inputs, history_id, **kwds):
-        if "files_0|file_data" in inputs:
-            kwds["__files"] = {"files_0|file_data": inputs["files_0|file_data"]}
-            del inputs["files_0|file_data"]
+        # Remove files_%d|file_data parameters from inputs dict and attach
+        # as __files dictionary.
+        for key, value in list(inputs.items()):
+            if key.startswith("files_") and key.endswith("|file_data"):
+                if "__files" not in kwds:
+                    kwds["__files"] = {}
+                kwds["__files"][key] = value
+                del inputs[key]

         return dict(
             tool_id=tool_id,
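
The rewritten loop generalizes the old hard-coded files_0|file_data special case: every files_%d|file_data entry is moved out of the tool-state inputs dict and into the __files dict used for the multipart upload, so payloads with several file inputs now work. A standalone recreation of just that loop (the split_file_inputs name is hypothetical, not the populator itself):

def split_file_inputs(inputs, kwds):
    # Move files_%d|file_data parameters out of the tool-state inputs
    # and attach them to the __files upload dictionary.
    for key, value in list(inputs.items()):
        if key.startswith("files_") and key.endswith("|file_data"):
            kwds.setdefault("__files", {})[key] = value
            del inputs[key]
    return inputs, kwds


inputs = {"files_0|file_data": b"first", "files_1|file_data": b"second", "dbkey": "hg19"}
inputs, kwds = split_file_inputs(inputs, {})
assert inputs == {"dbkey": "hg19"}
assert sorted(kwds["__files"]) == ["files_0|file_data", "files_1|file_data"]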
