forked from galaxyproject/gxformat2
-
Notifications
You must be signed in to change notification settings - Fork 0
/
converter.py
759 lines (597 loc) · 25 KB
/
converter.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
"""Functionality for converting a Format 2 workflow into a standard Galaxy workflow."""
from __future__ import print_function
import argparse
import copy
import json
import logging
import os
import sys
import uuid
from collections import OrderedDict
from typing import Dict, Optional
from ._labels import Labels
from .model import (
convert_dict_to_id_list_if_needed,
ensure_step_position,
inputs_as_native_steps,
with_step_ids,
)
from .yaml import ordered_load
SCRIPT_DESCRIPTION = """
Convert a Format 2 Galaxy workflow description into a native format.
"""

# Opt-in (via environment variable) support for pre-Format-2 connection syntax:
# source: step#output and $link: step#output instead of outputSource: step/output and $link: step/output
SUPPORT_LEGACY_CONNECTIONS = os.environ.get("GXFORMAT2_SUPPORT_LEGACY_CONNECTIONS") == "1"

# Step types recognized by the native Galaxy workflow format.
STEP_TYPES = [
    "subworkflow",
    "data_input",
    "data_collection_input",
    "tool",
    "pause",
    "parameter_input",
]

# Format 2 shorthand names mapped to native step types.
STEP_TYPE_ALIASES = {
    'input': 'data_input',
    'input_collection': 'data_collection_input',
    'parameter': 'parameter_input',
}

# Maps the "class" of an inline "run" action to the name of the converter
# function that handles it (resolved by name in _python_to_workflow).
RUN_ACTIONS_TO_STEPS = {
    'GalaxyWorkflow': 'run_workflow_to_step',
    'GalaxyTool': 'run_tool_to_step',
}

# Maps Format 2 per-output keys to native PostJobAction definitions.
# 'default' is the falsy value treated as "action not requested" and
# 'arguments' converts the Format 2 value into native action_arguments.
POST_JOB_ACTIONS = {
    'hide': {
        'action_class': "HideDatasetAction",
        'default': False,
        'arguments': lambda x: x,
    },
    'rename': {
        'action_class': 'RenameDatasetAction',
        'default': {},
        'arguments': lambda x: {'newname': x},
    },
    'delete_intermediate_datasets': {
        'action_class': 'DeleteIntermediatesAction',
        'default': False,
        'arguments': lambda x: x,
    },
    'change_datatype': {
        'action_class': 'ChangeDatatypeAction',
        'default': {},
        'arguments': lambda x: {'newtype': x},
    },
    'set_columns': {
        'action_class': 'ColumnSetAction',
        'default': {},
        'arguments': lambda x: x,
    },
    'add_tags': {
        'action_class': 'TagDatasetAction',
        'default': [],
        'arguments': lambda x: {'tags': ",".join(x)},
    },
    'remove_tags': {
        'action_class': 'RemoveTagDatasetAction',
        'default': [],
        'arguments': lambda x: {'tags': ",".join(x)},
    },
}

log = logging.getLogger(__name__)
def rename_arg(argument):
    """Return *argument* unchanged (identity transform for argument names)."""
    return argument
def clean_connection(value):
    """Normalize a connection string, rewriting legacy ``step#output`` to ``step/output``.

    :param value: connection string (may be ``None`` or empty, which pass
        through unchanged).
    :returns: the possibly-rewritten connection string.
    """
    if value and "#" in value and SUPPORT_LEGACY_CONNECTIONS:
        # Hope these are just used by Galaxy testing workflows and such, and not in production workflows.
        # Use logging's lazy %-formatting; ``warn`` is a deprecated alias of ``warning``.
        log.warning("Legacy workflow syntax for connections [%s] will not be supported in the future", value)
        value = value.replace("#", "/", 1)
    # Bug fix: the previous implementation returned the value only from an
    # ``else`` branch, so any rewritten legacy connection fell through and
    # the function returned None instead of the converted connection.
    return value
class ImportOptions(object):
    """Options controlling how Format 2 workflows are converted."""

    def __init__(self):
        # When enabled, each subworkflow is emitted once and referenced by id.
        self.deduplicate_subworkflows = False
def yaml_to_workflow(has_yaml, galaxy_interface, workflow_directory, import_options=None):
    """Convert a Format 2 workflow into standard Galaxy format from supplied stream.

    :param has_yaml: open stream or YAML text holding the Format 2 workflow.
    :param galaxy_interface: adapter used to import inline tool definitions.
    :param workflow_directory: base directory for resolving ``@import`` paths.
    :param import_options: optional ``ImportOptions`` instance.
    :returns: native Galaxy workflow dictionary.
    """
    as_python = ordered_load(has_yaml)
    return python_to_workflow(as_python, galaxy_interface, workflow_directory, import_options=import_options)
def python_to_workflow(as_python, galaxy_interface, workflow_directory=None, import_options=None):
    """Convert a Format 2 workflow into standard Galaxy format from supplied dictionary.

    :param as_python: parsed Format 2 workflow dict; a dict carrying a
        ``yaml_content`` key is re-parsed from that embedded YAML first.
    :param galaxy_interface: adapter used to import inline tool definitions.
    :param workflow_directory: base directory for resolving ``@import`` paths;
        defaults to the current working directory.
    :param import_options: optional ``ImportOptions`` controlling subworkflow
        deduplication.
    """
    if "yaml_content" in as_python:
        as_python = ordered_load(as_python["yaml_content"])
    if workflow_directory is None:
        workflow_directory = os.path.abspath(".")
    conversion_context = ConversionContext(
        galaxy_interface,
        workflow_directory,
        import_options,
    )
    # Resolve any $graph document; the "main" entry becomes the workflow to convert.
    as_python = _preprocess_graphs(as_python, conversion_context)
    subworkflows = None
    if conversion_context.import_options.deduplicate_subworkflows:
        # TODO: import only required workflows...
        # TODO: dag sort these...
        subworkflows = OrderedDict()
        # Convert every non-main $graph entry once so references can share them.
        for graph_id, subworkflow_content in conversion_context.graph_ids.items():
            if graph_id == "main":
                continue
            subworkflow_conversion_context = conversion_context.get_subworkflow_conversion_context_graph("#" + graph_id)
            subworkflows[graph_id] = _python_to_workflow(copy.deepcopy(subworkflow_content), subworkflow_conversion_context)
    converted = _python_to_workflow(as_python, conversion_context)
    if subworkflows is not None:
        converted["subworkflows"] = subworkflows
    return converted
# move to a utils file?
def steps_as_list(format2_workflow: dict, add_ids: bool = False, inputs_offset: int = 0, mutate: bool = False):
    """Return steps as a list, converting an ID-keyed map to list form if needed.

    Map keys become step labels on the list elements.

    :param add_ids: assign sequential integer ``id`` values (offset by
        *inputs_offset*) to steps lacking one.
    :param mutate: operate on the supplied step dicts in place; when False,
        id assignment is done on copies.  (TODO: move fully toward the
        non-mutating path.)
    :raises Exception: if the workflow has no ``steps`` key.
    """
    if "steps" not in format2_workflow:
        raise Exception("No 'steps' key in dict, keys are %s" % format2_workflow.keys())
    steps = format2_workflow["steps"]
    steps = convert_dict_to_id_list_if_needed(steps, add_label=True, mutate=mutate)
    if add_ids:
        if mutate:
            _append_step_id_to_step_list_elements(steps, inputs_offset=inputs_offset)
        else:
            steps = with_step_ids(steps, inputs_offset=inputs_offset)
    return steps
def _append_step_id_to_step_list_elements(steps: list, inputs_offset: int = 0):
assert isinstance(steps, list)
for i, step in enumerate(steps):
if "id" not in step:
step["id"] = i + inputs_offset
assert step["id"] is not None
def _python_to_workflow(as_python, conversion_context):
    """Convert one (sub)workflow dict in place to the native Galaxy structure."""
    if "class" not in as_python:
        raise Exception("This is not a not a valid Galaxy workflow definition, must define a class.")
    if as_python["class"] != "GalaxyWorkflow":
        raise Exception("This is not a not a valid Galaxy workflow definition, 'class' must be 'GalaxyWorkflow'.")
    # .ga files don't have this, drop it so it isn't interpreted as a format 2 workflow.
    as_python.pop("class")
    _ensure_defaults(as_python, {
        "a_galaxy_workflow": "true",
        "format-version": "0.1",
        "name": "Workflow",
        "uuid": str(uuid.uuid4()),
    })
    _populate_annotation(as_python)

    steps = steps_as_list(as_python, mutate=True)

    convert_inputs_to_steps(as_python, steps)
    if isinstance(steps, list):
        _append_step_id_to_step_list_elements(steps)
        # Native workflows key steps by their stringified order index.
        steps_as_dict = OrderedDict()
        for i, step in enumerate(steps):
            steps_as_dict[str(i)] = step
            if "label" in step:
                label = step["label"]
                # Remember label -> index so connections can reference labels.
                conversion_context.labels[label] = i

            # TODO: this really should be optional in Galaxy API.
            ensure_step_position(step, i)

        as_python["steps"] = steps_as_dict
        steps = steps_as_dict

    # First pass: expand inline/imported "run" actions into concrete steps.
    for step in steps.values():
        step_type = step.get("type", None)
        if "run" in step:
            if step_type is not None:
                raise Exception("Steps specified as run actions cannot specify a type.")
            run_action = step.get("run")
            run_action = conversion_context.get_runnable_description(run_action)
            if isinstance(run_action, dict):
                run_class = run_action["class"]
                # Dispatch by class name; RUN_ACTIONS_TO_STEPS only holds
                # fixed function names, so eval resolves trusted identifiers.
                run_to_step_function = eval(RUN_ACTIONS_TO_STEPS[run_class])

                run_to_step_function(conversion_context, step, run_action)
            else:
                # A graph-id reference - record it and treat as a subworkflow.
                step["content_id"] = run_action
                step["type"] = "subworkflow"
            del step["run"]

    # Second pass: normalize each step via its transform_<type> function.
    for step in steps.values():
        step_type = step.get("type", "tool")
        step_type = STEP_TYPE_ALIASES.get(step_type, step_type)
        if step_type not in STEP_TYPES:
            raise Exception("Unknown step type encountered %s" % step_type)
        step["type"] = step_type
        # step_type was validated against STEP_TYPES above, so this eval
        # only ever names one of the transform_* functions in this module.
        eval("transform_%s" % step_type)(conversion_context, step)

    # Attach workflow outputs to the steps that produce them.
    outputs = as_python.pop("outputs", [])
    outputs = convert_dict_to_id_list_if_needed(outputs)

    for output in outputs:
        assert isinstance(output, dict), "Output definition must be dictionary"
        assert "source" in output or "outputSource" in output, "Output definition must specify source"

        if "label" in output and "id" in output:
            raise Exception("label and id are aliases for outputs, may only define one")

        if "label" not in output and "id" not in output:
            label = ""

        raw_label = output.pop("label", None)
        raw_id = output.pop("id", None)
        label = raw_label or raw_id

        # Anonymous/placeholder labels are dropped entirely.
        if Labels.is_anonymous_output_label(label):
            label = None
        source = clean_connection(output.get("outputSource"))
        if source is None and SUPPORT_LEGACY_CONNECTIONS:
            # Legacy "source: step#output" spelling.
            source = output.get("source").replace("#", "/", 1)
        id, output_name = conversion_context.step_output(source)
        step = steps[str(id)]
        workflow_output = {
            "output_name": output_name,
            "label": label,
            "uuid": output.get("uuid", None)
        }
        if "workflow_outputs" not in step:
            step["workflow_outputs"] = []
        step["workflow_outputs"].append(workflow_output)

    return as_python
def _preprocess_graphs(as_python, conversion_context):
if not isinstance(as_python, dict):
raise Exception("This is not a not a valid Galaxy workflow definition.")
format_version = as_python.get("format-version", "v2.0")
assert format_version == "v2.0"
if "class" not in as_python and "$graph" in as_python:
for subworkflow in as_python["$graph"]:
if not isinstance(subworkflow, dict):
raise Exception("Malformed workflow content in $graph")
if "id" not in subworkflow:
raise Exception("No subworkflow ID found for entry in $graph.")
subworkflow_id = subworkflow["id"]
if subworkflow_id == "main":
as_python = subworkflow
conversion_context.register_runnable(subworkflow)
return as_python
def convert_inputs_to_steps(workflow_dict: dict, steps: list):
    """Prepend the workflow's ``inputs`` to *steps* as native input steps.

    workflow_dict is a Format 2 representation of a workflow and steps is a
    list of steps.  Mutates both arguments: the ``inputs`` key is removed from
    *workflow_dict* and the generated steps are inserted at the front of
    *steps* in order.
    """
    if "inputs" not in workflow_dict:
        return
    new_input_steps = inputs_as_native_steps(workflow_dict)
    del workflow_dict["inputs"]
    steps[0:0] = new_input_steps
def run_workflow_to_step(conversion_context, step, run_action):
    """Convert a step whose ``run`` action is a workflow into a native subworkflow step."""
    step["type"] = "subworkflow"
    if conversion_context.import_options.deduplicate_subworkflows and _is_graph_id_reference(run_action):
        # Deduplicated mode: reference the shared subworkflow by graph id.
        step["content_id"] = run_action
    else:
        # Inline mode: recursively convert a private copy of the subworkflow.
        subworkflow_conversion_context = conversion_context.get_subworkflow_conversion_context(step)
        step["subworkflow"] = _python_to_workflow(
            copy.deepcopy(run_action),
            subworkflow_conversion_context,
        )
def _is_graph_id_reference(run_action):
return run_action and not isinstance(run_action, dict)
def transform_data_input(context, step):
    """Transform a Format 2 data input step into native form."""
    transform_input(context, step, "Input dataset")
def transform_data_collection_input(context, step):
    """Transform a Format 2 collection input step into native form."""
    transform_input(context, step, "Input dataset collection")
def transform_parameter_input(context, step):
    """Transform a Format 2 parameter input step into native form."""
    transform_input(context, step, "input_parameter")
def transform_input(context, step, default_name):
    """Populate native annotation, connections and tool state for an input-style step.

    The step's ``label`` (then *default_name*) supplies the input name unless
    the first entry of ``inputs`` already carries one.
    """
    fallback_name = step.get("label", default_name)
    _populate_annotation(step)
    _ensure_inputs_connections(step)

    step.setdefault("inputs", [{}])
    first_input = step["inputs"][0]
    name = first_input.get("name", fallback_name)
    _ensure_defaults(first_input, {
        "name": name,
        "description": "",
    })
    tool_state = {"name": name}
    # Optional Format 2 attributes copied straight into tool state.
    for attrib in ("collection_type", "parameter_type", "optional", "default", "format", "restrictions", "restrictOnConnections", "suggestions"):
        if attrib in step:
            tool_state[attrib] = step[attrib]

    _populate_tool_state(step, tool_state)
def transform_pause(context, step, default_name="Pause for dataset review"):
    """Populate native annotation, connections and tool state for a pause step."""
    fallback_name = step.get("label", default_name)
    _populate_annotation(step)
    _ensure_inputs_connections(step)

    step.setdefault("inputs", [{}])
    first_input = step["inputs"][0]
    name = first_input.get("name", fallback_name)
    _ensure_defaults(first_input, {
        "name": name,
    })
    tool_state = {"name": name}

    connect = _init_connect_dict(step)
    _populate_input_connections(context, step, connect)
    _populate_tool_state(step, tool_state)
def transform_subworkflow(context, step):
    """Populate connections and an empty tool state for a subworkflow step."""
    _populate_annotation(step)
    _ensure_inputs_connections(step)

    connect = _init_connect_dict(step)
    _populate_input_connections(context, step, connect)
    _populate_tool_state(step, {})
def _runtime_value():
return {"__class__": "RuntimeValue"}
def transform_tool(context, step):
    """Convert a Format 2 tool step in place: state, connections and post-job actions.

    :raises Exception: if the step has no ``tool_id``.
    """
    if "tool_id" not in step:
        raise Exception("Tool steps must define a tool_id.")

    _ensure_defaults(step, {
        "name": step['tool_id'],
        "post_job_actions": {},
        "tool_version": None,
    })
    post_job_actions = step["post_job_actions"]
    _populate_annotation(step)

    tool_state = {
        # TODO: Galaxy should not require tool state actually specify a __page__.
        "__page__": 0,
    }

    connect = _init_connect_dict(step)

    def append_link(key, value):
        # Record a $link as a pending connection for tool input `key`.
        if key not in connect:
            connect[key] = []
        assert "$link" in value
        link_value = value["$link"]
        connect[key].append(clean_connection(link_value))

    def replace_links(value, key=""):
        # Recursively walk the state, replacing $link dicts with runtime
        # markers while collecting the connections they describe.
        if _is_link(value):
            append_link(key, value)
            # Filled in by the connection, so to force late
            # validation of the field just mark as RuntimeValue.
            # It would be better I guess if this were some other
            # value dedicated to this purpose (e.g. a ficitious
            # {"__class__": "ConnectedValue"}) that could be further
            # validated by Galaxy.
            return _runtime_value()

        if isinstance(value, dict):
            new_values = {}
            for k, v in value.items():
                # Nested sections use Galaxy's "outer|inner" key flattening.
                new_key = _join_prefix(key, k)
                new_values[k] = replace_links(v, new_key)
            return new_values
        elif isinstance(value, list):
            new_values = []
            for i, v in enumerate(value):
                # If we are a repeat we need to modify the key
                # but not if values are actually $links.
                if _is_link(v):
                    append_link(key, v)
                    new_values.append(None)
                else:
                    new_key = "%s_%d" % (key, i)
                    new_values.append(replace_links(v, new_key))
            return new_values
        else:
            return value

    # TODO: handle runtime inputs and state together.
    runtime_inputs = step.get("runtime_inputs", [])
    if "state" in step or runtime_inputs:
        step_state = step.pop("state", {})
        step_state = replace_links(step_state)

        # Native tool state stores each top-level value JSON-encoded.
        for key, value in step_state.items():
            tool_state[key] = json.dumps(value)
        for runtime_input in runtime_inputs:
            tool_state[runtime_input] = json.dumps(_runtime_value())
    elif "tool_state" in step:
        tool_state.update(step.get("tool_state"))

    # Fill in input connections
    _populate_input_connections(context, step, connect)

    _populate_tool_state(step, tool_state)

    # Handle outputs.
    out = step.pop("out", None)
    if out is None:
        # Handle LEGACY 19.XX outputs key.
        out = step.pop("outputs", [])
    out = convert_dict_to_id_list_if_needed(out)
    for output in out:
        name = output["id"]
        # Any recognized per-output key with a truthy value becomes a PostJobAction.
        for action_key, action_dict in POST_JOB_ACTIONS.items():
            action_argument = output.get(action_key, action_dict['default'])
            if action_argument:
                action_class = action_dict['action_class']
                action_name = action_class + name
                action = _action(
                    action_class,
                    name,
                    arguments=action_dict['arguments'](action_argument)
                )
                post_job_actions[action_name] = action
def run_tool_to_step(conversion_context, step, run_action):
    """Import an inline tool definition and rewrite *step* as a native tool step."""
    described = conversion_context.galaxy_interface.import_tool(run_action)
    step["type"] = "tool"
    step["tool_id"] = described["tool_id"]
    step["tool_version"] = described["tool_version"]
    step["tool_hash"] = described.get("tool_hash")
    step["tool_uuid"] = described.get("uuid")
class BaseConversionContext(object):
    """Shared per-workflow conversion state: label lookups and subworkflow contexts."""

    def __init__(self):
        # Maps step labels to their integer order index within this workflow.
        self.labels = {}
        # Conversion contexts for nested subworkflows, keyed by step id.
        self.subworkflow_conversion_contexts = {}

    def step_id(self, label_or_id):
        """Resolve a step label (or an already-numeric id) to an integer step id."""
        if label_or_id in self.labels:
            id_ = self.labels[label_or_id]
        else:
            id_ = label_or_id
        return int(id_)

    def step_output(self, value):
        """Split a connection like ``step/output`` into ``(step_id, output_name)``.

        A bare ``step`` reference defaults the output name to ``"output"``.
        """
        value_parts = str(value).split("/")
        if len(value_parts) == 1:
            value_parts.append("output")
        id = self.step_id(value_parts[0])
        return id, value_parts[1]

    def get_subworkflow_conversion_context(self, step):
        """Return (creating if needed) the conversion context for a subworkflow step."""
        # TODO: sometimes this method takes format2 steps and some times converted native ones
        # (for input connections) - redo this so the type signature is stronger.
        step_id = step.get("id")
        run_action = step.get("run")
        if self.import_options.deduplicate_subworkflows and _is_graph_id_reference(run_action):
            subworkflow_conversion_context = self.get_subworkflow_conversion_context_graph(run_action)
            return subworkflow_conversion_context
        if "content_id" in step:
            subworkflow_conversion_context = self.get_subworkflow_conversion_context_graph(step["content_id"])
            return subworkflow_conversion_context

        if step_id not in self.subworkflow_conversion_contexts:
            subworkflow_conversion_context = SubworkflowConversionContext(
                self
            )
            self.subworkflow_conversion_contexts[step_id] = subworkflow_conversion_context
        return self.subworkflow_conversion_contexts[step_id]

    def get_runnable_description(self, run_action):
        """Resolve a step ``run`` value: load ``@import`` files and inline graph references."""
        if "@import" in run_action:
            if len(run_action) > 1:
                raise Exception("@import must be only key if present.")
            run_action_path = run_action["@import"]
            runnable_path = os.path.join(self.workflow_directory, run_action_path)
            with open(runnable_path, "r") as f:
                runnable_description = ordered_load(f)
                run_action = runnable_description
        if not self.import_options.deduplicate_subworkflows and _is_graph_id_reference(run_action):
            # Strip the leading "#" and inline the referenced $graph entry.
            run_action = self.graph_ids[run_action[1:]]
        return run_action
class ConversionContext(BaseConversionContext):
    """Top-level conversion context - owns the $graph registry and Galaxy interface."""

    def __init__(self, galaxy_interface, workflow_directory, import_options: Optional[ImportOptions] = None):
        super(ConversionContext, self).__init__()
        self.import_options = import_options or ImportOptions()
        # Registry of $graph entries keyed by their graph id (e.g. "main").
        self.graph_ids = OrderedDict()  # type: Dict
        # Conversion contexts for deduplicated subworkflows, keyed by graph id.
        self.graph_id_subworkflow_conversion_contexts = {}  # type: Dict
        self.workflow_directory = workflow_directory
        self.galaxy_interface = galaxy_interface

    def register_runnable(self, run_action):
        """Record a $graph entry so it can later be referenced as ``#<id>``."""
        assert "id" in run_action
        self.graph_ids[run_action["id"]] = run_action

    def get_subworkflow_conversion_context_graph(self, graph_id):
        """Return (creating if needed) the conversion context for *graph_id*."""
        if graph_id not in self.graph_id_subworkflow_conversion_contexts:
            subworkflow_conversion_context = SubworkflowConversionContext(
                self
            )
            self.graph_id_subworkflow_conversion_contexts[graph_id] = subworkflow_conversion_context
        return self.graph_id_subworkflow_conversion_contexts[graph_id]
class SubworkflowConversionContext(BaseConversionContext):
    """Conversion context for a nested subworkflow; shared state delegates to the parent."""

    def __init__(self, parent_context):
        super(SubworkflowConversionContext, self).__init__()
        self.parent_context = parent_context

    @property
    def graph_ids(self):
        # Graph registry is global to the conversion, so read it from the parent.
        return self.parent_context.graph_ids

    @property
    def workflow_directory(self):
        return self.parent_context.workflow_directory

    @property
    def import_options(self):
        return self.parent_context.import_options

    @property
    def galaxy_interface(self):
        return self.parent_context.galaxy_interface

    def get_subworkflow_conversion_context_graph(self, graph_id):
        # Graph-id contexts are managed by the top-level context.
        return self.parent_context.get_subworkflow_conversion_context_graph(graph_id)
def _action(type, name, arguments={}):
return {
"action_arguments": arguments,
"action_type": type,
"output_name": name,
}
def _is_link(value):
return isinstance(value, dict) and "$link" in value
def _join_prefix(prefix, key):
if prefix:
new_key = "%s|%s" % (prefix, key)
else:
new_key = key
return new_key
def _init_connect_dict(step):
if "connect" not in step:
step["connect"] = {}
connect = step["connect"]
del step["connect"]
# handle CWL-style in dict connections.
if "in" in step:
step_in = step["in"]
assert isinstance(step_in, dict)
connection_keys = set()
for key, value in step_in.items():
# TODO: this can be a list right?
if isinstance(value, dict) and 'source' in value:
value = value["source"]
elif isinstance(value, dict) and 'default' in value:
continue
elif isinstance(value, dict):
raise KeyError('step input must define either source or default %s' % value)
connect[key] = [value]
connection_keys.add(key)
for key in connection_keys:
del step_in[key]
if len(step_in) == 0:
del step['in']
return connect
def _populate_input_connections(context, step, connect):
    """Translate the ``connect`` mapping into native ``input_connections`` on the step."""
    _ensure_inputs_connections(step)
    input_connections = step["input_connections"]
    is_subworkflow_step = step.get("type") == "subworkflow"

    for key, values in connect.items():
        input_connection_value = []
        if not isinstance(values, list):
            values = [values]
        for value in values:
            if not isinstance(value, dict):
                # "$step" connections carry no named output; Galaxy uses this sentinel.
                if key == "$step":
                    value += "/__NO_INPUT_OUTPUT_NAME__"
                id, output_name = context.step_output(value)
                value = {"id": id, "output_name": output_name}
                if is_subworkflow_step:
                    # Subworkflow inputs additionally record the target step
                    # inside the subworkflow the connection feeds.
                    subworkflow_conversion_context = context.get_subworkflow_conversion_context(step)
                    input_subworkflow_step_id = subworkflow_conversion_context.step_id(key)
                    value["input_subworkflow_step_id"] = input_subworkflow_step_id
            input_connection_value.append(value)
        if key == "$step":
            key = "__NO_INPUT_OUTPUT_NAME__"
        input_connections[key] = input_connection_value
def _populate_annotation(step):
if "annotation" not in step and "doc" in step:
annotation = step.pop("doc")
step["annotation"] = annotation
elif "annotation" not in step:
step["annotation"] = ""
def _ensure_inputs_connections(step):
if "input_connections" not in step:
step["input_connections"] = {}
def _ensure_defaults(in_dict, defaults):
for key, value in defaults.items():
if key not in in_dict:
in_dict[key] = value
def _populate_tool_state(step, tool_state):
step["tool_state"] = json.dumps(tool_state)
def main(argv=None):
    """Entry point for script to conversion from Format 2 interface.

    :param argv: argument list without the program name; defaults to
        ``sys.argv[1:]``.
    """
    if argv is None:
        argv = sys.argv[1:]
    args = _parser().parse_args(argv)

    format2_path = args.input_path
    output_path = args.output_path or (format2_path + ".gxwf.yml")

    # NOTE(review): this is the abspath of the input *file*, not its parent
    # directory, yet it is used as the base for resolving @import paths -
    # confirm this is intended.
    workflow_directory = os.path.abspath(format2_path)
    galaxy_interface = None

    with open(format2_path, "r") as f:
        has_workflow = ordered_load(f)

    output = python_to_workflow(has_workflow, galaxy_interface=galaxy_interface, workflow_directory=workflow_directory)
    with open(output_path, "w") as f:
        # JSON is a YAML subset, so the default .gxwf.yml suffix still parses as YAML.
        json.dump(output, f, indent=4)
def _parser():
    """Build the command-line argument parser for the conversion script."""
    arg_parser = argparse.ArgumentParser(description=SCRIPT_DESCRIPTION)
    arg_parser.add_argument('input_path', metavar='INPUT', type=str, help='input workflow path (.ga)')
    arg_parser.add_argument('output_path', metavar='OUTPUT', type=str, nargs="?", help='output workflow path (.gxfw.yml)')
    return arg_parser
if __name__ == "__main__":
    # Bug fix: previously main(sys.argv) was passed, which fed the program
    # name to argparse as the INPUT positional.  Passing no argument lets
    # main() default to sys.argv[1:].
    main()
# Explicit public API of this module.
__all__ = (
    'main',
    'python_to_workflow',
    'yaml_to_workflow',
)