From 16c6c0fb1ca7391f2077c556ab73a104b07fd3fc Mon Sep 17 00:00:00 2001 From: John Chilton Date: Sun, 1 Jul 2018 11:20:56 -0700 Subject: [PATCH 01/26] Template to command to launch a workflow for training... --- planemo/commands/cmd_training_init.py | 50 +++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 planemo/commands/cmd_training_init.py diff --git a/planemo/commands/cmd_training_init.py b/planemo/commands/cmd_training_init.py new file mode 100644 index 000000000..e20f0c1d1 --- /dev/null +++ b/planemo/commands/cmd_training_init.py @@ -0,0 +1,50 @@ +"""Module describing the planemo ``training_init`` command.""" +import os + +import click + +from planemo import options +from planemo.config import planemo_option +from planemo.cli import command_function +from planemo.io import write_file +from planemo.runnable import for_path +from planemo.engine import ( + engine_context, + is_galaxy_engine, +) + + +@click.command('training_init') +@options.required_workflow_arg() +@options.force_option() +@planemo_option( + "-o", "--output", + default=None, + type=click.Path( + file_okay=True, + dir_okay=False, + readable=True, + resolve_path=True, + ) +) +@options.galaxy_serve_options() +@command_function +def cli(ctx, workflow_path, output=None, force=False, **kwds): + """Build training template from workflow. + """ + assert is_galaxy_engine(**kwds) + + kwds["no_dependency_resolution"] = True + + if output is None: + output = os.path.splitext(workflow_path)[0] + ".ga" + + runnable = for_path(workflow_path) + with engine_context(ctx, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([runnable]) as config: + workflow_id = config.workflow_id(workflow_path) + output_dict = config.gi.workflows.export_workflow_dict(workflow_id) + print(output_dict) + import time + time.sleep(10000) + write_file(output, "Test File Here...", force=force) From 69a9f7a017adf0f5a738961c95f9c2a51e232659 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Mon, 2 Jul 2018 18:26:35 +0200 Subject: [PATCH 02/26] Add command to create training topic/tutorial --- planemo/commands/cmd_training_init.py | 66 +++++---- planemo/options.py | 50 +++++++ planemo/training.py | 193 ++++++++++++++++++++++++++ requirements.txt | 1 + 4 files changed, 283 insertions(+), 27 deletions(-) create mode 100644 planemo/training.py diff --git a/planemo/commands/cmd_training_init.py b/planemo/commands/cmd_training_init.py index e20f0c1d1..a38a3191f 100644 --- a/planemo/commands/cmd_training_init.py +++ b/planemo/commands/cmd_training_init.py @@ -4,6 +4,7 @@ import click from planemo import options +from planemo import training from planemo.config import planemo_option from planemo.cli import command_function from planemo.io import write_file @@ -14,37 +15,48 @@ ) + @click.command('training_init') -@options.required_workflow_arg() -@options.force_option() -@planemo_option( - "-o", "--output", - default=None, - type=click.Path( - file_okay=True, - dir_okay=False, - readable=True, - resolve_path=True, - ) -) -@options.galaxy_serve_options() +@options.optional_tools_arg(multiple=True, allow_uris=True) +@options.training_init_options() +# @options.force_option() +# @options.galaxy_serve_options() @command_function -def cli(ctx, workflow_path, output=None, force=False, **kwds): +def cli(ctx, uris, **kwds): """Build training template from workflow. """ assert is_galaxy_engine(**kwds) - kwds["no_dependency_resolution"] = True + training.init(kwds) - if output is None: - output = os.path.splitext(workflow_path)[0] + ".ga" - - runnable = for_path(workflow_path) - with engine_context(ctx, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([runnable]) as config: - workflow_id = config.workflow_id(workflow_path) - output_dict = config.gi.workflows.export_workflow_dict(workflow_id) - print(output_dict) - import time - time.sleep(10000) - write_file(output, "Test File Here...", force=force) +# + #topic_dir = Path("topics") / Path(args.topic_name) + #if not topic_dir.is_dir(): + # print("The topic {} does not exist. It will be created".format(args.topic_name)) + # create_topic(args, topic_dir, template_dir) +# + #if args.tutorial_name: + # tuto_dir = topic_dir / Path("tutorials") / Path(args.tutorial_name) + # if not tuto_dir.is_dir(): + # template_dir = template_dir / Path("tutorials") / Path("tutorial1") + # print("The tutorial {} in topic {} does not exist. It will be created.".format(args.tutorial_name, args.topic_name)) + # create_tutorial(args, tuto_dir, topic_dir, template_dir) + # else: + # print("The tutorial {} in topic {} already exists. It will be updated with the other arguments".format(args.tutorial_name, args.topic_name)) + # update_tutorial(args, tuto_dir, topic_dir) +# +# +# + #if output is None: + # output = os.path.splitext(workflow_path)[0] + ".ga" +# + #runnable = for_path(workflow_path) + #with engine_context(ctx, **kwds) as galaxy_engine: + # with galaxy_engine.ensure_runnables_served([runnable]) as config: + # workflow_id = config.workflow_id(workflow_path) + # output_dict = config.gi.workflows.export_workflow_dict(workflow_id) + # print(output_dict) + # import time + # time.sleep(10000) + # write_file(output, "Test File Here...", force=force) +# \ No newline at end of file diff --git a/planemo/options.py b/planemo/options.py index 95da73d3b..d8c4fe88b 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1123,6 +1123,56 @@ def galaxy_serve_options(): ) +def training_topic_option(): + return _compose( + planemo_option( + "--topic_name", + required=True, + help="Name (directory name) of the topic to create or in which " + "the tutorial should be create"), + planemo_option( + "--topic_title", + help="Title of the topic to create"), + planemo_option( + "--topic_target", + type=click.Choice(['use', 'admin-dev', 'instructors']), + default="use", + help="Target audience for the topic"), + planemo_option( + "--topic_summary", + help="Summary of the topic") + ) + + +def training_tutorial_option(): + return _compose( + planemo_option( + "--tutorial_name", + help="Name (directory name) of the new tutorial to create " + "(it will be the directory name)"), + planemo_option( + "--tutorial_title", + help="Title of the tutorial"), + planemo_option( + "--hands_on", + is_flag=True, + default=True, + help="Add hands-on for the new tutorial"), + planemo_option( + "--slides", + is_flag=True, + default=False, + help="Add slides for the new tutorial") + ) + + +def training_init_options(): + return _compose( + training_topic_option(), + training_tutorial_option() + ) + + def shed_fail_fast_option(): return planemo_option( "--fail_fast", diff --git a/planemo/training.py b/planemo/training.py new file mode 100644 index 000000000..cec89e1c8 --- /dev/null +++ b/planemo/training.py @@ -0,0 +1,193 @@ +""" gtdk: Galaxy training development kit """ + +import collections +import os +import shutil +import oyaml as yaml +from planemo.io import info + + +def load_yaml(filepath): + """Load the content of a YAML file to a dictionary""" + with open(filepath, "r") as m_file: + content = yaml.load(m_file) + return content + + +def save_to_yaml(content, filepath): + """Save a dictionary to a YAML file""" + with open(filepath, 'w') as stream: + yaml.dump(content, + stream, + indent=2, + default_flow_style=False, + default_style='', + explicit_start=True) + + +def get_template_dir(): + """Check and return the templates directory""" + template_dir = "templates" + if not os.path.isdir(template_dir): + raise Exception("This script needs to be run in the training material repository") + return template_dir + + +def change_topic_name(topic_name, filepath): + """Change the topic name in the top metadata of a file""" + with open(filepath, "r") as in_f: + content = in_f.read() + + content = content.replace("your_topic", topic_name) + content = content.replace("your_tutorial_name", "tutorial1") + + with open(filepath, 'w') as out_f: + out_f.write(content) + + +def create_topic(kwds, topic_dir, template_dir): + """Create the skeleton of a new topic: + + 1. copy templates + 2. update the index.md to match your topic's name + 3. fill the metadata + 4. add a symbolic link to the metadata.yaml from the metadata folder + + """ + # copy templates + shutil.copytree(template_dir, topic_dir) + + # update the index.md to match your topic's name + index_path = os.path.join(topic_dir, "index.md") + change_topic_name(kwds["topic_name"], index_path) + + # update the metadata file + metadata_path = os.path.join(topic_dir, "metadata.yaml") + + metadata = load_yaml(metadata_path) + metadata['name'] = kwds["topic_name"] + metadata['title'] = kwds["topic_title"] + metadata['type'] = kwds["topic_target"] + metadata['summary'] = kwds["topic_summary"] + + save_to_yaml(metadata, metadata_path) + + # update the metadata in top of tutorial.md and slides.html + tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") + hand_on_path = os.path.join(tuto_path, "tutorial.md") + change_topic_name(kwds["topic_name"], hand_on_path) + slides_path = os.path.join(tuto_path, "slides.html") + change_topic_name(kwds["topic_name"], slides_path) + + # add a symbolic link to the metadata.yaml + os.chdir("metadata") + os.symlink(os.path.join("..", metadata_path), "%s.yaml" % kwds["topic_name"]) + os.chdir("..") + + +def update_tuto_file(filepath, keep, topic_name, tutorial_name): + """Update or delete a tutorial (hands-on or slide) file""" + if keep: + with open(filepath, "r") as in_f: + content = in_f.read() + + content = content.replace("your_topic", topic_name) + content = content.replace("your_tutorial_name", tutorial_name) + + with open(filepath, 'w') as out_f: + out_f.write(content) + + elif filepath.is_file(): + filepath.unlink() + + +def update_tutorial(kwds, tuto_dir, topic_dir): + """Update the metadata information of a tutorial""" + # update the metadata file to add the new tutorial + metadata_path = os.path.join(topic_dir, "metadata.yaml") + + metadata = load_yaml(metadata_path) + found = False + for mat in metadata["material"]: + if mat["name"] == kwds["tutorial_name"]: + mat["name"] = kwds["tutorial_name"] + mat["title"] = kwds["tutorial_title"] + mat["hands_on"] = kwds["tutorial_hands_on"] + mat["slides"] = kwds["tutorial_slides"] + found = True + + if not found: + new_mat = collections.OrderedDict() + new_mat["title"] = kwds["tutorial_title"] + new_mat["name"] = kwds["tutorial_name"] + new_mat["type"] = 'tutorial' + new_mat["zenodo_link"] = '' + new_mat["hands_on"] = kwds["tutorial_hands_on"] + new_mat["slides"] = kwds["tutorial_slides"] + new_mat["workflows"] = False + new_mat["galaxy_tour"] = False + new_mat["questions"] = ['', ''] + new_mat["objectives"] = ['', ''] + new_mat["time_estimation"] = '1d/3h/6h' + new_mat["key_points"] = ['', ''] + new_mat["contributors"] = ['contributor1', 'contributor2'] + metadata["material"].append(new_mat) + + save_to_yaml(metadata, metadata_path) + + # update the metadata in top of tutorial.md or remove it if not needed + hand_on_path = os.path.join(tuto_dir, "tutorial.md") + update_tuto_file(hand_on_path, kwds["tutorial_hands_on"], kwds["topic_name"], kwds["tutorial_name"]) + + # update the metadata in top of slides.md or remove it if not needed + slides_path = os.path.join(tuto_dir, "slides.html") + update_tuto_file(slides_path, kwds["tutorial_slides"], kwds["topic_name"], kwds["tutorial_name"]) + + +def create_tutorial(args, tuto_dir, topic_dir, template_dir): + """Create the skeleton of a new tutorial""" + # copy or rename templates + template_tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") + if template_tuto_path.isdir(): + template_tuto_path.rename(tuto_dir) + else: + shutil.copytree(template_dir, tuto_dir) + + # fill the metadata of the new tutorial + update_tutorial(args, tuto_dir, topic_dir) + + +def init(kwds): + """Create/update a topic/tutorial""" + topic_template_dir = get_template_dir() + + topic_dir = os.path.join("topics", kwds['topic_name']) + if not os.path.isdir(topic_dir): + info("The topic %s does not exist. It will be created" % kwds['topic_name']) + create_topic(kwds, topic_dir, topic_template_dir) + + if kwds['tutorial_name']: + tuto_dir = os.path.join("topic_dir", "tutorials", kwds['tutorial_name']) + if not os.path.is_dir(tuto_dir): + tuto_template_dir = os.path.join(topic_dir, "tutorials", "tutorial1") + info("The tutorial %s in topic %s does not exist. It will be created." % (args.tutorial_name, args.topic_name)) + create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir) + else: + info("The tutorial %s in topic %s already exists. It will be updated with the other arguments" % (args.tutorial_name, args.topic_name)) + update_tutorial(kwds, tuto_dir, topic_dir) +# +# +# + #if output is None: + # output = os.path.splitext(workflow_path)[0] + ".ga" +# + #runnable = for_path(workflow_path) + #with engine_context(ctx, **kwds) as galaxy_engine: + # with galaxy_engine.ensure_runnables_served([runnable]) as config: + # workflow_id = config.workflow_id(workflow_path) + # output_dict = config.gi.workflows.export_workflow_dict(workflow_id) + # print(output_dict) + # import time + # time.sleep(10000) + # write_file(output, "Test File Here...", force=force) +# \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 27f1838e4..5cc03a8ae 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,6 +5,7 @@ configparser six>=1.7.0 pyaml pyyaml +oyaml bioblend>=0.10.0 docutils jinja2 From 5c30316496c5da2394f4fd79f0acc696d0b4425e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 3 Jul 2018 03:17:31 +0200 Subject: [PATCH 03/26] Add tutorial skeleton creation from workflow --- planemo/commands/cmd_training_init.py | 47 +-- planemo/options.py | 50 +++- planemo/training.py | 406 +++++++++++++++++++++++--- 3 files changed, 410 insertions(+), 93 deletions(-) diff --git a/planemo/commands/cmd_training_init.py b/planemo/commands/cmd_training_init.py index a38a3191f..1c3a674b7 100644 --- a/planemo/commands/cmd_training_init.py +++ b/planemo/commands/cmd_training_init.py @@ -7,56 +7,15 @@ from planemo import training from planemo.config import planemo_option from planemo.cli import command_function -from planemo.io import write_file -from planemo.runnable import for_path -from planemo.engine import ( - engine_context, - is_galaxy_engine, -) - @click.command('training_init') @options.optional_tools_arg(multiple=True, allow_uris=True) @options.training_init_options() # @options.force_option() -# @options.galaxy_serve_options() +@options.galaxy_serve_options() @command_function def cli(ctx, uris, **kwds): - """Build training template from workflow. - """ - assert is_galaxy_engine(**kwds) + """Build training template from workflow.""" kwds["no_dependency_resolution"] = True - training.init(kwds) - -# - #topic_dir = Path("topics") / Path(args.topic_name) - #if not topic_dir.is_dir(): - # print("The topic {} does not exist. It will be created".format(args.topic_name)) - # create_topic(args, topic_dir, template_dir) -# - #if args.tutorial_name: - # tuto_dir = topic_dir / Path("tutorials") / Path(args.tutorial_name) - # if not tuto_dir.is_dir(): - # template_dir = template_dir / Path("tutorials") / Path("tutorial1") - # print("The tutorial {} in topic {} does not exist. It will be created.".format(args.tutorial_name, args.topic_name)) - # create_tutorial(args, tuto_dir, topic_dir, template_dir) - # else: - # print("The tutorial {} in topic {} already exists. It will be updated with the other arguments".format(args.tutorial_name, args.topic_name)) - # update_tutorial(args, tuto_dir, topic_dir) -# -# -# - #if output is None: - # output = os.path.splitext(workflow_path)[0] + ".ga" -# - #runnable = for_path(workflow_path) - #with engine_context(ctx, **kwds) as galaxy_engine: - # with galaxy_engine.ensure_runnables_served([runnable]) as config: - # workflow_id = config.workflow_id(workflow_path) - # output_dict = config.gi.workflows.export_workflow_dict(workflow_id) - # print(output_dict) - # import time - # time.sleep(10000) - # write_file(output, "Test File Here...", force=force) -# \ No newline at end of file + training.init(ctx, kwds) diff --git a/planemo/options.py b/planemo/options.py index d8c4fe88b..79c81b805 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1123,13 +1123,18 @@ def galaxy_serve_options(): ) +def training_topic_name_option(): + return planemo_option( + "--topic_name", + required=True, + help="Name (directory name) of the topic to create or in which " + "the tutorial should be create" + ) + + def training_topic_option(): return _compose( - planemo_option( - "--topic_name", - required=True, - help="Name (directory name) of the topic to create or in which " - "the tutorial should be create"), + training_topic_name_option(), planemo_option( "--topic_title", help="Title of the topic to create"), @@ -1144,12 +1149,33 @@ def training_topic_option(): ) -def training_tutorial_option(): - return _compose( - planemo_option( +def training_tutorial_name_option(): + return planemo_option( "--tutorial_name", help="Name (directory name) of the new tutorial to create " - "(it will be the directory name)"), + "(it will be the directory name)" + ) + + +def training_tutorial_worflow_option(): + return _compose( + planemo_option( + "--workflow", + type=click.Path(file_okay=True, resolve_path=True), + help="Workflow of the tutorial (locally)", + default=None), + planemo_option( + "--galaxy_url", + help="URL of a Galaxy instance with the workflow"), + planemo_option( + "--workflow_id", + help="ID of the workflow on the Galaxy instance") + ) + + +def training_tutorial_option(): + return _compose( + training_tutorial_name_option(), planemo_option( "--tutorial_title", help="Title of the tutorial"), @@ -1162,7 +1188,11 @@ def training_tutorial_option(): "--slides", is_flag=True, default=False, - help="Add slides for the new tutorial") + help="Add slides for the new tutorial"), + training_tutorial_worflow_option(), + planemo_option( + "--zenodo", + help="Zenodo URL with the input data") ) diff --git a/planemo/training.py b/planemo/training.py index cec89e1c8..6642782fa 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -1,10 +1,150 @@ """ gtdk: Galaxy training development kit """ import collections +import json import os +import requests import shutil +import time import oyaml as yaml + + +from planemo import templates from planemo.io import info +from planemo.runnable import for_path +from planemo.engine import ( + engine_context, + is_galaxy_engine, +) + + +INPUT_TEMPLATE = """ +> - icon {{icon}} *"{{input_name}}"*: {{input_value}} +""" + +INPUT_TEMPLATE_2 = """ +> - {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} +""" + + +HANDS_ON_TOOL_BOX_TEMPLATE = """ +> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: TODO: task description +> +> 1. **{{tool_name}}** {{ '{%' }} icon tool {{ '%}' }} with the following parameters:{{inputlist}}{{paramlist}} +> +> TODO: check parameter descriptions +> TODO: some of these parameters may be the default values and can be removed +> unless they have some didactic value. +> +{: .hands_on} + +<-- Consider adding a question to test the learners understanding of the previous exercise --> +> ### {{ '{%' }} icon question {{ '%}' }} Questions +> +> 1. Question1? +> 2. Question2? +> +> > ### {{ '{%' }} icon solution {{ '%}' }} Solution +> > +> > 1. Answer for question1 +> > 2. Answer for question2 +> > +> {: .solution} +> +{: .question} +""" + + +TUTORIAL_TEMPLATE = """ +--- +layout: tutorial_hands_on +topic_name: {{ topic_name }} +tutorial_name: {{ tutorial_name }} +--- + +# Introduction +{:.no_toc} + + + +General introduction about the topic and then an introduction of the +tutorial (the questions and the objectives). It is nice also to have a +scheme to sum up the pipeline used during the tutorial. The idea is to +give to trainees insight into the content of the tutorial and the (theoretical +and technical) key concepts they will learn. + +**Please follow our +[tutorial to learn how to fill the Markdown]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-content/tutorial.html)** + +> ### Agenda +> +> In this tutorial, we will cover: +> +> 1. TOC +> {:toc} +> +{: .agenda} + +# Title for your first section + +Give some background about what the trainees will be doing in the section. + +Below are a series of hand-on boxes, one for each tool in your workflow file. +Often you may wish to combine several boxes into one or make other adjustments such +as breaking the tutorial into sections, we encourage you to make such changes as you +see fit, this is just a starting point :) + +Anywhere you find the word `TODO`, there is something that needs to be changed +depending on the specifics of your tutorial. + +have fun! + +## Get data + +> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Data upload +> +> 1. Import the following files from [Zenodo]({{ zenodo_link }}) or from a data +> library named `TODO` if available (ask your instructor) +> +> ``` +> TODO: add the files by the ones on Zenodo here (if not added) +> TODO: remove the useless files (if added) +> TODO: so that they can easily be copy-pasted into Galaxy's upload dialog +> {{ z_file_links }} +> ``` +> +> > ### {{ '{%' }} icon tip {{ '%}' }} Tip: Importing data via links +> > +> > * Copy the link location +> > * Open the Galaxy Upload Manager +> > * Select **Paste/Fetch Data** +> > * Paste the link into the text field +> > * Press **Start** +> > +> > By default, Galaxy uses the url as the name, so please rename them to something more pleasing. +> {: .tip} +> +> > ### {{ '{%' }} icon tip {{ '%}' }} Tip: Importing data from a data library +> > +> > * Go into "Shared data" (top panel) then "Data libraries" +> > * Click on "Training data" and then "Analyses of metagenomics data" +> > * Select interesting file +> > * Click on "Import selected datasets into history" +> > * Import in a new history +> {: .tip} +> +{: .hands_on} + +# Different steps + +{{ body }} + +# Conclusion +{:.no_toc} + +Sum up the tutorial and the key takeaways here. We encourage adding an overview image of the +pipeline used. +""" def load_yaml(filepath): @@ -52,7 +192,6 @@ def create_topic(kwds, topic_dir, template_dir): 2. update the index.md to match your topic's name 3. fill the metadata 4. add a symbolic link to the metadata.yaml from the metadata folder - """ # copy templates shutil.copytree(template_dir, topic_dir) @@ -97,8 +236,8 @@ def update_tuto_file(filepath, keep, topic_name, tutorial_name): with open(filepath, 'w') as out_f: out_f.write(content) - elif filepath.is_file(): - filepath.unlink() + elif os.path.isfile(filepath): + os.remove(filepath) def update_tutorial(kwds, tuto_dir, topic_dir): @@ -112,8 +251,10 @@ def update_tutorial(kwds, tuto_dir, topic_dir): if mat["name"] == kwds["tutorial_name"]: mat["name"] = kwds["tutorial_name"] mat["title"] = kwds["tutorial_title"] - mat["hands_on"] = kwds["tutorial_hands_on"] - mat["slides"] = kwds["tutorial_slides"] + mat["hands_on"] = kwds["hands_on"] + mat["slides"] = kwds["slides"] + mat["workflows"] = True if kwds["workflow"] else False + mat["zenodo_link"] = kwds["zenodo"] if kwds["zenodo"] else '' found = True if not found: @@ -121,10 +262,10 @@ def update_tutorial(kwds, tuto_dir, topic_dir): new_mat["title"] = kwds["tutorial_title"] new_mat["name"] = kwds["tutorial_name"] new_mat["type"] = 'tutorial' - new_mat["zenodo_link"] = '' - new_mat["hands_on"] = kwds["tutorial_hands_on"] - new_mat["slides"] = kwds["tutorial_slides"] - new_mat["workflows"] = False + new_mat["zenodo_link"] = kwds["zenodo"] if kwds["zenodo"] else '' + new_mat["hands_on"] = kwds["hands_on"] + new_mat["slides"] = kwds["slides"] + new_mat["workflows"] = True if kwds["workflow"] else False new_mat["galaxy_tour"] = False new_mat["questions"] = ['', ''] new_mat["objectives"] = ['', ''] @@ -137,27 +278,225 @@ def update_tutorial(kwds, tuto_dir, topic_dir): # update the metadata in top of tutorial.md or remove it if not needed hand_on_path = os.path.join(tuto_dir, "tutorial.md") - update_tuto_file(hand_on_path, kwds["tutorial_hands_on"], kwds["topic_name"], kwds["tutorial_name"]) + update_tuto_file(hand_on_path, kwds["hands_on"], kwds["topic_name"], kwds["tutorial_name"]) # update the metadata in top of slides.md or remove it if not needed slides_path = os.path.join(tuto_dir, "slides.html") - update_tuto_file(slides_path, kwds["tutorial_slides"], kwds["topic_name"], kwds["tutorial_name"]) + update_tuto_file(slides_path, kwds["slides"], kwds["topic_name"], kwds["tutorial_name"]) + + +def get_zenodo_file_url(zenodo_link): + """Get the list of URLs of the files on Zenodo""" + links = [] + if not zenodo_link: + return links + + # get the record in the Zenodo link + if 'doi' in zenodo_link: + z_record = zenodo_link.split('.')[-1] + else: + z_record = zenodo_link.split('/')[-1] + + # get JSON corresponding to the record from Zenodo API + req = "https://zenodo.org/api/records/%s" % (z_record) + r = requests.get(req) + r.raise_for_status() + req_res = r.json() + + # extract the URLs from the JSON + if 'files' not in req_res: + return links + + for f in req_res['files']: + links.append(f['links']['self']) + + return links + + +def get_input_tool_name(step_id, steps): + """Get the string with the name of the tool that generated an input""" + inp_provenance = '' + inp_prov_id = str(step_id) + if inp_prov_id in steps: + inp_provenance = '(output of `%s`)' % steps[inp_prov_id]['name'] + return inp_provenance + + +def get_input_label(inp_n, inputs): + """Get the label of an input""" + #for inp in inputs: + # if inp["name"] == inp_n: + # return inp["label"] + return inp_n + + +def get_handson_box(step_id, steps, tools): + """Get the string for an hands-on box based on a step in a workflow""" + step = steps[step_id] + + # get tool + tool_name = step['name'] + if len(step['input_connections']) == 0: + return '' + tool = {}#tools[tool_name] + + # add input description + input_conn = step['input_connections'] + inputlist = '' + for inp_n, inp in input_conn.items(): + inps = [] + if isinstance(inp, list): # multiple input (not collection) + icon = 'param-files' + for i in inp: + inps.append('`%s` %s' % ( + i['output_name'], + get_input_tool_name(i['id'], steps))) + else: # sinle input + icon = 'param-file' + inps = ['`%s` %s' % ( + inp['output_name'], + get_input_tool_name(inp['id'], steps))] + + context = { + "icon": icon, + "input_name": get_input_label(inp_n, tool["inputs"]), + "input_value": ', '.join(inps) + } + inputlist += templates.render(INPUT_TEMPLATE, **context) + + # add parameters + parameters = step['tool_state'] + print(parameters) + + #g = nested_dict_iter(json.loads(parameters)) + #print(g) + + paramlist = '' + + # while True: + # try: + # (k, v) = next(g) + # print("param: ", k, v) + # except StopIteration: + # break + + # if not v or v == 'null' or v == '[]': + # pass + # elif 'RuntimeValue' in str(v): + # pass + # print("myinputs:", v, inputs) + # print(inputs) + # elif '__' not in k and k != 'chromInfo': + # paramlist += '\n> - *"' + k + '"*: `' + str(v).strip('"[]') + '`' + + # print(paramlist) + + context = { + "tool_name": tool_name, + "inputlist": inputlist, + "paramlist": paramlist + } + return templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **context) + + +def get_wf_from_running_galaxy(kwds, ctx): + """Get the workflow dictionary from a running Galaxy instance with the workflow installed there""" + return {} + + +def get_wf_tool_description(wf, gi): + """Get a dictionary with description of all tools in a workflow""" + tools = {} + for s in wf['steps']: + step = wf['steps'][s] + if len(step['input_connections']) == 0: + continue + print() + print(step) + tools.setdefault(step['name'], + gi.tools.show_tool(step['tool_id'], io_details = True)) + return tools + + +def serve_wf_locally(kwds, wf_filepath, ctx): + """Server local Galaxy and get the workflow dictionary""" + assert is_galaxy_engine(**kwds) + runnable = for_path(wf_filepath) + with engine_context(ctx, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([runnable]) as config: + workflow_id = config.workflow_id(wf_filepath) + wf = config.gi.workflows.export_workflow_dict(workflow_id) + print(wf) + tools = {} # get_wf_tool_description(wf, config.gi) + return wf, tools + + +def create_tutorial_from_workflow(kwds, tuto_dir, ctx): + """Create tutorial structure from the workflow file""" + # load workflow + if kwds['workflow_id']: + if kwds['galaxy_url']: + wf = get_wf_from_running_galaxy(kwds, ctx) + else: + wf, tools = serve_wf_locally(kwds, kwds["workflow"], ctx) + + # get + z_file_links = get_zenodo_file_url(kwds['zenodo']) + body = '' + for step in wf['steps']: + body += get_handson_box(step, wf['steps'], tools) -def create_tutorial(args, tuto_dir, topic_dir, template_dir): + context = { + "topic_name": kwds["topic_name"], + "tutorial_name": kwds["tutorial_name"], + "zenodo_link": kwds["zenodo"] if kwds["zenodo"] else '', + "z_file_links": "\n> ".join(z_file_links), + "hands_on_boxes": body + } + template = templates.render(TUTORIAL_TEMPLATE, **context) + + # create the tutorial markdown file + md_path = os.path.join(tuto_dir, "tutorial.md") + with open(md_path, 'w') as md: + md.write(template) + + +def extract_tools_from_workflow(kwds, tuto_dir): + """Create and fill tools.yaml file from workflow""" + info("Test") + + +def extract_data_library_from_zenodo(zenodo_link, tuto_dir): + """Create the data_library from Zenodo""" + info("Test") + + +def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): """Create the skeleton of a new tutorial""" # copy or rename templates template_tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") - if template_tuto_path.isdir(): - template_tuto_path.rename(tuto_dir) + if os.path.isdir(template_tuto_path): + os.rename(template_tuto_path, tuto_dir) else: shutil.copytree(template_dir, tuto_dir) + print(kwds) + # create tutorial skeleton from workflow + if kwds["workflow"] or kwds['workflow_id']: + info("Create tutorial skeleton from workflow") + create_tutorial_from_workflow(kwds, tuto_dir, ctx) + extract_tools_from_workflow(kwds, tuto_dir) + # fill the metadata of the new tutorial - update_tutorial(args, tuto_dir, topic_dir) + update_tutorial(kwds, tuto_dir, topic_dir) + # extract the data library from Zenodo + if kwds["zenodo"]: + extract_data_library_from_zenodo(kwds["zenodo"], tuto_dir) -def init(kwds): + +def init(ctx, kwds): """Create/update a topic/tutorial""" topic_template_dir = get_template_dir() @@ -166,28 +505,17 @@ def init(kwds): info("The topic %s does not exist. It will be created" % kwds['topic_name']) create_topic(kwds, topic_dir, topic_template_dir) - if kwds['tutorial_name']: - tuto_dir = os.path.join("topic_dir", "tutorials", kwds['tutorial_name']) - if not os.path.is_dir(tuto_dir): - tuto_template_dir = os.path.join(topic_dir, "tutorials", "tutorial1") - info("The tutorial %s in topic %s does not exist. It will be created." % (args.tutorial_name, args.topic_name)) - create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir) + if not kwds['tutorial_name']: + if kwds['workflow'] or kwds['workflow_id']: + raise Exception("A tutorial name is needed to create the skeleton of the tutorial from a workflow") + if kwds['zenodo']: + raise Exception("A tutorial name is needed to add Zenodo information") + else: + tuto_dir = os.path.join(topic_dir, "tutorials", kwds['tutorial_name']) + if not os.path.isdir(tuto_dir): + tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") + info("The tutorial %s in topic %s does not exist. It will be created." % (kwds['tutorial_name'], kwds['topic_name'])) + create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, ctx) else: - info("The tutorial %s in topic %s already exists. It will be updated with the other arguments" % (args.tutorial_name, args.topic_name)) + info("The tutorial %s in topic %s already exists. It will be updated with the other arguments" % (kwds['tutorial_name'], kwds['topic_name'])) update_tutorial(kwds, tuto_dir, topic_dir) -# -# -# - #if output is None: - # output = os.path.splitext(workflow_path)[0] + ".ga" -# - #runnable = for_path(workflow_path) - #with engine_context(ctx, **kwds) as galaxy_engine: - # with galaxy_engine.ensure_runnables_served([runnable]) as config: - # workflow_id = config.workflow_id(workflow_path) - # output_dict = config.gi.workflows.export_workflow_dict(workflow_id) - # print(output_dict) - # import time - # time.sleep(10000) - # write_file(output, "Test File Here...", force=force) -# \ No newline at end of file From 065702ae241a188bbd8c48dcc227796453566300 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Fri, 20 Jul 2018 18:05:48 +0200 Subject: [PATCH 04/26] Add parameter description in the tutorial skeleton --- planemo/training.py | 227 +++++++++++++++++++++++++++++--------------- 1 file changed, 152 insertions(+), 75 deletions(-) diff --git a/planemo/training.py b/planemo/training.py index 6642782fa..d93acda2d 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -18,16 +18,39 @@ ) -INPUT_TEMPLATE = """ -> - icon {{icon}} *"{{input_name}}"*: {{input_value}} +INPUT_FILE_TEMPLATE = """ +> - {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} """ -INPUT_TEMPLATE_2 = """ -> - {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} +INPUT_SECTION = """ +>{{space}}- In *"{{section_label}}"*: +""" + +INPUT_ADD_REPEAT = """ +>{{space}}- Click on *"Instert {{section_label}}"*: """ +INPUT_PARAM = """ +>{{space}}- *"{{param_label}}"*: `{{param_value}}` +""" HANDS_ON_TOOL_BOX_TEMPLATE = """ +# Title of the section usually corresponding to a big step + +Description of the step: some background and some theory. Some image can be added there to support the theory explanation: + +![Alternative text](../../images/image_name "Legend of the image") + +The idea is to keep the theory description before quite simple to focus more on the practical part. + +<-- Consider adding a detail box to expand the theory --> + +> ### {{ '{%' }} icon details {{ '%}' }} More details about the theory +> +> But to describe more details, it is possible to use the detail boxes which are expandable +> +{: .details} + > ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: TODO: task description > > 1. **{{tool_name}}** {{ '{%' }} icon tool {{ '%}' }} with the following parameters:{{inputlist}}{{paramlist}} @@ -36,22 +59,32 @@ > TODO: some of these parameters may be the default values and can be removed > unless they have some didactic value. > +> <-- Consider adding a comment or tip box --> +> +> > ### {{ '{%' }} icon comment {{ '%}' }}} Comment +> > +> > A comment about the tool or something else. This box can also be in the main text +> {: .comment} +> {: .hands_on} <-- Consider adding a question to test the learners understanding of the previous exercise --> + > ### {{ '{%' }} icon question {{ '%}' }} Questions > > 1. Question1? > 2. Question2? > -> > ### {{ '{%' }} icon solution {{ '%}' }} Solution -> > -> > 1. Answer for question1 -> > 2. Answer for question2 -> > -> {: .solution} +> > ### {{ '{%' }} icon solution {{ '%}' }} Solution +> > +> > 1. Answer for question1 +> > 2. Answer for question2 +> > +> {: .solution} > {: .question} + + """ @@ -135,8 +168,6 @@ > {: .hands_on} -# Different steps - {{ body }} # Conclusion @@ -318,84 +349,134 @@ def get_input_tool_name(step_id, steps): inp_provenance = '' inp_prov_id = str(step_id) if inp_prov_id in steps: - inp_provenance = '(output of `%s`)' % steps[inp_prov_id]['name'] + inp_provenance = '(output of **%s** {% icon tool %})' % steps[inp_prov_id]['name'] return inp_provenance -def get_input_label(inp_n, inputs): - """Get the label of an input""" - #for inp in inputs: - # if inp["name"] == inp_n: - # return inp["label"] - return inp_n - - -def get_handson_box(step_id, steps, tools): - """Get the string for an hands-on box based on a step in a workflow""" - step = steps[step_id] +def get_tool_input(tool_desc): + """Get a dictionary with label being the tool parameter name and the value the description + of the parameter extracted from the show_tool function of bioblend""" + tool_inp = collections.OrderedDict() + for inp in tool_desc["inputs"]: + tool_inp.setdefault(inp['name'], inp) + return tool_inp - # get tool - tool_name = step['name'] - if len(step['input_connections']) == 0: - return '' - tool = {}#tools[tool_name] - # add input description - input_conn = step['input_connections'] +def format_inputs(wf_inputs, tp_desc, wf_steps): inputlist = '' - for inp_n, inp in input_conn.items(): + for inp_n, inp in wf_inputs.items(): + if inp_n != tp_desc['name']: + continue inps = [] if isinstance(inp, list): # multiple input (not collection) icon = 'param-files' for i in inp: inps.append('`%s` %s' % ( i['output_name'], - get_input_tool_name(i['id'], steps))) + get_input_tool_name(i['id'], wf_steps))) else: # sinle input icon = 'param-file' inps = ['`%s` %s' % ( inp['output_name'], - get_input_tool_name(inp['id'], steps))] - + get_input_tool_name(inp['id'], wf_steps))] context = { "icon": icon, - "input_name": get_input_label(inp_n, tool["inputs"]), + "input_name": tp_desc['label'], "input_value": ', '.join(inps) } - inputlist += templates.render(INPUT_TEMPLATE, **context) + inputlist += templates.render(INPUT_FILE_TEMPLATE, **context) + return inputlist - # add parameters - parameters = step['tool_state'] - print(parameters) - #g = nested_dict_iter(json.loads(parameters)) - #print(g) - +def format_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): + """Format the parameter description (label and value) given the type of parameter""" + paramlist = '' + if 'type' not in tp_desc: + raise ValueError("No type for the paramater %s" % tp_desc['name']) + if tp_desc['type'] == 'data': + paramlist += format_inputs(wf_inputs, tp_desc, wf_steps) + elif tp_desc['type'] == 'data_collection': + info("data_collection parameters are currently not supported") + elif tp_desc['type'] == 'section': + context = {'space': ' ' * level, 'section_label': tp_desc['title']} + sub_param_desc = get_param_desc(wf_params, wf_inputs, get_tool_input(tp_desc), level+1, wf_steps) + if sub_param_desc != '': + paramlist += templates.render(INPUT_SECTION, **context) + paramlist += sub_param_desc + elif tp_desc['type'] == 'conditional': + test_param = tp_desc['test_param'] + paramlist += format_param_desc(wf_params[test_param['name']], wf_inputs, test_param, level, wf_steps) + for case in tp_desc['cases']: + if case['value'] == wf_params[test_param['name']]: + if len(case['inputs']) > 1: + paramlist += get_param_desc(wf_params, wf_inputs, get_tool_input(case), level+1, wf_steps) + elif tp_desc['type'] == 'repeat': + repeat_inp_desc = get_tool_input(tp_desc) + context = {'space': ' ' * level, 'section_label': tp_desc['title']} + paramlist += templates.render(INPUT_SECTION, **context) + for r in range(len(wf_params)): + context = { + 'space': ' ' * (level+1), + 'section_label': "%s: %s" % (r+1, tp_desc['title'])} + paramlist += templates.render(INPUT_SECTION, **context) + paramlist += get_param_desc(wf_params[r], wf_inputs, repeat_inp_desc, level+2, wf_steps) + if r < len(wf_params) - 1: + context = {'space': ' ' * (level+1), 'section_label': tp_desc['title']} + paramlist += templates.render(INPUT_ADD_REPEAT, **context) + else: + if tp_desc['value'] == wf_params: + return paramlist + elif tp_desc['type'] == 'boolean': + if bool(tp_desc['value']) == wf_params: + return paramlist + param_value = 'Yes' if wf_params else 'No' + elif tp_desc['type'] == 'select': + param_value = '' + for opt in tp_desc['options']: + if opt[1] == wf_params: + param_value = opt[0] + elif tp_desc['type'] == 'data_column': + param_value = "c%s" % wf_params + else: + param_value = wf_params + context = { + 'space': ' ' * level, + 'param_label': tp_desc['label'], + 'param_value': param_value} + paramlist += templates.render(INPUT_PARAM, **context) + return paramlist + + +def get_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): + """Parse the parameters of the tool and return the formatted list of the + parameters and values set in the workflow""" paramlist = '' + for n, tp_d in tp_desc.items(): + if n not in wf_params: + raise ValueError("%s not in workflow" % n) + wf_param = wf_params[n] + if isinstance(wf_param, str): + wf_param = json.loads(wf_param) + paramlist += format_param_desc(wf_param, wf_inputs, tp_d, level, wf_steps) + return paramlist + - # while True: - # try: - # (k, v) = next(g) - # print("param: ", k, v) - # except StopIteration: - # break +def get_handson_box(step_id, steps, tools): + """Get the string for an hands-on box based on a step in a workflow""" + step = steps[step_id] - # if not v or v == 'null' or v == '[]': - # pass - # elif 'RuntimeValue' in str(v): - # pass - # print("myinputs:", v, inputs) - # print(inputs) - # elif '__' not in k and k != 'chromInfo': - # paramlist += '\n> - *"' + k + '"*: `' + str(v).strip('"[]') + '`' + # get tool + tool_name = step['name'] + if len(step['input_connections']) == 0: + return '' + tp_desc = tools[tool_name] - # print(paramlist) + # add description + wf_inputs = step['input_connections'] + wf_params = json.loads(step['tool_state']) + paramlist = get_param_desc(wf_params, wf_inputs, tp_desc, 1, steps) - context = { - "tool_name": tool_name, - "inputlist": inputlist, - "paramlist": paramlist - } + context = {"tool_name": tool_name, "paramlist": paramlist} return templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **context) @@ -405,16 +486,14 @@ def get_wf_from_running_galaxy(kwds, ctx): def get_wf_tool_description(wf, gi): - """Get a dictionary with description of all tools in a workflow""" + """Get a dictionary with description of inputs of all tools in a workflow""" tools = {} for s in wf['steps']: step = wf['steps'][s] if len(step['input_connections']) == 0: continue - print() - print(step) - tools.setdefault(step['name'], - gi.tools.show_tool(step['tool_id'], io_details = True)) + tool_desc = gi.tools.show_tool(step['tool_id'], io_details = True) + tools.setdefault(step['name'], get_tool_input(tool_desc)) return tools @@ -426,8 +505,7 @@ def serve_wf_locally(kwds, wf_filepath, ctx): with galaxy_engine.ensure_runnables_served([runnable]) as config: workflow_id = config.workflow_id(wf_filepath) wf = config.gi.workflows.export_workflow_dict(workflow_id) - print(wf) - tools = {} # get_wf_tool_description(wf, config.gi) + tools = get_wf_tool_description(wf, config.gi) return wf, tools @@ -440,19 +518,19 @@ def create_tutorial_from_workflow(kwds, tuto_dir, ctx): else: wf, tools = serve_wf_locally(kwds, kwds["workflow"], ctx) - # get + # get link to data on zenodo z_file_links = get_zenodo_file_url(kwds['zenodo']) body = '' - for step in wf['steps']: - body += get_handson_box(step, wf['steps'], tools) + for step in range(len(wf['steps'].keys())): + body += get_handson_box(str(step), wf['steps'], tools) context = { "topic_name": kwds["topic_name"], "tutorial_name": kwds["tutorial_name"], "zenodo_link": kwds["zenodo"] if kwds["zenodo"] else '', "z_file_links": "\n> ".join(z_file_links), - "hands_on_boxes": body + "body": body } template = templates.render(TUTORIAL_TEMPLATE, **context) @@ -481,7 +559,6 @@ def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): else: shutil.copytree(template_dir, tuto_dir) - print(kwds) # create tutorial skeleton from workflow if kwds["workflow"] or kwds['workflow_id']: info("Create tutorial skeleton from workflow") From e0f43dabbeeace90758051ff26a320180699ad30 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Mon, 23 Jul 2018 18:00:52 +0200 Subject: [PATCH 05/26] Create data_library.yaml file from Zenodo --- planemo/options.py | 10 ++- planemo/training.py | 145 +++++++++++++++++++++++++++++++++++--------- 2 files changed, 124 insertions(+), 31 deletions(-) diff --git a/planemo/options.py b/planemo/options.py index 79c81b805..ae027155d 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1137,6 +1137,7 @@ def training_topic_option(): training_topic_name_option(), planemo_option( "--topic_title", + default="Title of the topic", help="Title of the topic to create"), planemo_option( "--topic_target", @@ -1145,6 +1146,7 @@ def training_topic_option(): help="Target audience for the topic"), planemo_option( "--topic_summary", + default="Summary of the topic", help="Summary of the topic") ) @@ -1178,6 +1180,7 @@ def training_tutorial_option(): training_tutorial_name_option(), planemo_option( "--tutorial_title", + default="Title of the tutorial", help="Title of the tutorial"), planemo_option( "--hands_on", @@ -1199,7 +1202,12 @@ def training_tutorial_option(): def training_init_options(): return _compose( training_topic_option(), - training_tutorial_option() + training_tutorial_option(), + planemo_option( + "--datatypes", + type=click.Path(file_okay=True, resolve_path=True), + help="YAML file with the correspondance between Zenodo extension and Galaxy datatypes", + default="shared/datatypes.yaml") ) diff --git a/planemo/training.py b/planemo/training.py index d93acda2d..6a76df629 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -27,7 +27,7 @@ """ INPUT_ADD_REPEAT = """ ->{{space}}- Click on *"Instert {{section_label}}"*: +>{{space}}- Click on *"Insert {{section_label}}"*: """ INPUT_PARAM = """ @@ -316,12 +316,8 @@ def update_tutorial(kwds, tuto_dir, topic_dir): update_tuto_file(slides_path, kwds["slides"], kwds["topic_name"], kwds["tutorial_name"]) -def get_zenodo_file_url(zenodo_link): - """Get the list of URLs of the files on Zenodo""" - links = [] - if not zenodo_link: - return links - +def get_zenodo_record(zenodo_link): + """Get the content of a Zenodo record""" # get the record in the Zenodo link if 'doi' in zenodo_link: z_record = zenodo_link.split('.')[-1] @@ -334,13 +330,114 @@ def get_zenodo_file_url(zenodo_link): r.raise_for_status() req_res = r.json() + return(z_record, req_res) + + +def fill_data_library(files, kwds, z_record, tuto_dir): + """Fill or update the data library file""" + data_lib_filepath = os.path.join(tuto_dir, "data-library.yaml") + + if os.path.exists(data_lib_filepath): + data_lib = load_yaml(data_lib_filepath) + else: + data_lib = collections.OrderedDict() + + # set default information + data_lib.setdefault('destination', collections.OrderedDict({ + 'type': 'library', + 'name': 'GTN - Material', + 'description': 'Galaxy Training Network Material', + 'synopsis': 'Galaxy Training Network Material. See https://training.galaxyproject.org'})) + data_lib.setdefault('items', []) + data_lib.pop('libraries', None) + + # get topic or create new one + topic = collections.OrderedDict() + for item in data_lib['items']: + if item['name'] == kwds['topic_title']: + topic = item + if not topic: + topic = collections.OrderedDict({ + 'name': kwds['topic_title'], + 'description': kwds['topic_summary'], + 'items': []}) + + # get tutorial or create new one + tuto = collections.OrderedDict() + for item in topic['items']: + if item['name'] == kwds['tutorial_title']: + tuto = item + if not tuto: + tuto = collections.OrderedDict({ + 'name': kwds['tutorial_title'], + 'items': []}) + + # get current data library and/or previous data library for the tutorial + # remove the latest tag of any existing library + # remove the any other existing library + current_data_lib = collections.OrderedDict() + previous_data_lib = collections.OrderedDict() + for item in tuto['items']: + if item['name'] == "DOI: 10.5281/zenodo.%s" % z_record: + current_data_lib = item + elif item['description'] == 'latest': + previous_data_lib = item + previous_data_lib['description'] = '' + if not current_data_lib: + current_data_lib = collections.OrderedDict({ + 'name': "DOI: 10.5281/zenodo.%s" % z_record, + 'description': 'latest', + 'items': []}) + current_data_lib['items'] = files + + # add data lib, tuto and topic + tuto['items'] = [current_data_lib] + if previous_data_lib: + tuto['items'].append(previous_data_lib) + topic['items'].append(tuto) + data_lib['items'].append(topic) + + save_to_yaml(data_lib, data_lib_filepath) + + +def get_galaxy_datatype(z_ext, kwds): + """Get the Galaxy datatype corresponding to a Zenodo file type""" + g_datatype = '' + datatypes = load_yaml(kwds['datatypes']) + if z_ext in datatypes: + g_datatype = datatypes[z_ext] + if g_datatype == '': + g_datatype = '# Please add a Galaxy datatype or update the shared/datatypes.yaml file' + info("Get Galaxy datatypes: %s --> %s" %(z_ext, g_datatype)) + return g_datatype + + +def extract_from_zenodo(kwds, tuto_dir): + """Get the list of URLs of the files on Zenodo and fill the data library file""" + links = [] + if not kwds['zenodo']: + return links + + z_record, req_res = get_zenodo_record(kwds['zenodo']) + # extract the URLs from the JSON if 'files' not in req_res: - return links - + raise ValueError("No files in the Zenodo record") + + files = [] for f in req_res['files']: + file_dict = {'url':'', 'src': 'url', 'ext': '', 'info': kwds['zenodo']} + if 'type' in f: + file_dict['ext'] = get_galaxy_datatype(f['type'], kwds) + if 'links' not in f and 'self' not in f['links']: + raise ValueError("No link for file %s" % f) + file_dict['url'] = f['links']['self'] links.append(f['links']['self']) + files.append(file_dict) + # prepare the data library dictionary + fill_data_library(files, kwds, z_record, tuto_dir) + return links @@ -349,7 +446,7 @@ def get_input_tool_name(step_id, steps): inp_provenance = '' inp_prov_id = str(step_id) if inp_prov_id in steps: - inp_provenance = '(output of **%s** {% icon tool %})' % steps[inp_prov_id]['name'] + inp_provenance = "(output of **%s** {%% icon tool %%})" % steps[inp_prov_id]['name'] return inp_provenance @@ -509,7 +606,7 @@ def serve_wf_locally(kwds, wf_filepath, ctx): return wf, tools -def create_tutorial_from_workflow(kwds, tuto_dir, ctx): +def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): """Create tutorial structure from the workflow file""" # load workflow if kwds['workflow_id']: @@ -518,9 +615,6 @@ def create_tutorial_from_workflow(kwds, tuto_dir, ctx): else: wf, tools = serve_wf_locally(kwds, kwds["workflow"], ctx) - # get link to data on zenodo - z_file_links = get_zenodo_file_url(kwds['zenodo']) - body = '' for step in range(len(wf['steps'].keys())): body += get_handson_box(str(step), wf['steps'], tools) @@ -540,16 +634,6 @@ def create_tutorial_from_workflow(kwds, tuto_dir, ctx): md.write(template) -def extract_tools_from_workflow(kwds, tuto_dir): - """Create and fill tools.yaml file from workflow""" - info("Test") - - -def extract_data_library_from_zenodo(zenodo_link, tuto_dir): - """Create the data_library from Zenodo""" - info("Test") - - def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): """Create the skeleton of a new tutorial""" # copy or rename templates @@ -559,19 +643,20 @@ def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): else: shutil.copytree(template_dir, tuto_dir) + # extract the data library from Zenodo and the links for the tutorial + z_file_links = '' + if kwds["zenodo"]: + info("Create the data library from Zenodo") + z_file_links = extract_from_zenodo(kwds, tuto_dir) + # create tutorial skeleton from workflow if kwds["workflow"] or kwds['workflow_id']: info("Create tutorial skeleton from workflow") - create_tutorial_from_workflow(kwds, tuto_dir, ctx) - extract_tools_from_workflow(kwds, tuto_dir) + #create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx) # fill the metadata of the new tutorial update_tutorial(kwds, tuto_dir, topic_dir) - # extract the data library from Zenodo - if kwds["zenodo"]: - extract_data_library_from_zenodo(kwds["zenodo"], tuto_dir) - def init(ctx, kwds): """Create/update a topic/tutorial""" From f318da369ccc186c55a0c4e853ad8cee8fed7a64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Wed, 25 Jul 2018 18:15:39 +0200 Subject: [PATCH 06/26] Extract workflow and tool also from running Galaxy instance --- planemo/options.py | 3 +++ planemo/training.py | 22 ++++++++++++++++------ 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/planemo/options.py b/planemo/options.py index ae027155d..1e33a1d5e 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1169,6 +1169,9 @@ def training_tutorial_worflow_option(): planemo_option( "--galaxy_url", help="URL of a Galaxy instance with the workflow"), + planemo_option( + "--galaxy_api_key", + help="API key on the Galaxy instance with the workflow"), planemo_option( "--workflow_id", help="ID of the workflow on the Galaxy instance") diff --git a/planemo/training.py b/planemo/training.py index 6a76df629..f6bb4427f 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -10,6 +10,7 @@ from planemo import templates +from planemo.bioblend import galaxy from planemo.io import info from planemo.runnable import for_path from planemo.engine import ( @@ -552,7 +553,7 @@ def get_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): if n not in wf_params: raise ValueError("%s not in workflow" % n) wf_param = wf_params[n] - if isinstance(wf_param, str): + if isinstance(wf_param, str) and ":" in wf_param: wf_param = json.loads(wf_param) paramlist += format_param_desc(wf_param, wf_inputs, tp_d, level, wf_steps) return paramlist @@ -579,7 +580,10 @@ def get_handson_box(step_id, steps, tools): def get_wf_from_running_galaxy(kwds, ctx): """Get the workflow dictionary from a running Galaxy instance with the workflow installed there""" - return {} + gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) + wf = gi.workflows.export_workflow_dict(kwds['workflow_id']) + tools = get_wf_tool_description(wf, gi) + return wf, tools def get_wf_tool_description(wf, gi): @@ -589,7 +593,10 @@ def get_wf_tool_description(wf, gi): step = wf['steps'][s] if len(step['input_connections']) == 0: continue - tool_desc = gi.tools.show_tool(step['tool_id'], io_details = True) + try: + tool_desc = gi.tools.show_tool(step['tool_id'], io_details = True) + except: + tool_desc = {'inputs': []} tools.setdefault(step['name'], get_tool_input(tool_desc)) return tools @@ -610,8 +617,11 @@ def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): """Create tutorial structure from the workflow file""" # load workflow if kwds['workflow_id']: - if kwds['galaxy_url']: - wf = get_wf_from_running_galaxy(kwds, ctx) + if not kwds['galaxy_url']: + raise ValueError("No Galaxy URL given") + if not kwds['galaxy_api_key']: + raise ValueError("No API key to access Galaxy given") + wf, tools = get_wf_from_running_galaxy(kwds, ctx) else: wf, tools = serve_wf_locally(kwds, kwds["workflow"], ctx) @@ -652,7 +662,7 @@ def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): # create tutorial skeleton from workflow if kwds["workflow"] or kwds['workflow_id']: info("Create tutorial skeleton from workflow") - #create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx) + create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx) # fill the metadata of the new tutorial update_tutorial(kwds, tuto_dir, topic_dir) From 2c6c6bddc43fcbd34aa79c443f367409bf7f2370 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Thu, 26 Jul 2018 17:56:15 +0200 Subject: [PATCH 07/26] Add command to fill a data library of an existing tutorial --- .../cmd_training_fill_data_library.py | 19 ++++ planemo/options.py | 47 +++++++--- planemo/training.py | 91 ++++++++++++++----- 3 files changed, 124 insertions(+), 33 deletions(-) create mode 100644 planemo/commands/cmd_training_fill_data_library.py diff --git a/planemo/commands/cmd_training_fill_data_library.py b/planemo/commands/cmd_training_fill_data_library.py new file mode 100644 index 000000000..7e07c406c --- /dev/null +++ b/planemo/commands/cmd_training_fill_data_library.py @@ -0,0 +1,19 @@ +"""Module describing the planemo ``training_fill_data_library`` command.""" +import os + +import click + +from planemo import options +from planemo import training +from planemo.config import planemo_option +from planemo.cli import command_function + + +@click.command('training_fill_data_library') +@options.optional_tools_arg(multiple=True, allow_uris=True) +@options.training_fill_data_library_options() +@command_function +def cli(ctx, uris, **kwds): + """Build training template from workflow.""" + kwds["no_dependency_resolution"] = True + training.fill_data_library(ctx, kwds) diff --git a/planemo/options.py b/planemo/options.py index 1e33a1d5e..8759d3830 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1128,7 +1128,7 @@ def training_topic_name_option(): "--topic_name", required=True, help="Name (directory name) of the topic to create or in which " - "the tutorial should be create" + "a tutorial should be created or updates" ) @@ -1154,11 +1154,33 @@ def training_topic_option(): def training_tutorial_name_option(): return planemo_option( "--tutorial_name", - help="Name (directory name) of the new tutorial to create " - "(it will be the directory name)" + help="Name (directory name) of the tutorial to create or to modify" ) +def training_tutorial_name_req_option(): + return planemo_option( + "--tutorial_name", + required=True, + help="Name (directory name) of the tutorial to modify" + ) + + +def training_datatype_option(): + return planemo_option( + "--datatypes", + type=click.Path(file_okay=True, resolve_path=True), + help="YAML file with the correspondance between Zenodo extension and Galaxy datatypes", + default="shared/datatypes.yaml" + ) + + +def training_zenodo_option(): + return planemo_option( + "--zenodo", + help="Zenodo URL with the input data") + + def training_tutorial_worflow_option(): return _compose( planemo_option( @@ -1196,9 +1218,7 @@ def training_tutorial_option(): default=False, help="Add slides for the new tutorial"), training_tutorial_worflow_option(), - planemo_option( - "--zenodo", - help="Zenodo URL with the input data") + training_zenodo_option() ) @@ -1206,11 +1226,16 @@ def training_init_options(): return _compose( training_topic_option(), training_tutorial_option(), - planemo_option( - "--datatypes", - type=click.Path(file_okay=True, resolve_path=True), - help="YAML file with the correspondance between Zenodo extension and Galaxy datatypes", - default="shared/datatypes.yaml") + training_datatype_option() + ) + + +def training_fill_data_library_options(): + return _compose( + training_topic_name_option(), + training_tutorial_name_req_option(), + training_zenodo_option(), + training_datatype_option() ) diff --git a/planemo/training.py b/planemo/training.py index f6bb4427f..41a7e8cde 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -334,7 +334,7 @@ def get_zenodo_record(zenodo_link): return(z_record, req_res) -def fill_data_library(files, kwds, z_record, tuto_dir): +def prepare_data_library(files, kwds, z_record, tuto_dir): """Fill or update the data library file""" data_lib_filepath = os.path.join(tuto_dir, "data-library.yaml") @@ -344,11 +344,11 @@ def fill_data_library(files, kwds, z_record, tuto_dir): data_lib = collections.OrderedDict() # set default information - data_lib.setdefault('destination', collections.OrderedDict({ - 'type': 'library', - 'name': 'GTN - Material', - 'description': 'Galaxy Training Network Material', - 'synopsis': 'Galaxy Training Network Material. See https://training.galaxyproject.org'})) + data_lib.setdefault('destination', collections.OrderedDict()) + data_lib['destination']['type'] = 'library' + data_lib['destination']['name'] = 'GTN - Material' + data_lib['destination']['description'] = 'Galaxy Training Network Material' + data_lib['destination']['synopsis'] = 'Galaxy Training Network Material. See https://training.galaxyproject.org' data_lib.setdefault('items', []) data_lib.pop('libraries', None) @@ -358,10 +358,10 @@ def fill_data_library(files, kwds, z_record, tuto_dir): if item['name'] == kwds['topic_title']: topic = item if not topic: - topic = collections.OrderedDict({ - 'name': kwds['topic_title'], - 'description': kwds['topic_summary'], - 'items': []}) + data_lib['items'].append(topic) + topic['name'] = kwds['topic_title'] + topic['description'] = kwds['topic_summary'] + topic['items'] = [] # get tutorial or create new one tuto = collections.OrderedDict() @@ -369,9 +369,9 @@ def fill_data_library(files, kwds, z_record, tuto_dir): if item['name'] == kwds['tutorial_title']: tuto = item if not tuto: - tuto = collections.OrderedDict({ - 'name': kwds['tutorial_title'], - 'items': []}) + topic['items'].append(tuto) + tuto['name'] = kwds['tutorial_title'] + tuto['items'] = [] # get current data library and/or previous data library for the tutorial # remove the latest tag of any existing library @@ -385,19 +385,15 @@ def fill_data_library(files, kwds, z_record, tuto_dir): previous_data_lib = item previous_data_lib['description'] = '' if not current_data_lib: - current_data_lib = collections.OrderedDict({ - 'name': "DOI: 10.5281/zenodo.%s" % z_record, - 'description': 'latest', - 'items': []}) + current_data_lib['name'] = "DOI: 10.5281/zenodo.%s" % z_record + current_data_lib['description'] = 'latest' + current_data_lib['items'] = [] current_data_lib['items'] = files - # add data lib, tuto and topic tuto['items'] = [current_data_lib] if previous_data_lib: tuto['items'].append(previous_data_lib) - topic['items'].append(tuto) - data_lib['items'].append(topic) - + save_to_yaml(data_lib, data_lib_filepath) @@ -437,7 +433,7 @@ def extract_from_zenodo(kwds, tuto_dir): files.append(file_dict) # prepare the data library dictionary - fill_data_library(files, kwds, z_record, tuto_dir) + prepare_data_library(files, kwds, z_record, tuto_dir) return links @@ -691,3 +687,54 @@ def init(ctx, kwds): else: info("The tutorial %s in topic %s already exists. It will be updated with the other arguments" % (kwds['tutorial_name'], kwds['topic_name'])) update_tutorial(kwds, tuto_dir, topic_dir) + + +def fill_data_library(ctx, kwds): + """Fill a data library for a tutorial""" + topic_dir = os.path.join("topics", kwds['topic_name']) + if not os.path.isdir(topic_dir): + raise Exception("The topic %s does not exists. It should be created" % kwds['topic_name']) + + tuto_dir = os.path.join(topic_dir, "tutorials", kwds['tutorial_name']) + if not os.path.isdir(tuto_dir): + raise Exception("The tutorial %s does not exists. It should be created" % kwds['tutorial_name']) + + # get metadata + metadata_path = os.path.join(topic_dir, "metadata.yaml") + metadata = load_yaml(metadata_path) + tuto_metadata = collections.OrderedDict() + + for mat in metadata['material']: + if mat['name'] == kwds['tutorial_name']: + tuto_metadata = mat + + # get the zenodo link + z_link = '' + if 'zenodo_link' in tuto_metadata and tuto_metadata['zenodo_link'] != '': + if kwds['zenodo']: + info("The data library and the metadata will be updated with the new Zenodo link") + tuto_metadata['zenodo_link'] = z_link + z_link = kwds['zenodo'] + else: + info("The data library will be extracted using the Zenodo link in the metadata") + z_link = tuto_metadata['zenodo_link'] + else: + info("The data library will be created and the metadata will be filled with the new Zenodo link") + tuto_metadata['zenodo_link'] = z_link + z_link = kwds['zenodo'] + + if z_link == '' or z_link is None: + raise Exception("A Zenodo link should be provided either in the metadata file or as argument of the command") + + # extract the data library from Zenodo + topic_kwds = { + 'topic_title': metadata['title'], + 'topic_summary': metadata['summary'], + 'tutorial_title': tuto_metadata['title'], + 'zenodo': z_link, + 'datatypes': kwds['datatypes'] + } + extract_from_zenodo(topic_kwds, tuto_dir) + + # update the metadata + save_to_yaml(metadata, metadata_path) \ No newline at end of file From bbf9473698a35056cfe82b34bfd9982cd5db8183 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Mon, 30 Jul 2018 16:27:47 +0200 Subject: [PATCH 08/26] Remove template tutorial from metadata --- planemo/training.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/planemo/training.py b/planemo/training.py index 41a7e8cde..e342cd4e4 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -288,6 +288,8 @@ def update_tutorial(kwds, tuto_dir, topic_dir): mat["workflows"] = True if kwds["workflow"] else False mat["zenodo_link"] = kwds["zenodo"] if kwds["zenodo"] else '' found = True + elif mat["name"] == "tutorial1": + metadata["material"].remove(mat) if not found: new_mat = collections.OrderedDict() From d803f3294bc2de03ed3fb954e572d993a8196d0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Mon, 30 Jul 2018 16:47:51 +0200 Subject: [PATCH 09/26] Fix tutorial top metadata --- planemo/training.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/planemo/training.py b/planemo/training.py index e342cd4e4..d07ea4fe8 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -89,8 +89,7 @@ """ -TUTORIAL_TEMPLATE = """ ---- +TUTORIAL_TEMPLATE = """--- layout: tutorial_hands_on topic_name: {{ topic_name }} tutorial_name: {{ tutorial_name }} From 5b5d4b7abc9ebadb80cedb13a387db07ebb1f07a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Mon, 30 Jul 2018 16:48:21 +0200 Subject: [PATCH 10/26] Copy or extract workflow into a file --- planemo/training.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/planemo/training.py b/planemo/training.py index d07ea4fe8..07f972298 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -298,7 +298,7 @@ def update_tutorial(kwds, tuto_dir, topic_dir): new_mat["zenodo_link"] = kwds["zenodo"] if kwds["zenodo"] else '' new_mat["hands_on"] = kwds["hands_on"] new_mat["slides"] = kwds["slides"] - new_mat["workflows"] = True if kwds["workflow"] else False + new_mat["workflows"] = True if kwds["workflow"] or kwds["workflow_id"] else False new_mat["galaxy_tour"] = False new_mat["questions"] = ['', ''] new_mat["objectives"] = ['', ''] @@ -641,6 +641,24 @@ def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): md.write(template) +def add_workflow_file(kwds, tuto_dir): + """Copy or extract workflow file and add it to the tutorial directory""" + wf_dir = os.path.join(tuto_dir, "workflows") + # copy / extract workflow + wf_filepath = os.path.join(wf_dir, "init_workflow.ga") + if kwds["workflow"]: + shutil.copy(kwds["workflow"], wf_filepath) + else: + gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) + gi.workflows.export_workflow_to_local_path(kwds['workflow_id'], + wf_filepath, + use_default_filename = False) + # remove empty workflow file if there + empty_wf_filepath = os.path.join(wf_dir, "empty_workflow.ga") + if os.path.exists(empty_wf_filepath): + os.remove(empty_wf_filepath) + + def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): """Create the skeleton of a new tutorial""" # copy or rename templates @@ -656,10 +674,11 @@ def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): info("Create the data library from Zenodo") z_file_links = extract_from_zenodo(kwds, tuto_dir) - # create tutorial skeleton from workflow + # create tutorial skeleton from workflow and copy workflow file if kwds["workflow"] or kwds['workflow_id']: info("Create tutorial skeleton from workflow") create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx) + add_workflow_file(kwds, tuto_dir) # fill the metadata of the new tutorial update_tutorial(kwds, tuto_dir, topic_dir) From 58f5d35b76315a92fd6e113dc963a939d127b716 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 31 Jul 2018 12:02:22 +0200 Subject: [PATCH 11/26] Fix flake8 linting and parameter description --- .../cmd_training_fill_data_library.py | 3 - planemo/commands/cmd_training_init.py | 2 - planemo/options.py | 4 +- planemo/training.py | 229 ++++++++++-------- 4 files changed, 135 insertions(+), 103 deletions(-) diff --git a/planemo/commands/cmd_training_fill_data_library.py b/planemo/commands/cmd_training_fill_data_library.py index 7e07c406c..77ca5d76f 100644 --- a/planemo/commands/cmd_training_fill_data_library.py +++ b/planemo/commands/cmd_training_fill_data_library.py @@ -1,11 +1,8 @@ """Module describing the planemo ``training_fill_data_library`` command.""" -import os - import click from planemo import options from planemo import training -from planemo.config import planemo_option from planemo.cli import command_function diff --git a/planemo/commands/cmd_training_init.py b/planemo/commands/cmd_training_init.py index 1c3a674b7..eec41e2bd 100644 --- a/planemo/commands/cmd_training_init.py +++ b/planemo/commands/cmd_training_init.py @@ -1,11 +1,9 @@ """Module describing the planemo ``training_init`` command.""" -import os import click from planemo import options from planemo import training -from planemo.config import planemo_option from planemo.cli import command_function diff --git a/planemo/options.py b/planemo/options.py index 8759d3830..be2e843e3 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1128,7 +1128,7 @@ def training_topic_name_option(): "--topic_name", required=True, help="Name (directory name) of the topic to create or in which " - "a tutorial should be created or updates" + "a tutorial should be created or updates" ) @@ -1163,7 +1163,7 @@ def training_tutorial_name_req_option(): "--tutorial_name", required=True, help="Name (directory name) of the tutorial to modify" - ) + ) def training_datatype_option(): diff --git a/planemo/training.py b/planemo/training.py index 07f972298..3ee0a5d89 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -3,24 +3,23 @@ import collections import json import os -import requests import shutil -import time -import oyaml as yaml +import oyaml as yaml +import requests from planemo import templates from planemo.bioblend import galaxy -from planemo.io import info -from planemo.runnable import for_path from planemo.engine import ( engine_context, is_galaxy_engine, ) +from planemo.io import info +from planemo.runnable import for_path INPUT_FILE_TEMPLATE = """ -> - {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} +>{{space}}- {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} """ INPUT_SECTION = """ @@ -42,34 +41,32 @@ ![Alternative text](../../images/image_name "Legend of the image") -The idea is to keep the theory description before quite simple to focus more on the practical part. +The idea is to keep the theory description before quite simple to focus more on the practical part. -<-- Consider adding a detail box to expand the theory --> +***TODO***: *Consider adding a detail box to expand the theory* > ### {{ '{%' }} icon details {{ '%}' }} More details about the theory > > But to describe more details, it is possible to use the detail boxes which are expandable -> +> {: .details} -> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: TODO: task description +> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Task description > > 1. **{{tool_name}}** {{ '{%' }} icon tool {{ '%}' }} with the following parameters:{{inputlist}}{{paramlist}} > -> TODO: check parameter descriptions -> TODO: some of these parameters may be the default values and can be removed -> unless they have some didactic value. +> ***TODO***: *Check parameter descriptions* > -> <-- Consider adding a comment or tip box --> +> ***TODO***: *Consider adding a comment or tip box* > -> > ### {{ '{%' }} icon comment {{ '%}' }}} Comment -> > -> > A comment about the tool or something else. This box can also be in the main text -> {: .comment} +> > ### {{ '{%' }} icon comment {{ '%}' }} Comment +> > +> > A comment about the tool or something else. This box can also be in the main text +> {: .comment} > {: .hands_on} -<-- Consider adding a question to test the learners understanding of the previous exercise --> +***TODO***: *Consider adding a question to test the learners understanding of the previous exercise* > ### {{ '{%' }} icon question {{ '%}' }} Questions > @@ -100,14 +97,15 @@ -General introduction about the topic and then an introduction of the -tutorial (the questions and the objectives). It is nice also to have a -scheme to sum up the pipeline used during the tutorial. The idea is to -give to trainees insight into the content of the tutorial and the (theoretical +General introduction about the topic and then an introduction of the +tutorial (the questions and the objectives). It is nice also to have a +scheme to sum up the pipeline used during the tutorial. The idea is to +give to trainees insight into the content of the tutorial and the (theoretical and technical) key concepts they will learn. -**Please follow our -[tutorial to learn how to fill the Markdown]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-content/tutorial.html)** +**Please follow our +[tutorial to learn how to fill the Markdown]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/\ +create-new-tutorial-content/tutorial.html)** > ### Agenda > @@ -127,7 +125,7 @@ as breaking the tutorial into sections, we encourage you to make such changes as you see fit, this is just a starting point :) -Anywhere you find the word `TODO`, there is something that needs to be changed +Anywhere you find the word "***TODO***", there is something that needs to be changed depending on the specifics of your tutorial. have fun! @@ -140,11 +138,11 @@ > library named `TODO` if available (ask your instructor) > > ``` -> TODO: add the files by the ones on Zenodo here (if not added) -> TODO: remove the useless files (if added) -> TODO: so that they can easily be copy-pasted into Galaxy's upload dialog > {{ z_file_links }} > ``` +> ***TODO***: *Add the files by the ones on Zenodo here (if not added)* +> +> ***TODO***: *Remove the useless files (if added)* > > > ### {{ '{%' }} icon tip {{ '%}' }} Tip: Importing data via links > > @@ -177,6 +175,8 @@ pipeline used. """ +SPACE = ' ' + def load_yaml(filepath): """Load the content of a YAML file to a dictionary""" @@ -343,7 +343,7 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): data_lib = load_yaml(data_lib_filepath) else: data_lib = collections.OrderedDict() - + # set default information data_lib.setdefault('destination', collections.OrderedDict()) data_lib['destination']['type'] = 'library' @@ -363,7 +363,7 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): topic['name'] = kwds['topic_title'] topic['description'] = kwds['topic_summary'] topic['items'] = [] - + # get tutorial or create new one tuto = collections.OrderedDict() for item in topic['items']: @@ -390,23 +390,23 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): current_data_lib['description'] = 'latest' current_data_lib['items'] = [] current_data_lib['items'] = files - + tuto['items'] = [current_data_lib] if previous_data_lib: tuto['items'].append(previous_data_lib) - + save_to_yaml(data_lib, data_lib_filepath) def get_galaxy_datatype(z_ext, kwds): """Get the Galaxy datatype corresponding to a Zenodo file type""" - g_datatype = '' + g_datatype = '' datatypes = load_yaml(kwds['datatypes']) if z_ext in datatypes: g_datatype = datatypes[z_ext] if g_datatype == '': - g_datatype = '# Please add a Galaxy datatype or update the shared/datatypes.yaml file' - info("Get Galaxy datatypes: %s --> %s" %(z_ext, g_datatype)) + g_datatype = '# Please add a Galaxy datatype or update the shared/datatypes.yaml file' + info("Get Galaxy datatypes: %s --> %s" % (z_ext, g_datatype)) return g_datatype @@ -424,7 +424,7 @@ def extract_from_zenodo(kwds, tuto_dir): files = [] for f in req_res['files']: - file_dict = {'url':'', 'src': 'url', 'ext': '', 'info': kwds['zenodo']} + file_dict = {'url': '', 'src': 'url', 'ext': '', 'info': kwds['zenodo']} if 'type' in f: file_dict['ext'] = get_galaxy_datatype(f['type'], kwds) if 'links' not in f and 'self' not in f['links']: @@ -435,7 +435,7 @@ def extract_from_zenodo(kwds, tuto_dir): # prepare the data library dictionary prepare_data_library(files, kwds, z_record, tuto_dir) - + return links @@ -444,7 +444,11 @@ def get_input_tool_name(step_id, steps): inp_provenance = '' inp_prov_id = str(step_id) if inp_prov_id in steps: - inp_provenance = "(output of **%s** {%% icon tool %%})" % steps[inp_prov_id]['name'] + name = steps[inp_prov_id]['name'] + if name == 'Input dataset': + inp_provenance = "(input dataset)" + else: + inp_provenance = "(output of **%s** {%% icon tool %%})" % name return inp_provenance @@ -457,19 +461,21 @@ def get_tool_input(tool_desc): return tool_inp -def format_inputs(wf_inputs, tp_desc, wf_steps): +def format_inputs(wf_inputs, tp_desc, wf_steps, level): inputlist = '' for inp_n, inp in wf_inputs.items(): if inp_n != tp_desc['name']: continue inps = [] - if isinstance(inp, list): # multiple input (not collection) + if isinstance(inp, list): + # multiple input (not collection) icon = 'param-files' for i in inp: inps.append('`%s` %s' % ( i['output_name'], get_input_tool_name(i['id'], wf_steps))) - else: # sinle input + else: + # sinle input icon = 'param-file' inps = ['`%s` %s' % ( inp['output_name'], @@ -477,68 +483,98 @@ def format_inputs(wf_inputs, tp_desc, wf_steps): context = { "icon": icon, "input_name": tp_desc['label'], - "input_value": ', '.join(inps) + "input_value": ', '.join(inps), + "space": SPACE * level } inputlist += templates.render(INPUT_FILE_TEMPLATE, **context) return inputlist +def format_section_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): + """Format the description (label and value) for parameters in a section""" + section_paramlist = '' + context = {'space': SPACE * level, 'section_label': tp_desc['title']} + sub_param_desc = get_param_desc(wf_params, wf_inputs, get_tool_input(tp_desc), level+1, wf_steps) + if sub_param_desc != '': + section_paramlist += templates.render(INPUT_SECTION, **context) + section_paramlist += sub_param_desc + return section_paramlist + + +def format_conditional_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): + """Format the description (label and value) for parameters in a conditional""" + conditional_paramlist = '' + test_param = tp_desc['test_param'] + conditional_paramlist += format_param_desc(wf_params[test_param['name']], wf_inputs, test_param, level, wf_steps) + for case in tp_desc['cases']: + if case['value'] == wf_params[test_param['name']]: + if len(case['inputs']) > 1: + conditional_paramlist += get_param_desc(wf_params, wf_inputs, get_tool_input(case), level+1, wf_steps) + return conditional_paramlist + + +def format_repeat_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): + """Format the description (label and value) for parameters in a repeat""" + repeat_paramlist = '' + repeat_inp_desc = get_tool_input(tp_desc) + context = {'space': SPACE * level, 'section_label': tp_desc['title']} + repeat_paramlist += templates.render(INPUT_SECTION, **context) + for r in range(len(wf_params)): + context = { + 'space': SPACE * (level+1), + 'section_label': "%s: %s" % (r+1, tp_desc['title'])} + repeat_paramlist += templates.render(INPUT_SECTION, **context) + repeat_paramlist += get_param_desc(wf_params[r], wf_inputs, repeat_inp_desc, level+2, wf_steps) + if r < len(wf_params) - 1: + context = {'space': SPACE * (level+1), 'section_label': tp_desc['title']} + repeat_paramlist += templates.render(INPUT_ADD_REPEAT, **context) + return repeat_paramlist + + +def get_param_value(wf_params, tp_desc): + """Get value of a 'simple' parameter""" + param_value = '' + if tp_desc['value'] == wf_params: + param_value = None + elif tp_desc['type'] == 'boolean': + if bool(tp_desc['value']) == wf_params: + param_value = None + param_value = 'Yes' if wf_params else 'No' + elif tp_desc['type'] == 'select': + param_value = '' + for opt in tp_desc['options']: + if opt[1] == wf_params: + param_value = opt[0] + elif tp_desc['type'] == 'data_column': + param_value = "c%s" % wf_params + else: + param_value = wf_params + return param_value + + def format_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): """Format the parameter description (label and value) given the type of parameter""" paramlist = '' if 'type' not in tp_desc: raise ValueError("No type for the paramater %s" % tp_desc['name']) if tp_desc['type'] == 'data': - paramlist += format_inputs(wf_inputs, tp_desc, wf_steps) + paramlist += format_inputs(wf_inputs, tp_desc, wf_steps, level) elif tp_desc['type'] == 'data_collection': info("data_collection parameters are currently not supported") elif tp_desc['type'] == 'section': - context = {'space': ' ' * level, 'section_label': tp_desc['title']} - sub_param_desc = get_param_desc(wf_params, wf_inputs, get_tool_input(tp_desc), level+1, wf_steps) - if sub_param_desc != '': - paramlist += templates.render(INPUT_SECTION, **context) - paramlist += sub_param_desc + paramlist += format_section_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps) elif tp_desc['type'] == 'conditional': - test_param = tp_desc['test_param'] - paramlist += format_param_desc(wf_params[test_param['name']], wf_inputs, test_param, level, wf_steps) - for case in tp_desc['cases']: - if case['value'] == wf_params[test_param['name']]: - if len(case['inputs']) > 1: - paramlist += get_param_desc(wf_params, wf_inputs, get_tool_input(case), level+1, wf_steps) + paramlist += format_conditional_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps) elif tp_desc['type'] == 'repeat': - repeat_inp_desc = get_tool_input(tp_desc) - context = {'space': ' ' * level, 'section_label': tp_desc['title']} - paramlist += templates.render(INPUT_SECTION, **context) - for r in range(len(wf_params)): - context = { - 'space': ' ' * (level+1), - 'section_label': "%s: %s" % (r+1, tp_desc['title'])} - paramlist += templates.render(INPUT_SECTION, **context) - paramlist += get_param_desc(wf_params[r], wf_inputs, repeat_inp_desc, level+2, wf_steps) - if r < len(wf_params) - 1: - context = {'space': ' ' * (level+1), 'section_label': tp_desc['title']} - paramlist += templates.render(INPUT_ADD_REPEAT, **context) + paramlist += format_repeat_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps) else: - if tp_desc['value'] == wf_params: - return paramlist - elif tp_desc['type'] == 'boolean': - if bool(tp_desc['value']) == wf_params: - return paramlist - param_value = 'Yes' if wf_params else 'No' - elif tp_desc['type'] == 'select': - param_value = '' - for opt in tp_desc['options']: - if opt[1] == wf_params: - param_value = opt[0] - elif tp_desc['type'] == 'data_column': - param_value = "c%s" % wf_params - else: - param_value = wf_params - context = { - 'space': ' ' * level, - 'param_label': tp_desc['label'], - 'param_value': param_value} - paramlist += templates.render(INPUT_PARAM, **context) + param_value = get_param_value(wf_params, tp_desc) + if param_value is not None: + context = { + 'space': SPACE * level, + 'param_label': tp_desc['label'], + 'param_value': param_value} + paramlist += templates.render(INPUT_PARAM, **context) return paramlist @@ -591,8 +627,8 @@ def get_wf_tool_description(wf, gi): if len(step['input_connections']) == 0: continue try: - tool_desc = gi.tools.show_tool(step['tool_id'], io_details = True) - except: + tool_desc = gi.tools.show_tool(step['tool_id'], io_details=True) + except Exception: tool_desc = {'inputs': []} tools.setdefault(step['name'], get_tool_input(tool_desc)) return tools @@ -606,7 +642,7 @@ def serve_wf_locally(kwds, wf_filepath, ctx): with galaxy_engine.ensure_runnables_served([runnable]) as config: workflow_id = config.workflow_id(wf_filepath) wf = config.gi.workflows.export_workflow_dict(workflow_id) - tools = get_wf_tool_description(wf, config.gi) + tools = get_wf_tool_description(wf, config.gi) return wf, tools @@ -618,7 +654,7 @@ def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): raise ValueError("No Galaxy URL given") if not kwds['galaxy_api_key']: raise ValueError("No API key to access Galaxy given") - wf, tools = get_wf_from_running_galaxy(kwds, ctx) + wf, tools = get_wf_from_running_galaxy(kwds, ctx) else: wf, tools = serve_wf_locally(kwds, kwds["workflow"], ctx) @@ -634,7 +670,7 @@ def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): "body": body } template = templates.render(TUTORIAL_TEMPLATE, **context) - + # create the tutorial markdown file md_path = os.path.join(tuto_dir, "tutorial.md") with open(md_path, 'w') as md: @@ -652,7 +688,7 @@ def add_workflow_file(kwds, tuto_dir): gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) gi.workflows.export_workflow_to_local_path(kwds['workflow_id'], wf_filepath, - use_default_filename = False) + use_default_filename=False) # remove empty workflow file if there empty_wf_filepath = os.path.join(wf_dir, "empty_workflow.ga") if os.path.exists(empty_wf_filepath): @@ -705,7 +741,8 @@ def init(ctx, kwds): info("The tutorial %s in topic %s does not exist. It will be created." % (kwds['tutorial_name'], kwds['topic_name'])) create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, ctx) else: - info("The tutorial %s in topic %s already exists. It will be updated with the other arguments" % (kwds['tutorial_name'], kwds['topic_name'])) + info("The tutorial %s in topic %s already exists. It will be updated with the other arguments" % ( + kwds['tutorial_name'], kwds['topic_name'])) update_tutorial(kwds, tuto_dir, topic_dir) @@ -742,7 +779,7 @@ def fill_data_library(ctx, kwds): info("The data library will be created and the metadata will be filled with the new Zenodo link") tuto_metadata['zenodo_link'] = z_link z_link = kwds['zenodo'] - + if z_link == '' or z_link is None: raise Exception("A Zenodo link should be provided either in the metadata file or as argument of the command") @@ -757,4 +794,4 @@ def fill_data_library(ctx, kwds): extract_from_zenodo(topic_kwds, tuto_dir) # update the metadata - save_to_yaml(metadata, metadata_path) \ No newline at end of file + save_to_yaml(metadata, metadata_path) From 206bab9e95bcd288e03e61b611202b1cbd334766 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 31 Jul 2018 14:15:51 +0200 Subject: [PATCH 12/26] Add template option and directory creation if not there --- planemo/options.py | 11 ++++++++--- planemo/training.py | 24 +++++++++++++++++------- 2 files changed, 25 insertions(+), 10 deletions(-) diff --git a/planemo/options.py b/planemo/options.py index be2e843e3..c57202ac9 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1139,15 +1139,20 @@ def training_topic_option(): "--topic_title", default="Title of the topic", help="Title of the topic to create"), + planemo_option( + "--topic_summary", + default="Summary of the topic", + help="Summary of the topic"), planemo_option( "--topic_target", type=click.Choice(['use', 'admin-dev', 'instructors']), default="use", help="Target audience for the topic"), planemo_option( - "--topic_summary", - default="Summary of the topic", - help="Summary of the topic") + "--templates", + type=click.Path(file_okay=True, resolve_path=True), + default="templates", + help="Directory with the training templates") ) diff --git a/planemo/training.py b/planemo/training.py index 3ee0a5d89..fa039127c 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -196,11 +196,14 @@ def save_to_yaml(content, filepath): explicit_start=True) -def get_template_dir(): +def get_template_dir(kwds): """Check and return the templates directory""" - template_dir = "templates" - if not os.path.isdir(template_dir): - raise Exception("This script needs to be run in the training material repository") + if not kwds["templates"]: + template_dir = "templates" + if not os.path.isdir(template_dir): + raise Exception("This script needs to be run in the training material repository") + else: + template_dir = kwds["templates"] return template_dir @@ -250,7 +253,10 @@ def create_topic(kwds, topic_dir, template_dir): change_topic_name(kwds["topic_name"], slides_path) # add a symbolic link to the metadata.yaml - os.chdir("metadata") + metadata_dir = "metadata" + if not os.path.isdir(metadata_dir): + os.makedirs(metadata_dir) + os.chdir(metadata_dir) os.symlink(os.path.join("..", metadata_path), "%s.yaml" % kwds["topic_name"]) os.chdir("..") @@ -722,7 +728,7 @@ def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): def init(ctx, kwds): """Create/update a topic/tutorial""" - topic_template_dir = get_template_dir() + topic_template_dir = get_template_dir(kwds) topic_dir = os.path.join("topics", kwds['topic_name']) if not os.path.isdir(topic_dir): @@ -748,7 +754,11 @@ def init(ctx, kwds): def fill_data_library(ctx, kwds): """Fill a data library for a tutorial""" - topic_dir = os.path.join("topics", kwds['topic_name']) + topics_dir = "topics" + if not os.path.isdir(topics_dir): + os.makedirs(topics_dir) + + topic_dir = os.path.join(topics_dir, kwds['topic_name']) if not os.path.isdir(topic_dir): raise Exception("The topic %s does not exists. It should be created" % kwds['topic_name']) From d9368a1f9da62447a4ffb6769bf5e97483e032cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 31 Jul 2018 16:18:02 +0200 Subject: [PATCH 13/26] Add tests for training init command --- project_templates/training/README.md | 4 + project_templates/training/docker/Dockerfile | 21 + .../training/images/template.graphml | 373 ++++++++++++++++++ project_templates/training/index.md | 4 + project_templates/training/metadata.yaml | 50 +++ .../training/slides/introduction.html | 53 +++ .../tutorials/tutorial1/data-library.yaml | 7 + .../tutorials/tutorial1/data-manager.yaml | 18 + .../training/tutorials/tutorial1/slides.html | 14 + .../tutorials/tutorial1/tours/tour.yaml | 0 .../training/tutorials/tutorial1/tutorial.md | 128 ++++++ .../tutorial1/workflows/empty_workflow.ga | 8 + tests/test_cmd_training_init.py | 142 +++++++ 13 files changed, 822 insertions(+) create mode 100644 project_templates/training/README.md create mode 100644 project_templates/training/docker/Dockerfile create mode 100644 project_templates/training/images/template.graphml create mode 100644 project_templates/training/index.md create mode 100644 project_templates/training/metadata.yaml create mode 100644 project_templates/training/slides/introduction.html create mode 100644 project_templates/training/tutorials/tutorial1/data-library.yaml create mode 100644 project_templates/training/tutorials/tutorial1/data-manager.yaml create mode 100644 project_templates/training/tutorials/tutorial1/slides.html create mode 100644 project_templates/training/tutorials/tutorial1/tours/tour.yaml create mode 100644 project_templates/training/tutorials/tutorial1/tutorial.md create mode 100644 project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga create mode 100644 tests/test_cmd_training_init.py diff --git a/project_templates/training/README.md b/project_templates/training/README.md new file mode 100644 index 000000000..03a7d058c --- /dev/null +++ b/project_templates/training/README.md @@ -0,0 +1,4 @@ +Topic name +========== + +Please refer to the [CONTRIBUTING.md](../CONTRIBUTING.md) before adding or updating any material \ No newline at end of file diff --git a/project_templates/training/docker/Dockerfile b/project_templates/training/docker/Dockerfile new file mode 100644 index 000000000..31f674d85 --- /dev/null +++ b/project_templates/training/docker/Dockerfile @@ -0,0 +1,21 @@ +# Galaxy - metagenomics training material +# +# to build the docker image, go to root of training repo and +# docker build -t -f topics//docker/Dockerfile . +# +# to run image: +# docker run -p "8080:80" -t + +FROM bgruening/galaxy-stable + +MAINTAINER Galaxy Training Material + +ENV GALAXY_CONFIG_BRAND "GTN: " + +# copy the tutorials directory for your topic +ADD topics//tutorials/ /tutorials/ + +# install everything for tutorials +ADD bin/docker-install-tutorials.sh /setup-tutorials.sh +ADD bin/mergeyaml.py /mergeyaml.py +RUN /setup-tutorials.sh diff --git a/project_templates/training/images/template.graphml b/project_templates/training/images/template.graphml new file mode 100644 index 000000000..4164882a9 --- /dev/null +++ b/project_templates/training/images/template.graphml @@ -0,0 +1,373 @@ + + + + + + + + + + + + + + + + + + + + + + + Part + + + + + + + + + + + + + + + + + Part + + + + + + + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + Part + + + + + + + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + subPart + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/project_templates/training/index.md b/project_templates/training/index.md new file mode 100644 index 000000000..0720e4154 --- /dev/null +++ b/project_templates/training/index.md @@ -0,0 +1,4 @@ +--- +layout: topic +topic_name: your_topic +--- diff --git a/project_templates/training/metadata.yaml b/project_templates/training/metadata.yaml new file mode 100644 index 000000000..49f4280c0 --- /dev/null +++ b/project_templates/training/metadata.yaml @@ -0,0 +1,50 @@ +--- +name: "Name of the topic" +type: "use" +title: "topic_name" +summary: "Description of the topic" +edam_ontology: "" +requirements: + - + title: "Galaxy introduction" + type: "internal" + link: "/introduction/" + +material: + - + title: "Introduction to the topic" + type: "introduction" + name: "introduction" + slides: "yes" + contributors: + - contributor1 + - contributor2 + - + title: "tutorial1" + type: "tutorial" + name: "tutorial1" + zenodo_link: "" + galaxy_tour: no + hands_on: yes + slides: no + workflows: no + questions: + - "" + - "" + objectives: + - "" + - "" + - "" + time_estimation: "1d/3h/6h" + key_points: + - "" + - "" + - "" + - "..." + contributors: + - contributor1 + - contributor2 + +maintainers: + - maintainer1 + - maintainer2 diff --git a/project_templates/training/slides/introduction.html b/project_templates/training/slides/introduction.html new file mode 100644 index 000000000..a1ec16762 --- /dev/null +++ b/project_templates/training/slides/introduction.html @@ -0,0 +1,53 @@ +--- +layout: introduction_slides +topic_name: "your_topic" +logo: "GTN" +--- + +# What is the topic? + +--- + +### Topic + +Blabla + +- Blabla +- Blabla + +Blabla + +- Blabla +- Blabla + +--- + +### Second slide + +![](../images/RNA_seq_zang2016.png) + +[*Zang and Mortazavi, Nature, 2012*](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4138050/) + +--- + +### A Third slide + +Some text + +??? + +Slide notes +-> Pressing **P** will toggle presenter mode. + + +--- + +# Part 2 + +--- + +### An other slide + +With text and a resized image + +.image-25[![](../images/ecker_2012.jpg)] diff --git a/project_templates/training/tutorials/tutorial1/data-library.yaml b/project_templates/training/tutorials/tutorial1/data-library.yaml new file mode 100644 index 000000000..de1e313fd --- /dev/null +++ b/project_templates/training/tutorials/tutorial1/data-library.yaml @@ -0,0 +1,7 @@ +--- +libraries: + # This needs to be changed to reference your Zenodo data + - name: "Small test files" + files: + - url: "http://raw.githubusercontent.com/bgruening/galaxytools/master/tools/rna_tools/sortmerna/test-data/read_small.fasta" + file_type: "fasta" diff --git a/project_templates/training/tutorials/tutorial1/data-manager.yaml b/project_templates/training/tutorials/tutorial1/data-manager.yaml new file mode 100644 index 000000000..e9420766d --- /dev/null +++ b/project_templates/training/tutorials/tutorial1/data-manager.yaml @@ -0,0 +1,18 @@ +# configuration for fetch and index genomes +--- +data_managers: + # Data manager ID + - id: url to data manager on ToolShed + # tool parameters, nested parameters should be specified using a pipe (|) + params: + - 'param1': '{{ item }}' + - 'param2': 'value' + # Items refer to a list of variables you want to run this data manager. You can use them inside the param field with {{ item }} + # In case of genome for example you can run this DM with multiple genomes, or you could give multiple URLs. + items: + - item1 + - item2 + # Name of the data-tables you want to reload after your DM are finished. This can be important for subsequent data managers + data_table_reload: + - all_fasta + - __dbkeys__ diff --git a/project_templates/training/tutorials/tutorial1/slides.html b/project_templates/training/tutorials/tutorial1/slides.html new file mode 100644 index 000000000..42e37eb0e --- /dev/null +++ b/project_templates/training/tutorials/tutorial1/slides.html @@ -0,0 +1,14 @@ +--- +layout: tutorial_slides +topic_name: your_topic +tutorial_name: your_tutorial_name +logo: "GTN" +--- + +# What is the topic? + +--- + +### How to fill the slide decks? + +Please follow our [tutorial to learn how to fill the slides]({{ site.baseurl }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) diff --git a/project_templates/training/tutorials/tutorial1/tours/tour.yaml b/project_templates/training/tutorials/tutorial1/tours/tour.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/project_templates/training/tutorials/tutorial1/tutorial.md b/project_templates/training/tutorials/tutorial1/tutorial.md new file mode 100644 index 000000000..8303e8afe --- /dev/null +++ b/project_templates/training/tutorials/tutorial1/tutorial.md @@ -0,0 +1,128 @@ +--- +layout: tutorial_hands_on +topic_name: your_topic +tutorial_name: your_tutorial_name +--- + +# Introduction +{:.no_toc} + + + +General introduction about the topic and then an introduction of the tutorial (the questions and the objectives). It is nice also to have a scheme to sum up the pipeline used during the tutorial. The idea is to give to trainees insight into the content of the tutorial and the (theoretical and technical) key concepts they will learn. + +**Please follow our [tutorial to learn how to fill the Markdown]({{ site.baseurl }}/topics/contributing/tutorials/create-new-tutorial-content/tutorial.html)** + +> ### Agenda +> +> In this tutorial, we will deal with: +> +> 1. TOC +> {:toc} +> +{: .agenda} + +# Part 1 + +Introduction about this part + +## Subpart 1 + +Short introduction about this subpart. + + + +> ### {% icon hands_on %} Hands-on: Data upload +> +> 1. Step1 +> 2. Step2 +> +> > ### {% icon comment %} Comments +> > A comment +> {: .comment} +> +> > ### {% icon tip %}Tip: A tip +> > +> > * Step1 +> > * Step2 +> {: .tip} +{: .hands_on} + +## Subpart 2 + +Short introduction about this subpart. + +> ### {% icon hands_on %} Hands-on: Data upload +> +> 1. Step1 +> 2. Step2 +> +> > ### {% icon question %} Question +> > +> > Question? +> > +> > > ### {% icon solution %} Solution +> > > +> > > Answer to question +> > > +> > {: .solution} +> > +> {: .question} +{: .hands_on} + +Some blabla +> ### {% icon hands_on %} Hands-on: Data upload +> +> 1. Step1 +> 2. **My Tool** {% icon tool %} with the following parameters +> - *"param1"*: the file `myfile` +> - *"param2"*: `42` +> - *"param3"*: `Yes` +> +> 3. **My Tool** {% icon tool %} with the following parameters +> - {% icon param-text %} *"My text parameter"*: `my value` +> - {% icon param-file %} *"My input file"*: `my file` +> - {% icon param-files %} *"My multiple file input or collection"*: `my collection` +> - {% icon param-select %} *"My select menu"*: `my choice` +> - {% icon param-check %} *"My check box"*: `yes` +> +> > ### {% icon question %} Questions +> > +> > 1. Question1? +> > 2. Question2? +> > +> > > ### {% icon solution %} Solution +> > > +> > > 1. Answer for question1 +> > > 2. Answer for question2 +> > > +> > {: .solution} +> > +> {: .question} +> +> 3. Step3 +{: .hands_on} + +# Part 2 + +Short introduction about this subpart. + +> ### {% icon comment %} Comment +> +> Do you want to learn more about the principles behind mapping? Follow our [training](../../NGS-mapping) +{: .comment} + + +> ### {% icon details %} More details on the .... +> +> Add more details in Markdown. By default the box is collapsed. And is expanded when clicked +> +{: .details} + +# Conclusion +{:.no_toc} + +Conclusion about the technical key points. And then relation between the techniques and the biological question to end with a global view. diff --git a/project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga b/project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga new file mode 100644 index 000000000..f80a4f6cd --- /dev/null +++ b/project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga @@ -0,0 +1,8 @@ +{ + "a_galaxy_workflow": "true", + "annotation": "", + "format-version": "0.1", + "name": "empty workflow", + "steps": {}, + "uuid": "2b67537f-0546-4ffa-bc9b-51b55829f6d9" +} \ No newline at end of file diff --git a/tests/test_cmd_training_init.py b/tests/test_cmd_training_init.py new file mode 100644 index 000000000..2bb8298aa --- /dev/null +++ b/tests/test_cmd_training_init.py @@ -0,0 +1,142 @@ +"""Tests for the ``training_init`` command.""" +import os + +from .test_utils import ( + CliTestCase, + PROJECT_TEMPLATES_DIR, + TEST_DATA_DIR +) + + +class CmdTrainingInitTestCase(CliTestCase): + """Container class defining test cases for the ``training_init`` command.""" + + def test_training_init_command_by_default(self): + with self._isolate(): + training_init_command = [ + "training_init", + "--topic_name", "test" + ] + self._check_exit_code(training_init_command, exit_code=-1) + + def test_training_init_command_templates(self): + with self._isolate(): + training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") + training_init_command = [ + "training_init", + "--topic_name", "test", + "--templates", training_template + ] + self._check_exit_code(training_init_command, exit_code=0) + + def test_training_init_command_topic(self): + with self._isolate(): + training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") + # working test + training_init_command = [ + "training_init", + "--topic_name", "test", + "--templates", training_template, + "--topic_title", "Topic title", + "--topic_target", "use", + "--topic_summary", "Summary" + ] + self._check_exit_code(training_init_command, exit_code=0) + # failing test + training_init_command = [ + "training_init", + "--topic_name", "test", + "--templates", training_template, + "--topic_title", "Topic title", + "--topic_target", "test", + "--topic_summary", "Summary" + ] + self._check_exit_code(training_init_command, exit_code=2) + + def test_training_init_command_tutorial_no_topic(self): + with self._isolate(): + training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") + # working test + training_init_command = [ + "training_init", + "--tutorial_name", "test", + "--templates", training_template, + ] + self._check_exit_code(training_init_command, exit_code=2) + + def test_training_init_command_tutorial(self): + with self._isolate(): + training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") + # working test + training_init_command = [ + "training_init", + "--topic_name", "test", + "--tutorial_name", "test", + "--templates", training_template, + "--tutorial_title", "Title of the tutorial", + "--hands_on", + "--slides" + ] + self._check_exit_code(training_init_command, exit_code=0) + + def test_training_init_command_tutorial_zenodo(self): + with self._isolate(): + training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") + datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") + # not working test + training_init_command = [ + "training_init", + "--topic_name", "test", + "--tutorial_name", "test", + "--zenodo", "https://zenodo.org/record/1321885", + "--templates", training_template + ] + self._check_exit_code(training_init_command, exit_code=-1) + # working + training_init_command = [ + "training_init", + "--topic_name", "test", + "--tutorial_name", "test", + "--zenodo", "https://zenodo.org/record/1321885", + "--datatypes", datatype, + "--templates", training_template + ] + self._check_exit_code(training_init_command, exit_code=0) + + def test_training_init_command_tutorial_local_wf(self): + with self._isolate(): + training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") + test_workflow = os.path.join(TEST_DATA_DIR, "test_workflow_1.ga") + # working test + training_init_command = [ + "training_init", + "--topic_name", "test", + "--tutorial_name", "test", + "--workflow", test_workflow, + "--templates", training_template + ] + self._check_exit_code(training_init_command, exit_code=0) + + def test_training_init_command_tutorial_remote_wf(self): + with self._isolate(): + training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") + # not working test + training_init_command = [ + "training_init", + "--topic_name", "test", + "--tutorial_name", "test", + "--workflow_id", "ID", + "--templates", training_template + ] + self._check_exit_code(training_init_command, exit_code=-1) + # working test + training_init_command = [ + "training_init", + "--topic_name", "test", + "--tutorial_name", "test", + "--workflow_id", "ID", + "--galaxy_url", "https://usegalaxy.eu/", + "--galaxy_api_key", "API", + "--templates", training_template + ] + self._check_exit_code(training_init_command, exit_code=0) From 185e07018f21d332eebf5ca6a8b891c70406aeb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 31 Jul 2018 16:18:25 +0200 Subject: [PATCH 14/26] Add tests for training fille data library command --- tests/data/training_datatypes.yaml | 2 + tests/data/training_metadata_w_zenodo.yaml | 42 +++++++++ tests/data/training_metadata_wo_zenodo.yaml | 42 +++++++++ tests/test_cmd_training_fill_data_library.py | 93 ++++++++++++++++++++ 4 files changed, 179 insertions(+) create mode 100644 tests/data/training_datatypes.yaml create mode 100644 tests/data/training_metadata_w_zenodo.yaml create mode 100644 tests/data/training_metadata_wo_zenodo.yaml create mode 100644 tests/test_cmd_training_fill_data_library.py diff --git a/tests/data/training_datatypes.yaml b/tests/data/training_datatypes.yaml new file mode 100644 index 000000000..22257a730 --- /dev/null +++ b/tests/data/training_datatypes.yaml @@ -0,0 +1,2 @@ +--- +csv: csv \ No newline at end of file diff --git a/tests/data/training_metadata_w_zenodo.yaml b/tests/data/training_metadata_w_zenodo.yaml new file mode 100644 index 000000000..fe87389ad --- /dev/null +++ b/tests/data/training_metadata_w_zenodo.yaml @@ -0,0 +1,42 @@ +--- +name: test +type: use +title: Test +summary: 'Summary' +edam_ontology: '' +requirements: +- title: Galaxy introduction + type: internal + link: /introduction/ +material: +- title: Introduction to the topic + type: introduction + name: introduction + slides: 'yes' + contributors: + - contributor1 + - contributor2 +- title: Test + name: test + type: tutorial + zenodo_link: 'https://zenodo.org/record/1321885' + hands_on: true + slides: false + workflows: true + galaxy_tour: false + questions: + - '' + - '' + objectives: + - '' + - '' + time_estimation: 1d/3h/6h + key_points: + - '' + - '' + contributors: + - contributor1 + - contributor2 +maintainers: +- maintainer1 +- maintainer2 \ No newline at end of file diff --git a/tests/data/training_metadata_wo_zenodo.yaml b/tests/data/training_metadata_wo_zenodo.yaml new file mode 100644 index 000000000..80a1f7a99 --- /dev/null +++ b/tests/data/training_metadata_wo_zenodo.yaml @@ -0,0 +1,42 @@ +--- +name: test +type: use +title: Test +summary: 'Summary' +edam_ontology: '' +requirements: +- title: Galaxy introduction + type: internal + link: /introduction/ +material: +- title: Introduction to the topic + type: introduction + name: introduction + slides: 'yes' + contributors: + - contributor1 + - contributor2 +- title: Test + name: test + type: tutorial + zenodo_link: '' + hands_on: true + slides: false + workflows: true + galaxy_tour: false + questions: + - '' + - '' + objectives: + - '' + - '' + time_estimation: 1d/3h/6h + key_points: + - '' + - '' + contributors: + - contributor1 + - contributor2 +maintainers: +- maintainer1 +- maintainer2 \ No newline at end of file diff --git a/tests/test_cmd_training_fill_data_library.py b/tests/test_cmd_training_fill_data_library.py new file mode 100644 index 000000000..21a755cd8 --- /dev/null +++ b/tests/test_cmd_training_fill_data_library.py @@ -0,0 +1,93 @@ +"""Tests for the ``training_fill_data_library`` command.""" +import os +import shutil + +from .test_utils import ( + CliTestCase, + TEST_DATA_DIR +) + + +def create_tutorial_dir(topic_n, tuto_n, metadata_n): + topic_dir = os.path.join("topics", topic_n) + tuto_dir = os.path.join(topic_dir, "tutorials", tuto_n) + metadata_path = os.path.join(topic_dir, "metadata.yaml") + if not os.path.isdir(topic_dir): + os.makedirs(topic_dir) + if not os.path.isdir(tuto_dir): + os.makedirs(tuto_dir) + if not os.path.exists(metadata_path): + metadata = os.path.join(TEST_DATA_DIR, metadata_n) + shutil.copy(metadata, metadata_path) + + +def remove_topics(): + shutil.rmtree("topics") + + +class CmdTrainingFillDataLibraryTestCase(CliTestCase): + """Container class defining test cases for the ``training_fill_data_library`` command.""" + + def test_training_fill_data_library_command_empty(self): + with self._isolate(): + training_fill_data_library_command = [ + "training_fill_data_library" + ] + self._check_exit_code(training_fill_data_library_command, exit_code=2) + + def test_training_fill_data_library_command_topic(self): + with self._isolate(): + training_fill_data_library_command = [ + "training_fill_data_library", + "--topic_name", "test" + ] + self._check_exit_code(training_fill_data_library_command, exit_code=2) + + def test_training_fill_data_library_command_tutorial_topic(self): + with self._isolate(): + topic_n = "test" + tuto_n = "test" + datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") + # not working + create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + training_fill_data_library_command = [ + "training_fill_data_library", + "--topic_name", topic_n, + "--tutorial_name", tuto_n, + "--datatypes", datatype + ] + remove_topics() + # working + self._check_exit_code(training_fill_data_library_command, exit_code=-1) + create_tutorial_dir(topic_n, tuto_n, "training_metadata_w_zenodo.yaml") + training_fill_data_library_command = [ + "training_fill_data_library", + "--topic_name", topic_n, + "--tutorial_name", tuto_n, + "--datatypes", datatype + ] + self._check_exit_code(training_fill_data_library_command, exit_code=0) + + def test_training_fill_data_library_command_tutorial_zenodo(self): + with self._isolate(): + topic_n = "test" + tuto_n = "test" + create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") + # not working test + training_fill_data_library_command = [ + "training_fill_data_library", + "--topic_name", topic_n, + "--tutorial_name", tuto_n, + "--zenodo", "https://zenodo.org/record/1321885" + ] + self._check_exit_code(training_fill_data_library_command, exit_code=-1) + # working + training_fill_data_library_command = [ + "training_fill_data_library", + "--topic_name", topic_n, + "--tutorial_name", tuto_n, + "--zenodo", "https://zenodo.org/record/1321885", + "--datatypes", datatype + ] + self._check_exit_code(training_fill_data_library_command, exit_code=0) From daba767ab2cce3ebb3c604a2bc2cc851340cd6f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 31 Jul 2018 18:32:45 +0200 Subject: [PATCH 15/26] Add tests for training and fix training code --- planemo/training.py | 580 +++++++++++------- tests/data/training_datatypes.yaml | 3 +- tests/data/training_workflow.ga | 230 +++++++ tests/test_training.py | 930 +++++++++++++++++++++++++++++ 4 files changed, 1513 insertions(+), 230 deletions(-) create mode 100644 tests/data/training_workflow.ga create mode 100644 tests/test_training.py diff --git a/planemo/training.py b/planemo/training.py index fa039127c..e1842c844 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -3,6 +3,7 @@ import collections import json import os +import re import shutil import oyaml as yaml @@ -27,7 +28,7 @@ """ INPUT_ADD_REPEAT = """ ->{{space}}- Click on *"Insert {{section_label}}"*: +>{{space}}- Click on *"Insert {{repeat_label}}"*: """ INPUT_PARAM = """ @@ -35,21 +36,7 @@ """ HANDS_ON_TOOL_BOX_TEMPLATE = """ -# Title of the section usually corresponding to a big step - -Description of the step: some background and some theory. Some image can be added there to support the theory explanation: - -![Alternative text](../../images/image_name "Legend of the image") - -The idea is to keep the theory description before quite simple to focus more on the practical part. - -***TODO***: *Consider adding a detail box to expand the theory* - -> ### {{ '{%' }} icon details {{ '%}' }} More details about the theory -> -> But to describe more details, it is possible to use the detail boxes which are expandable -> -{: .details} +## Sub-step with **{{tool_name}}** > ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Task description > @@ -82,7 +69,6 @@ > {: .question} - """ @@ -158,7 +144,7 @@ > > ### {{ '{%' }} icon tip {{ '%}' }} Tip: Importing data from a data library > > > > * Go into "Shared data" (top panel) then "Data libraries" -> > * Click on "Training data" and then "Analyses of metagenomics data" +> > * Click on "Training data" and then "{{ topic_title }}" > > * Select interesting file > > * Click on "Import selected datasets into history" > > * Import in a new history @@ -166,8 +152,34 @@ > {: .hands_on} +# Title of the section usually corresponding to a big step in the analysis + +It comes first a description of the step: some background and some theory. +Some image can be added there to support the theory explanation: + +![Alternative text](../../images/image_name "Legend of the image") + +The idea is to keep the theory description before quite simple to focus more on the practical part. + +***TODO***: *Consider adding a detail box to expand the theory* + +> ### {{ '{%' }} icon details {{ '%}' }} More details about the theory +> +> But to describe more details, it is possible to use the detail boxes which are expandable +> +{: .details} + +A big step can have several subsections or sub steps: + {{ body }} +## Re-arrange + +To create the template, each step of the workflow had its own subsection. + +***TODO***: *Re-arrange the generated subsections into sections or other subsections. +Consider merging some hands-on boxes to have a meaningful flow of the analyses* + # Conclusion {:.no_toc} @@ -188,12 +200,14 @@ def load_yaml(filepath): def save_to_yaml(content, filepath): """Save a dictionary to a YAML file""" with open(filepath, 'w') as stream: - yaml.dump(content, - stream, - indent=2, - default_flow_style=False, - default_style='', - explicit_start=True) + yaml.safe_dump(content, + stream, + indent=2, + default_flow_style=False, + default_style='', + explicit_start=True, + encoding='utf-8', + allow_unicode=True) def get_template_dir(kwds): @@ -207,16 +221,20 @@ def get_template_dir(kwds): return template_dir -def change_topic_name(topic_name, filepath): - """Change the topic name in the top metadata of a file""" - with open(filepath, "r") as in_f: - content = in_f.read() +def update_top_metadata_file(filepath, topic_name, tuto_name="tutorial1", keep=True): + """Update metadata on the top or delete a (tutorial or index) file """ + if keep: + with open(filepath, "r") as in_f: + content = in_f.read() - content = content.replace("your_topic", topic_name) - content = content.replace("your_tutorial_name", "tutorial1") + content = content.replace("your_topic", topic_name) + content = content.replace("your_tutorial_name", tuto_name) - with open(filepath, 'w') as out_f: - out_f.write(content) + with open(filepath, 'w') as out_f: + out_f.write(content) + + elif os.path.isfile(filepath): + os.remove(filepath) def create_topic(kwds, topic_dir, template_dir): @@ -232,7 +250,7 @@ def create_topic(kwds, topic_dir, template_dir): # update the index.md to match your topic's name index_path = os.path.join(topic_dir, "index.md") - change_topic_name(kwds["topic_name"], index_path) + update_top_metadata_file(index_path, kwds["topic_name"]) # update the metadata file metadata_path = os.path.join(topic_dir, "metadata.yaml") @@ -248,9 +266,9 @@ def create_topic(kwds, topic_dir, template_dir): # update the metadata in top of tutorial.md and slides.html tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") hand_on_path = os.path.join(tuto_path, "tutorial.md") - change_topic_name(kwds["topic_name"], hand_on_path) + update_top_metadata_file(hand_on_path, kwds["topic_name"]) slides_path = os.path.join(tuto_path, "slides.html") - change_topic_name(kwds["topic_name"], slides_path) + update_top_metadata_file(slides_path, kwds["topic_name"]) # add a symbolic link to the metadata.yaml metadata_dir = "metadata" @@ -261,24 +279,8 @@ def create_topic(kwds, topic_dir, template_dir): os.chdir("..") -def update_tuto_file(filepath, keep, topic_name, tutorial_name): - """Update or delete a tutorial (hands-on or slide) file""" - if keep: - with open(filepath, "r") as in_f: - content = in_f.read() - - content = content.replace("your_topic", topic_name) - content = content.replace("your_tutorial_name", tutorial_name) - - with open(filepath, 'w') as out_f: - out_f.write(content) - - elif os.path.isfile(filepath): - os.remove(filepath) - - def update_tutorial(kwds, tuto_dir, topic_dir): - """Update the metadata information of a tutorial""" + """Update the metadata information of a tutorial and add it if not there""" # update the metadata file to add the new tutorial metadata_path = os.path.join(topic_dir, "metadata.yaml") @@ -290,7 +292,7 @@ def update_tutorial(kwds, tuto_dir, topic_dir): mat["title"] = kwds["tutorial_title"] mat["hands_on"] = kwds["hands_on"] mat["slides"] = kwds["slides"] - mat["workflows"] = True if kwds["workflow"] else False + mat["workflows"] = True if kwds["workflow"] or kwds["workflow_id"] else False mat["zenodo_link"] = kwds["zenodo"] if kwds["zenodo"] else '' found = True elif mat["name"] == "tutorial1": @@ -317,11 +319,11 @@ def update_tutorial(kwds, tuto_dir, topic_dir): # update the metadata in top of tutorial.md or remove it if not needed hand_on_path = os.path.join(tuto_dir, "tutorial.md") - update_tuto_file(hand_on_path, kwds["hands_on"], kwds["topic_name"], kwds["tutorial_name"]) + update_top_metadata_file(hand_on_path, kwds["topic_name"], tuto_name=kwds["tutorial_name"], keep=kwds["hands_on"]) # update the metadata in top of slides.md or remove it if not needed slides_path = os.path.join(tuto_dir, "slides.html") - update_tuto_file(slides_path, kwds["slides"], kwds["topic_name"], kwds["tutorial_name"]) + update_top_metadata_file(slides_path, kwds["topic_name"], tuto_name=kwds["tutorial_name"], keep=kwds["slides"]) def get_zenodo_record(zenodo_link): @@ -331,25 +333,59 @@ def get_zenodo_record(zenodo_link): z_record = zenodo_link.split('.')[-1] else: z_record = zenodo_link.split('/')[-1] - # get JSON corresponding to the record from Zenodo API req = "https://zenodo.org/api/records/%s" % (z_record) r = requests.get(req) - r.raise_for_status() - req_res = r.json() - + if r: + req_res = r.json() + else: + info("The Zenodo link (%s) seems invalid" % (zenodo_link)) + req_res = {'files': []} + z_record = None return(z_record, req_res) -def prepare_data_library(files, kwds, z_record, tuto_dir): - """Fill or update the data library file""" - data_lib_filepath = os.path.join(tuto_dir, "data-library.yaml") +def get_galaxy_datatype(z_ext, datatype_fp): + """Get the Galaxy datatype corresponding to a Zenodo file type""" + g_datatype = '' + datatypes = load_yaml(datatype_fp) + if z_ext in datatypes: + g_datatype = datatypes[z_ext] + if g_datatype == '': + g_datatype = '# Please add a Galaxy datatype or update the shared/datatypes.yaml file' + info("Get Galaxy datatypes: %s --> %s" % (z_ext, g_datatype)) + return g_datatype + + +def get_files_from_zenodo(z_link, datatype_fp): + """Extract a list of URLs and dictionary describing the files from the JSON """ + """output of the Zenodo API""" + z_record, req_res = get_zenodo_record(z_link) + + links = [] + if 'files' not in req_res: + raise ValueError("No files in the Zenodo record") + + files = [] + for f in req_res['files']: + file_dict = {'url': '', 'src': 'url', 'ext': '', 'info': z_link} + if 'type' in f: + file_dict['ext'] = get_galaxy_datatype(f['type'], datatype_fp) + if 'links' not in f and 'self' not in f['links']: + raise ValueError("No link for file %s" % f) + file_dict['url'] = f['links']['self'] + links.append(f['links']['self']) + files.append(file_dict) + return (files, links, z_record) + + +def init_data_lib(data_lib_filepath): + """Init the data library dictionary""" if os.path.exists(data_lib_filepath): data_lib = load_yaml(data_lib_filepath) else: data_lib = collections.OrderedDict() - # set default information data_lib.setdefault('destination', collections.OrderedDict()) data_lib['destination']['type'] = 'library' @@ -358,7 +394,13 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): data_lib['destination']['synopsis'] = 'Galaxy Training Network Material. See https://training.galaxyproject.org' data_lib.setdefault('items', []) data_lib.pop('libraries', None) + return data_lib + +def prepare_data_library(files, kwds, z_record, tuto_dir): + """Fill or update the data library file""" + data_lib_filepath = os.path.join(tuto_dir, "data-library.yaml") + data_lib = init_data_lib(data_lib_filepath) # get topic or create new one topic = collections.OrderedDict() for item in data_lib['items']: @@ -369,7 +411,6 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): topic['name'] = kwds['topic_title'] topic['description'] = kwds['topic_summary'] topic['items'] = [] - # get tutorial or create new one tuto = collections.OrderedDict() for item in topic['items']: @@ -379,70 +420,83 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): topic['items'].append(tuto) tuto['name'] = kwds['tutorial_title'] tuto['items'] = [] - # get current data library and/or previous data library for the tutorial # remove the latest tag of any existing library # remove the any other existing library - current_data_lib = collections.OrderedDict() - previous_data_lib = collections.OrderedDict() - for item in tuto['items']: - if item['name'] == "DOI: 10.5281/zenodo.%s" % z_record: - current_data_lib = item - elif item['description'] == 'latest': - previous_data_lib = item - previous_data_lib['description'] = '' - if not current_data_lib: - current_data_lib['name'] = "DOI: 10.5281/zenodo.%s" % z_record - current_data_lib['description'] = 'latest' - current_data_lib['items'] = [] - current_data_lib['items'] = files - - tuto['items'] = [current_data_lib] - if previous_data_lib: - tuto['items'].append(previous_data_lib) + if z_record: + current_data_lib = collections.OrderedDict() + previous_data_lib = collections.OrderedDict() + for item in tuto['items']: + if item['name'] == "DOI: 10.5281/zenodo.%s" % z_record: + current_data_lib = item + elif item['description'] == 'latest': + previous_data_lib = item + previous_data_lib['description'] = '' + if not current_data_lib: + current_data_lib['name'] = "DOI: 10.5281/zenodo.%s" % z_record + current_data_lib['description'] = 'latest' + current_data_lib['items'] = [] + current_data_lib['items'] = files + + tuto['items'] = [current_data_lib] + if previous_data_lib: + tuto['items'].append(previous_data_lib) save_to_yaml(data_lib, data_lib_filepath) -def get_galaxy_datatype(z_ext, kwds): - """Get the Galaxy datatype corresponding to a Zenodo file type""" - g_datatype = '' - datatypes = load_yaml(kwds['datatypes']) - if z_ext in datatypes: - g_datatype = datatypes[z_ext] - if g_datatype == '': - g_datatype = '# Please add a Galaxy datatype or update the shared/datatypes.yaml file' - info("Get Galaxy datatypes: %s --> %s" % (z_ext, g_datatype)) - return g_datatype - - -def extract_from_zenodo(kwds, tuto_dir): +def prepare_data_library_from_zenodo(kwds, tuto_dir): """Get the list of URLs of the files on Zenodo and fill the data library file""" links = [] if not kwds['zenodo']: return links + files, links, z_record = get_files_from_zenodo(kwds['zenodo'], kwds['datatypes']) + prepare_data_library(files, kwds, z_record, tuto_dir) + return links - z_record, req_res = get_zenodo_record(kwds['zenodo']) - # extract the URLs from the JSON - if 'files' not in req_res: - raise ValueError("No files in the Zenodo record") +def get_tool_input(tool_desc): + """Get a dictionary with label being the tool parameter name and the value the description + of the parameter extracted from the show_tool function of bioblend""" + tool_inp = collections.OrderedDict() + for inp in tool_desc["inputs"]: + tool_inp.setdefault(inp['name'], inp) + return tool_inp - files = [] - for f in req_res['files']: - file_dict = {'url': '', 'src': 'url', 'ext': '', 'info': kwds['zenodo']} - if 'type' in f: - file_dict['ext'] = get_galaxy_datatype(f['type'], kwds) - if 'links' not in f and 'self' not in f['links']: - raise ValueError("No link for file %s" % f) - file_dict['url'] = f['links']['self'] - links.append(f['links']['self']) - files.append(file_dict) - # prepare the data library dictionary - prepare_data_library(files, kwds, z_record, tuto_dir) +def get_wf_tool_description(wf, gi): + """Get a dictionary with description of inputs of all tools in a workflow""" + tools = {} + for s in wf['steps']: + step = wf['steps'][s] + if not step['input_connections']: + continue + try: + tool_desc = gi.tools.show_tool(step['tool_id'], io_details=True) + except Exception: + tool_desc = {'inputs': []} + tools.setdefault(step['name'], get_tool_input(tool_desc)) + return tools - return links + +def get_wf_tool_from_local_galaxy(kwds, wf_filepath, ctx): + """Server local Galaxy and get the workflow dictionary""" + assert is_galaxy_engine(**kwds) + runnable = for_path(wf_filepath) + with engine_context(ctx, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([runnable]) as config: + workflow_id = config.workflow_id(wf_filepath) + wf = config.gi.workflows.export_workflow_dict(workflow_id) + tools = get_wf_tool_description(wf, config.gi) + return wf, tools + + +def get_wf_tools_from_running_galaxy(kwds): + """Get the workflow dictionary from a running Galaxy instance with the workflow installed there""" + gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) + wf = gi.workflows.export_workflow_dict(kwds['workflow_id']) + tools = get_wf_tool_description(wf, gi) + return wf, tools def get_input_tool_name(step_id, steps): @@ -451,25 +505,17 @@ def get_input_tool_name(step_id, steps): inp_prov_id = str(step_id) if inp_prov_id in steps: name = steps[inp_prov_id]['name'] - if name == 'Input dataset': - inp_provenance = "(input dataset)" + if name.find('Input dataset') != -1: + inp_provenance = "(%s)" % name else: inp_provenance = "(output of **%s** {%% icon tool %%})" % name return inp_provenance -def get_tool_input(tool_desc): - """Get a dictionary with label being the tool parameter name and the value the description - of the parameter extracted from the show_tool function of bioblend""" - tool_inp = collections.OrderedDict() - for inp in tool_desc["inputs"]: - tool_inp.setdefault(inp['name'], inp) - return tool_inp - - -def format_inputs(wf_inputs, tp_desc, wf_steps, level): +def format_inputs(step_inputs, tp_desc, wf_steps, level): + """Format the inputs of a step""" inputlist = '' - for inp_n, inp in wf_inputs.items(): + for inp_n, inp in step_inputs.items(): if inp_n != tp_desc['name']: continue inps = [] @@ -481,8 +527,12 @@ def format_inputs(wf_inputs, tp_desc, wf_steps, level): i['output_name'], get_input_tool_name(i['id'], wf_steps))) else: - # sinle input - icon = 'param-file' + # sinle input or collection + inp_type = wf_steps[str(inp['id'])]['type'] + if inp_type.find('collection') != -1: + icon = 'param-collection' + else: + icon = 'param-file' inps = ['`%s` %s' % ( inp['output_name'], get_input_tool_name(inp['id'], wf_steps))] @@ -496,85 +546,160 @@ def format_inputs(wf_inputs, tp_desc, wf_steps, level): return inputlist -def format_section_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): +def get_wf_step_inputs(step_inp): + """Get the inputs from a workflow step and format them""" + step_inputs = {} + for inp_n, inp in step_inp.items(): + if inp_n.find('|') != -1: + repeat_regex = '(?P[^\|]*)_(?P\d+)\|(?P.+).+' + repeat_search = re.search(repeat_regex, inp_n) + hier_regex = '(?P[^\|]*)\|(?P.+)' + hier_regex = re.search(hier_regex, inp_n) + if repeat_search and repeat_search.start(0) <= hier_regex.start(0): + step_inputs.setdefault(repeat_search.group('prefix'), {}) + step_inputs[repeat_search.group('prefix')].setdefault( + repeat_search.group('nb'), + get_wf_step_inputs({hier_regex.group('suffix'): inp})) + else: + step_inputs.setdefault(hier_regex.group('prefix'), {}) + step_inputs[hier_regex.group('prefix')].update( + get_wf_step_inputs({hier_regex.group('suffix'): inp})) + else: + step_inputs.setdefault(inp_n, inp) + return step_inputs + + +def json_load(string): + """Transform a string into a dictionary""" + if ":" in string: + return json.loads(string) + else: + return string + + +def get_lower_params(step_params, name): + """Get the parameters from workflow that are below name in the hierarchy""" + params = json_load(step_params) + if name in params: + params = json_load(params[name]) + return params + + +def get_lower_inputs(step_inputs, name): + """Get the inputs from workflow that are below name in the hierarchy""" + inputs = {} + if name in step_inputs: + inputs = step_inputs[name] + else: + inputs = step_inputs + return inputs + + +def format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): """Format the description (label and value) for parameters in a section""" section_paramlist = '' + # get section description context = {'space': SPACE * level, 'section_label': tp_desc['title']} - sub_param_desc = get_param_desc(wf_params, wf_inputs, get_tool_input(tp_desc), level+1, wf_steps) + # get sub params and inputs + params = get_lower_params(step_params, tp_desc['name']) + inputs = get_lower_inputs(step_inputs, tp_desc['name']) + # get description of parameters in lower hierarchy + sub_param_desc = get_param_desc(params, inputs, get_tool_input(tp_desc), level+1, wf_steps) if sub_param_desc != '': section_paramlist += templates.render(INPUT_SECTION, **context) section_paramlist += sub_param_desc return section_paramlist -def format_conditional_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): +def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): """Format the description (label and value) for parameters in a conditional""" conditional_paramlist = '' + # Get conditional parameter test_param = tp_desc['test_param'] - conditional_paramlist += format_param_desc(wf_params[test_param['name']], wf_inputs, test_param, level, wf_steps) + params = get_lower_params(step_params, tp_desc['name']) + conditional_paramlist += format_param_desc( + params[test_param['name']], + step_inputs, + test_param, + level, + wf_steps, + force_default=True) + # Get parameters in the when for case in tp_desc['cases']: - if case['value'] == wf_params[test_param['name']]: - if len(case['inputs']) > 1: - conditional_paramlist += get_param_desc(wf_params, wf_inputs, get_tool_input(case), level+1, wf_steps) + if case['value'] == params[test_param['name']]: + if len(case['inputs']) > 0: + conditional_paramlist += get_param_desc( + params, + step_inputs, + get_tool_input(case), + level+1, + wf_steps) return conditional_paramlist -def format_repeat_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): +def format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): """Format the description (label and value) for parameters in a repeat""" - repeat_paramlist = '' repeat_inp_desc = get_tool_input(tp_desc) - context = {'space': SPACE * level, 'section_label': tp_desc['title']} - repeat_paramlist += templates.render(INPUT_SECTION, **context) - for r in range(len(wf_params)): - context = { - 'space': SPACE * (level+1), - 'section_label': "%s: %s" % (r+1, tp_desc['title'])} - repeat_paramlist += templates.render(INPUT_SECTION, **context) - repeat_paramlist += get_param_desc(wf_params[r], wf_inputs, repeat_inp_desc, level+2, wf_steps) - if r < len(wf_params) - 1: - context = {'space': SPACE * (level+1), 'section_label': tp_desc['title']} + params = get_lower_params(step_params, tp_desc['name']) + inputs = get_lower_inputs(step_inputs, tp_desc['name']) + repeat_paramlist = '' + for r in range(len(params)): + r_inputs = inputs[str(r)] if str(r) in inputs else inputs + paramlist_in_repeat = get_param_desc(params[r], r_inputs, repeat_inp_desc, level+2, wf_steps) + if paramlist_in_repeat != '': + # add first click + context = {'space': SPACE * (level+1), 'repeat_label': tp_desc['title']} repeat_paramlist += templates.render(INPUT_ADD_REPEAT, **context) + # add description of parameters in the repeat + context = { + 'space': SPACE * (level+1), + 'section_label': "%s: %s" % (r+1, tp_desc['title'])} + repeat_paramlist += templates.render(INPUT_SECTION, **context) + repeat_paramlist += paramlist_in_repeat + if repeat_paramlist != '': + context = {'space': SPACE * level, 'section_label': tp_desc['title']} + repeat_paramlist = templates.render(INPUT_SECTION, **context) + repeat_paramlist return repeat_paramlist -def get_param_value(wf_params, tp_desc): - """Get value of a 'simple' parameter""" +def get_param_value(step_params, tp_desc, force_default=False): + """Get value of a 'simple' parameter if different from the default value, None otherwise""" param_value = '' - if tp_desc['value'] == wf_params: + if tp_desc['value'] == step_params and not force_default: param_value = None elif tp_desc['type'] == 'boolean': - if bool(tp_desc['value']) == wf_params: + if bool(tp_desc['value']) == step_params: param_value = None - param_value = 'Yes' if wf_params else 'No' + else: + param_value = 'Yes' if step_params else 'No' elif tp_desc['type'] == 'select': param_value = '' for opt in tp_desc['options']: - if opt[1] == wf_params: + if opt[1] == step_params: param_value = opt[0] elif tp_desc['type'] == 'data_column': - param_value = "c%s" % wf_params + param_value = "c%s" % step_params else: - param_value = wf_params + param_value = step_params return param_value -def format_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): +def format_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, force_default=False): """Format the parameter description (label and value) given the type of parameter""" paramlist = '' if 'type' not in tp_desc: raise ValueError("No type for the paramater %s" % tp_desc['name']) - if tp_desc['type'] == 'data': - paramlist += format_inputs(wf_inputs, tp_desc, wf_steps, level) - elif tp_desc['type'] == 'data_collection': - info("data_collection parameters are currently not supported") + if tp_desc['type'] == 'data' or tp_desc['type'] == 'data_collection': + paramlist += format_inputs(step_inputs, tp_desc, wf_steps, level) + # info("data_collection parameters are currently not supported") elif tp_desc['type'] == 'section': - paramlist += format_section_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps) + paramlist += format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) elif tp_desc['type'] == 'conditional': - paramlist += format_conditional_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps) + paramlist += format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) elif tp_desc['type'] == 'repeat': - paramlist += format_repeat_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps) + paramlist += format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) else: - param_value = get_param_value(wf_params, tp_desc) + param_value = get_param_value(step_params, tp_desc, force_default) if param_value is not None: context = { 'space': SPACE * level, @@ -584,74 +709,39 @@ def format_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): return paramlist -def get_param_desc(wf_params, wf_inputs, tp_desc, level, wf_steps): +def get_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, should_be_there=False): """Parse the parameters of the tool and return the formatted list of the parameters and values set in the workflow""" paramlist = '' for n, tp_d in tp_desc.items(): - if n not in wf_params: - raise ValueError("%s not in workflow" % n) - wf_param = wf_params[n] - if isinstance(wf_param, str) and ":" in wf_param: - wf_param = json.loads(wf_param) - paramlist += format_param_desc(wf_param, wf_inputs, tp_d, level, wf_steps) + if n not in step_params: + if not should_be_there: + info("%s not in workflow" % n) + else: + raise ValueError("%s not in workflow" % n) + else: + step_param = get_lower_params(step_params, n) + paramlist += format_param_desc(step_param, step_inputs, tp_d, level, wf_steps) return paramlist -def get_handson_box(step_id, steps, tools): +def get_handson_box(step, steps, tools): """Get the string for an hands-on box based on a step in a workflow""" - step = steps[step_id] - + # get input (if none: input step) + step_inputs = get_wf_step_inputs(step['input_connections']) + if not step_inputs: + return '' + # get params + step_params = json.loads(step['tool_state']) # get tool tool_name = step['name'] - if len(step['input_connections']) == 0: - return '' tp_desc = tools[tool_name] - - # add description - wf_inputs = step['input_connections'] - wf_params = json.loads(step['tool_state']) - paramlist = get_param_desc(wf_params, wf_inputs, tp_desc, 1, steps) - + # get formatted param description + paramlist = get_param_desc(step_params, step_inputs, tp_desc, 1, steps, should_be_there=True) context = {"tool_name": tool_name, "paramlist": paramlist} return templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **context) -def get_wf_from_running_galaxy(kwds, ctx): - """Get the workflow dictionary from a running Galaxy instance with the workflow installed there""" - gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) - wf = gi.workflows.export_workflow_dict(kwds['workflow_id']) - tools = get_wf_tool_description(wf, gi) - return wf, tools - - -def get_wf_tool_description(wf, gi): - """Get a dictionary with description of inputs of all tools in a workflow""" - tools = {} - for s in wf['steps']: - step = wf['steps'][s] - if len(step['input_connections']) == 0: - continue - try: - tool_desc = gi.tools.show_tool(step['tool_id'], io_details=True) - except Exception: - tool_desc = {'inputs': []} - tools.setdefault(step['name'], get_tool_input(tool_desc)) - return tools - - -def serve_wf_locally(kwds, wf_filepath, ctx): - """Server local Galaxy and get the workflow dictionary""" - assert is_galaxy_engine(**kwds) - runnable = for_path(wf_filepath) - with engine_context(ctx, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([runnable]) as config: - workflow_id = config.workflow_id(wf_filepath) - wf = config.gi.workflows.export_workflow_dict(workflow_id) - tools = get_wf_tool_description(wf, config.gi) - return wf, tools - - def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): """Create tutorial structure from the workflow file""" # load workflow @@ -660,16 +750,21 @@ def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): raise ValueError("No Galaxy URL given") if not kwds['galaxy_api_key']: raise ValueError("No API key to access Galaxy given") - wf, tools = get_wf_from_running_galaxy(kwds, ctx) + wf, tools = get_wf_tools_from_running_galaxy(kwds) else: - wf, tools = serve_wf_locally(kwds, kwds["workflow"], ctx) + wf, tools = get_wf_tool_from_local_galaxy(kwds, kwds["workflow"], ctx) + save_to_yaml(tools, 'tools.yaml') body = '' - for step in range(len(wf['steps'].keys())): - body += get_handson_box(str(step), wf['steps'], tools) + for step_id in range(len(wf['steps'].keys())): + step = wf['steps'][str(step_id)] + if not step['tool_state']: + continue + body += get_handson_box(step, wf['steps'], tools) context = { "topic_name": kwds["topic_name"], + "topic_title": kwds["topic_title"], "tutorial_name": kwds["tutorial_name"], "zenodo_link": kwds["zenodo"] if kwds["zenodo"] else '', "z_file_links": "\n> ".join(z_file_links), @@ -714,7 +809,7 @@ def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): z_file_links = '' if kwds["zenodo"]: info("Create the data library from Zenodo") - z_file_links = extract_from_zenodo(kwds, tuto_dir) + z_file_links = prepare_data_library_from_zenodo(kwds, tuto_dir) # create tutorial skeleton from workflow and copy workflow file if kwds["workflow"] or kwds['workflow_id']: @@ -734,6 +829,11 @@ def init(ctx, kwds): if not os.path.isdir(topic_dir): info("The topic %s does not exist. It will be created" % kwds['topic_name']) create_topic(kwds, topic_dir, topic_template_dir) + else: + metadata_path = os.path.join(topic_dir, "metadata.yaml") + metadata = load_yaml(metadata_path) + kwds['topic_title'] = metadata['title'] + kwds['topic_summary'] = metadata['summary'] if not kwds['tutorial_name']: if kwds['workflow'] or kwds['workflow_id']: @@ -752,8 +852,8 @@ def init(ctx, kwds): update_tutorial(kwds, tuto_dir, topic_dir) -def fill_data_library(ctx, kwds): - """Fill a data library for a tutorial""" +def prepare_tuto_update(kwds): + """Prepare the update of a tutorial""" topics_dir = "topics" if not os.path.isdir(topics_dir): os.makedirs(topics_dir) @@ -765,30 +865,35 @@ def fill_data_library(ctx, kwds): tuto_dir = os.path.join(topic_dir, "tutorials", kwds['tutorial_name']) if not os.path.isdir(tuto_dir): raise Exception("The tutorial %s does not exists. It should be created" % kwds['tutorial_name']) - # get metadata metadata_path = os.path.join(topic_dir, "metadata.yaml") metadata = load_yaml(metadata_path) tuto_metadata = collections.OrderedDict() - for mat in metadata['material']: if mat['name'] == kwds['tutorial_name']: tuto_metadata = mat + return (topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata) + + +def fill_data_library(ctx, kwds): + """Fill a data library for a tutorial""" + topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata = prepare_tuto_update(kwds) + # get the zenodo link z_link = '' if 'zenodo_link' in tuto_metadata and tuto_metadata['zenodo_link'] != '': if kwds['zenodo']: info("The data library and the metadata will be updated with the new Zenodo link") - tuto_metadata['zenodo_link'] = z_link z_link = kwds['zenodo'] + tuto_metadata['zenodo_link'] = z_link else: info("The data library will be extracted using the Zenodo link in the metadata") z_link = tuto_metadata['zenodo_link'] - else: + elif kwds['zenodo']: info("The data library will be created and the metadata will be filled with the new Zenodo link") - tuto_metadata['zenodo_link'] = z_link z_link = kwds['zenodo'] + tuto_metadata['zenodo_link'] = z_link if z_link == '' or z_link is None: raise Exception("A Zenodo link should be provided either in the metadata file or as argument of the command") @@ -801,7 +906,24 @@ def fill_data_library(ctx, kwds): 'zenodo': z_link, 'datatypes': kwds['datatypes'] } - extract_from_zenodo(topic_kwds, tuto_dir) + prepare_data_library_from_zenodo(topic_kwds, tuto_dir) + print(metadata) + # update the metadata + save_to_yaml(metadata, metadata_path) + +def generate_tuto_from_wf(ctx, kwds): + """Generate the skeleton of a tutorial from a workflow""" + topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata = prepare_tuto_update(kwds) + if kwds["workflow"] or kwds['workflow_id']: + kwds["zenodo"] = '' + kwds["topic_title"] = metadata['title'] + info("Create tutorial skeleton from workflow") + create_tutorial_from_workflow(kwds, [], tuto_dir, ctx) + add_workflow_file(kwds, tuto_dir) + else: + exc = "A path to a local workflow or the id of a workflow on a running Galaxy instance should be provided" + raise Exception(exc) # update the metadata + tuto_metadata['workflows'] = True save_to_yaml(metadata, metadata_path) diff --git a/tests/data/training_datatypes.yaml b/tests/data/training_datatypes.yaml index 22257a730..ed27c75e3 100644 --- a/tests/data/training_datatypes.yaml +++ b/tests/data/training_datatypes.yaml @@ -1,2 +1,3 @@ --- -csv: csv \ No newline at end of file +csv: csv +test: strange_datatype \ No newline at end of file diff --git a/tests/data/training_workflow.ga b/tests/data/training_workflow.ga new file mode 100644 index 000000000..d1840921f --- /dev/null +++ b/tests/data/training_workflow.ga @@ -0,0 +1,230 @@ +{ + "uuid": "82ae273d-dd17-4f97-a286-8c2c0d56a6c9", + "tags": [], + "format-version": "0.1", + "name": "Test training workflow", + "steps": { + "0": { + "tool_id": null, + "tool_version": null, + "outputs": [], + "workflow_outputs": [], + "input_connections": {}, + "tool_state": "{\"collection_type\": \"list\"}", + "id": 0, + "uuid": "72575fe1-340c-41dd-8347-8ac6ead7a981", + "errors": null, + "name": "Input dataset collection", + "label": null, + "inputs": [], + "position": { + "top": 224, + "left": 198.5 + }, + "annotation": "", + "content_id": null, + "type": "data_collection_input" + }, + "1": { + "tool_id": null, + "tool_version": null, + "outputs": [], + "workflow_outputs": [], + "input_connections": {}, + "tool_state": "{}", + "id": 1, + "uuid": "6b1638b8-97ee-465a-a5a2-2d4346c33c80", + "errors": null, + "name": "Input dataset", + "label": null, + "inputs": [], + "position": { + "top": 296, + "left": 234.5 + }, + "annotation": "", + "content_id": null, + "type": "data_input" + }, + "2": { + "tool_id": null, + "tool_version": null, + "outputs": [], + "workflow_outputs": [], + "input_connections": {}, + "tool_state": "{}", + "id": 2, + "uuid": "ab3473ed-778b-4dc0-baaa-d9c3b68faa95", + "errors": null, + "name": "Input dataset", + "label": null, + "inputs": [], + "position": { + "top": 388, + "left": 228 + }, + "annotation": "", + "content_id": null, + "type": "data_input" + }, + "3": { + "tool_id": "toolshed.g2.bx.psu.edu/repos/devteam/fastqc/fastqc/0.71", + "tool_version": "0.71", + "outputs": [ + { + "type": "html", + "name": "html_file" + }, + { + "type": "txt", + "name": "text_file" + } + ], + "workflow_outputs": [], + "input_connections": { + "contaminants": { + "output_name": "output", + "id": 1 + }, + "input_file": { + "output_name": "output", + "id": 0 + } + }, + "tool_state": "{\"__page__\": null, \"contaminants\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"__rerun_remap_job_id__\": null, \"limits\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\", \"input_file\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", + "id": 3, + "tool_shed_repository": { + "owner": "devteam", + "changeset_revision": "ff9530579d1f", + "name": "fastqc", + "tool_shed": "toolshed.g2.bx.psu.edu" + }, + "uuid": "e0c41ba6-03a9-4ff3-8ab4-9f5dd8125e4c", + "errors": null, + "name": "FastQC", + "post_job_actions": {}, + "label": null, + "inputs": [ + { + "name": "contaminants", + "description": "runtime parameter for tool FastQC" + }, + { + "name": "limits", + "description": "runtime parameter for tool FastQC" + }, + { + "name": "input_file", + "description": "runtime parameter for tool FastQC" + } + ], + "position": { + "top": 144, + "left": 514.5 + }, + "annotation": "", + "content_id": "toolshed.g2.bx.psu.edu/repos/devteam/fastqc/fastqc/0.71", + "type": "tool" + }, + "4": { + "tool_id": "toolshed.g2.bx.psu.edu/repos/iuc/query_tabular/query_tabular/2.0.0", + "tool_version": "2.0.0", + "outputs": [ + { + "type": "sqlite", + "name": "sqlitedb" + }, + { + "type": "tabular", + "name": "output" + } + ], + "workflow_outputs": [], + "input_connections": { + "tables_1|table": { + "output_name": "output", + "id": 2 + }, + "add_to_database|withdb": { + "output_name": "output", + "id": 0 + }, + "tables_0|table": { + "output_name": "output", + "id": 1 + } + }, + "tool_state": "{\"tables\": \"[{\\\"tbl_opts\\\": {\\\"pkey_autoincr\\\": \\\"\\\", \\\"load_named_columns\\\": \\\"false\\\", \\\"indexes\\\": [], \\\"table_name\\\": \\\"\\\", \\\"col_names\\\": \\\"\\\", \\\"column_names_from_first_line\\\": \\\"false\\\"}, \\\"__index__\\\": 0, \\\"input_opts\\\": {\\\"linefilters\\\": [{\\\"filter\\\": {\\\"skip_lines\\\": \\\"1\\\", \\\"__current_case__\\\": 0, \\\"filter_type\\\": \\\"skip\\\"}, \\\"__index__\\\": 0}]}, \\\"table\\\": {\\\"__class__\\\": \\\"RuntimeValue\\\"}}, {\\\"tbl_opts\\\": {\\\"pkey_autoincr\\\": \\\"\\\", \\\"load_named_columns\\\": \\\"false\\\", \\\"indexes\\\": [], \\\"table_name\\\": \\\"\\\", \\\"col_names\\\": \\\"\\\", \\\"column_names_from_first_line\\\": \\\"false\\\"}, \\\"__index__\\\": 1, \\\"input_opts\\\": {\\\"linefilters\\\": []}, \\\"table\\\": {\\\"__class__\\\": \\\"RuntimeValue\\\"}}]\", \"save_db\": \"\\\"false\\\"\", \"__page__\": null, \"__rerun_remap_job_id__\": null, \"sqlquery\": \"\\\"\\\"\", \"add_to_database\": \"{\\\"withdb\\\": {\\\"__class__\\\": \\\"RuntimeValue\\\"}}\", \"query_result\": \"{\\\"header\\\": \\\"yes\\\", \\\"header_prefix\\\": \\\"38\\\", \\\"__current_case__\\\": 0}\", \"workdb\": \"\\\"workdb.sqlite\\\"\"}", + "id": 4, + "tool_shed_repository": { + "owner": "iuc", + "changeset_revision": "1ea4e668bf73", + "name": "query_tabular", + "tool_shed": "toolshed.g2.bx.psu.edu" + }, + "uuid": "e09d110a-526a-4dea-b58f-0c03ae0287f1", + "errors": null, + "name": "Query Tabular", + "post_job_actions": {}, + "label": null, + "inputs": [ + { + "name": "add_to_database", + "description": "runtime parameter for tool Query Tabular" + } + ], + "position": { + "top": 353, + "left": 519 + }, + "annotation": "", + "content_id": "toolshed.g2.bx.psu.edu/repos/iuc/query_tabular/query_tabular/2.0.0", + "type": "tool" + }, + "5": { + "tool_id": "toolshed.g2.bx.psu.edu/repos/bgruening/text_processing/tp_head_tool/1.1.0", + "tool_version": "1.1.0", + "outputs": [ + { + "type": "input", + "name": "outfile" + } + ], + "workflow_outputs": [], + "input_connections": { + "infile": { + "output_name": "output", + "id": 4 + } + }, + "tool_state": "{\"count\": \"\\\"10\\\"\", \"__page__\": null, \"complement\": \"\\\"\\\"\", \"__rerun_remap_job_id__\": null, \"infile\": \"{\\\"__class__\\\": \\\"RuntimeValue\\\"}\"}", + "id": 5, + "tool_shed_repository": { + "owner": "bgruening", + "changeset_revision": "74a8bef53a00", + "name": "text_processing", + "tool_shed": "toolshed.g2.bx.psu.edu" + }, + "uuid": "732d789d-e3e2-4d5e-bd28-257e6be0602b", + "errors": null, + "name": "Select first", + "post_job_actions": {}, + "label": null, + "inputs": [ + { + "name": "infile", + "description": "runtime parameter for tool Select first" + } + ], + "position": { + "top": 416, + "left": 775.5 + }, + "annotation": "", + "content_id": "toolshed.g2.bx.psu.edu/repos/bgruening/text_processing/tp_head_tool/1.1.0", + "type": "tool" + } + }, + "annotation": "", + "a_galaxy_workflow": "true" +} \ No newline at end of file diff --git a/tests/test_training.py b/tests/test_training.py new file mode 100644 index 000000000..789eb6701 --- /dev/null +++ b/tests/test_training.py @@ -0,0 +1,930 @@ +import json +import os +import shutil + +from nose.tools import assert_raises_regexp + +from planemo import cli +from planemo import training +from planemo.engine import ( + engine_context, + is_galaxy_engine, +) +from planemo.runnable import for_path +from .test_utils import ( + PROJECT_TEMPLATES_DIR, + TEST_DATA_DIR +) + + +METADATA_FP = os.path.join(TEST_DATA_DIR, "training_metadata_w_zenodo.yaml") +TRAINING_TEMPLATE_DIR = os.path.join(PROJECT_TEMPLATES_DIR, "training") +TUTORIAL_FP = os.path.join("tutorials", "tutorial1", "tutorial.md") +DATATYPE_FP = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") +ZENODO_LINK = 'https://zenodo.org/record/1321885' +WF_FP = os.path.join(TEST_DATA_DIR, "training_workflow.ga") +RUNNABLE = for_path(WF_FP) +CTX = cli.Context() +CTX.planemo_directory = "/tmp/planemo-test-workspace" + + +def prepare_test(): + topic_name = 'my_new_topic' + topic_dir = topic_name + tuto_name = "new_tuto" + tuto_dir = os.path.join(topic_dir, "tutorials", tuto_name) + kwds = { + 'topic_name': topic_name, + 'topic_title': "New topic", + 'topic_target': "use", + 'topic_summary': "Topic summary", + 'tutorial_name': tuto_name, + 'tutorial_title': "Title of tuto", + 'hands_on': True, + 'slides': True, + 'workflow': None, + 'workflow_id': None, + 'zenodo': None, + 'datatypes': DATATYPE_FP, + 'templates': None, + # planemo configuation + 'conda_auto_init': True, + 'conda_auto_install': True, + 'conda_copy_dependencies': False, + 'conda_debug': False, + 'conda_dependency_resolution': False, + 'conda_ensure_channels': 'iuc,bioconda,conda-forge,defaults', + 'conda_exec': None, + 'conda_prefix': None, + 'conda_use_local': False, + 'brew_dependency_resolution': False, + 'daemon': False, + 'database_connection': None, + 'database_type': 'auto', + 'dependency_resolvers_config_file': None, + 'docker': False, + 'docker_cmd': 'docker', + 'docker_extra_volume': None, + 'docker_galaxy_image': 'quay.io/bgruening/galaxy', + 'docker_host': None, + 'docker_sudo': False, + 'docker_sudo_cmd': 'sudo', + 'engine': 'galaxy', + 'extra_tools': (), + 'file_path': None, + 'galaxy_api_key': None, + 'galaxy_branch': None, + 'galaxy_database_seed': None, + 'galaxy_email': 'planemo@galaxyproject.org', + 'galaxy_root': None, + 'galaxy_single_user': True, + 'galaxy_source': None, + 'galaxy_url': None, + 'host': '127.0.0.1', + 'ignore_dependency_problems': False, + 'install_galaxy': False, + 'job_config_file': None, + 'mulled_containers': False, + 'no_cleanup': False, + 'no_cache_galaxy': False, + 'no_dependency_resolution': True, + 'non_strict_cwl': False, + 'pid_file': None, + 'port': '9090', + 'postgres_database_host': None, + 'postgres_database_port': None, + 'postgres_database_user': 'postgres', + 'postgres_psql_path': 'psql', + 'profile': None, + 'shed_dependency_resolution': False, + 'shed_install': True, + 'shed_tool_conf': None, + 'shed_tool_path': None, + 'skip_venv': False, + 'test_data': None, + 'tool_data_table': None, + 'tool_dependency_dir': None + } + return (kwds, topic_dir, tuto_dir) + + +def test_load_yaml(): + """Test :func:`planemo.training.load_yaml`.""" + metadata = training.load_yaml(METADATA_FP) + # test if name there + assert metadata["name"] == "test" + # test if order of material is conserved + assert metadata["material"][1]["name"] == "test" + + +def test_save_to_yaml(): + """Test :func:`planemo.training.save_to_yaml`.""" + metadata = training.load_yaml(METADATA_FP) + new_metadata_fp = "metadata.yaml" + training.save_to_yaml(metadata, new_metadata_fp) + assert os.path.exists(new_metadata_fp) + assert open(new_metadata_fp, 'r').read().find('material') != -1 + os.remove(new_metadata_fp) + + +def test_get_template_dir_1(): + """Test :func:`planemo.training.get_template_dir`: test exception raising""" + kwds = {"templates": None} + exp_exception = "This script needs to be run in the training material repository" + with assert_raises_regexp(Exception, exp_exception): + training.get_template_dir(kwds) + + +def test_get_template_dir_2(): + """Test :func:`planemo.training.get_template_dir`: test default return value""" + kwds = {"templates": None} + os.makedirs("templates") + assert training.get_template_dir(kwds) == "templates" + shutil.rmtree("templates") + + +def test_get_template_dir_3(): + """Test :func:`planemo.training.get_template_dir`: test return value""" + template_path = "temp" + kwds = {"templates": template_path} + assert training.get_template_dir(kwds) == template_path + + +def test_update_top_metadata_file_1(): + """Test :func:`planemo.training.update_top_metadata_file`: test topic change.""" + new_index_fp = "index.md" + topic_name = 'my_new_topic' + template_index_fp = os.path.join(TRAINING_TEMPLATE_DIR, "index.md") + shutil.copyfile(template_index_fp, new_index_fp) + training.update_top_metadata_file(new_index_fp, topic_name) + assert open(new_index_fp, 'r').read().find(topic_name) != -1 + os.remove(new_index_fp) + + +def test_update_top_metadata_file_2(): + """Test :func:`planemo.training.update_top_metadata_file`: test tutorial change.""" + new_tuto_fp = "tutorial.md" + topic_name = 'my_new_topic' + tuto_name = 'my_new_tuto' + template_tuto_fp = os.path.join(TRAINING_TEMPLATE_DIR, TUTORIAL_FP) + shutil.copyfile(template_tuto_fp, new_tuto_fp) + training.update_top_metadata_file(new_tuto_fp, topic_name, tuto_name=tuto_name) + assert open(new_tuto_fp, 'r').read().find(tuto_name) != -1 + os.remove(new_tuto_fp) + + +def test_update_top_metadata_file_3(): + """Test :func:`planemo.training.update_top_metadata_file`: test tutorial change.""" + new_tuto_fp = "tutorial.md" + topic_name = 'my_new_topic' + template_tuto_fp = os.path.join(TRAINING_TEMPLATE_DIR, TUTORIAL_FP) + shutil.copyfile(template_tuto_fp, new_tuto_fp) + training.update_top_metadata_file(new_tuto_fp, topic_name, keep=False) + assert not os.path.exists(new_tuto_fp) + + +def test_create_topic(): + """Test :func:`planemo.training.create_topic`.""" + kwds, topic_dir, tuto_dir = prepare_test() + topic_name = kwds['topic_name'] + training.create_topic(kwds, topic_dir, TRAINING_TEMPLATE_DIR) + # check if files has been moved and updated with topic name + index_fp = os.path.join(topic_dir, "index.md") + assert os.path.exists(index_fp) + assert open(index_fp, 'r').read().find(topic_name) != -1 + tuto_fp = os.path.join(topic_dir, TUTORIAL_FP) + assert os.path.exists(tuto_fp) + assert open(tuto_fp, 'r').read().find(topic_name) != -1 + # check metadata content + metadata = training.load_yaml(os.path.join(topic_dir, "metadata.yaml")) + assert metadata['name'] == topic_name + # check in metadata directory + assert os.path.exists(os.path.join("metadata", "%s.yaml" % topic_name)) + # clean + shutil.rmtree(topic_dir) + shutil.rmtree("metadata") + + +def test_update_tutorial(): + """Test :func:`planemo.training.update_tutorial`.""" + kwds, topic_dir, tuto_dir = prepare_test() + tuto_title = kwds['tutorial_title'] + metadata_fp = os.path.join(topic_dir, "metadata.yaml") + tuto_fp = os.path.join(tuto_dir, "tutorial.md") + slides_fp = os.path.join(tuto_dir, "slides.html") + # create a topic and prepare the tutorial + training.create_topic(kwds, topic_dir, TRAINING_TEMPLATE_DIR) + template_tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") + os.rename(template_tuto_path, tuto_dir) + assert open(metadata_fp, 'r').read().find("tutorial1") != -1 + # test update a new tutorial + training.update_tutorial(kwds, tuto_dir, topic_dir) + assert open(metadata_fp, 'r').read().find("tutorial1") == -1 + assert open(metadata_fp, 'r').read().find(tuto_title) != -1 + assert os.path.exists(tuto_fp) + assert os.path.exists(slides_fp) + # test update an existing tutorial + new_tuto_title = "A totally new title" + kwds['tutorial_title'] = new_tuto_title + kwds['slides'] = False + training.update_tutorial(kwds, tuto_dir, topic_dir) + assert open(metadata_fp, 'r').read().find(tuto_title) == -1 + assert open(metadata_fp, 'r').read().find(new_tuto_title) != -1 + assert not os.path.exists(slides_fp) + # clean + shutil.rmtree(topic_dir) + shutil.rmtree("metadata") + + +def test_get_zenodo_record(): + """Test :func:`planemo.training.get_zenodo_record`.""" + z_record, req_res = training.get_zenodo_record(ZENODO_LINK) + file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" + assert z_record == "1321885" + assert 'files' in req_res + assert req_res['files'][0]['type'] in ['rdata', 'csv'] + assert req_res['files'][0]['links']['self'].find(file_link_prefix) != -1 + # check with wrong zenodo link + z_record, req_res = training.get_zenodo_record('https://zenodo.org/api/records/zenodooo') + assert z_record is None + assert 'files' in req_res + assert len(req_res['files']) == 0 + + +def test_get_zenodo_record_with_doi(): + """Test :func:`planemo.training.get_zenodo_record`: link with DOI.""" + z_link = 'https://doi.org/10.5281/zenodo.1321885' + z_record, req_res = training.get_zenodo_record(z_link) + file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" + assert z_record == "1321885" + assert 'files' in req_res + assert req_res['files'][0]['type'] in ['rdata', 'csv'] + assert req_res['files'][0]['links']['self'].find(file_link_prefix) != -1 + + +def test_get_galaxy_datatype(): + """Test :func:`planemo.training.get_galaxy_datatype`.""" + assert training.get_galaxy_datatype("csv", DATATYPE_FP) == "csv" + assert training.get_galaxy_datatype("test", DATATYPE_FP) == "strange_datatype" + assert training.get_galaxy_datatype("unknown", DATATYPE_FP).find("# Please add") != -1 + + +def test_get_files_from_zenodo(): + """Test :func:`planemo.training.get_files_from_zenodo`.""" + files, links, z_record = training.get_files_from_zenodo(ZENODO_LINK, DATATYPE_FP) + assert z_record == "1321885" + # test links + file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" + assert links[0].find(file_link_prefix) != -1 + # test files dict + assert files[0]['url'].find(file_link_prefix) != -1 + assert files[0]['src'] == 'url' + assert files[0]['info'] == ZENODO_LINK + assert files[0]['ext'].find("# Please add") != -1 + assert files[1]['ext'] == 'csv' + + +def test_init_data_lib(): + """Test :func:`planemo.training.init_data_lib`.""" + data_lib_filepath = 'data-library.yaml' + datalib = training.init_data_lib(data_lib_filepath) + assert datalib['destination']['name'] == 'GTN - Material' + + +def test_prepare_data_library(): + """Test :func:`planemo.training.prepare_data_library`.""" + kwds, topic_dir, tuto_dir = prepare_test() + os.makedirs(tuto_dir) + files, links, z_record = training.get_files_from_zenodo(ZENODO_LINK, DATATYPE_FP) + datalib_fp = os.path.join(tuto_dir, "data-library.yaml") + # test default prepare_data_library + training.prepare_data_library(files, kwds, z_record, tuto_dir) + assert os.path.exists(datalib_fp) + datalib = training.load_yaml(datalib_fp) + assert datalib['items'][0]['name'] == kwds['topic_title'] + assert datalib['items'][0]['items'][0]['name'] == kwds['tutorial_title'] + assert datalib['items'][0]['items'][0]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % z_record + assert datalib['items'][0]['items'][0]['items'][0]['description'] == "latest" + assert datalib['items'][0]['items'][0]['items'][0]['items'][0]['url'] == files[0]['url'] + # test adding a new collection for same tutorial + new_z_record = '124' + training.prepare_data_library(files, kwds, new_z_record, tuto_dir) + datalib = training.load_yaml(datalib_fp) + assert datalib['items'][0]['items'][0]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % new_z_record + assert datalib['items'][0]['items'][0]['items'][0]['description'] == "latest" + assert datalib['items'][0]['items'][0]['items'][1]['name'] == "DOI: 10.5281/zenodo.%s" % z_record + assert datalib['items'][0]['items'][0]['items'][1]['description'] == "" + # test adding a new tutorial + new_tuto_title = "New title" + kwds['tutorial_title'] = new_tuto_title + training.prepare_data_library(files, kwds, z_record, tuto_dir) + datalib = training.load_yaml(datalib_fp) + assert datalib['items'][0]['items'][1]['name'] == new_tuto_title + assert datalib['items'][0]['items'][1]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % z_record + # test adding a new topic + new_topic_title = "New title" + kwds['topic_title'] = new_topic_title + training.prepare_data_library(files, kwds, z_record, tuto_dir) + datalib = training.load_yaml(datalib_fp) + assert datalib['items'][1]['name'] == new_topic_title + assert datalib['items'][1]['items'][0]['name'] == new_tuto_title + assert datalib['items'][1]['items'][0]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % z_record + # clean + shutil.rmtree(topic_dir) + + +def test_prepare_data_library_from_zenodo(): + """Test :func:`planemo.training.prepare_data_library_from_zenodo`.""" + kwds, topic_dir, tuto_dir = prepare_test() + os.makedirs(tuto_dir) + datalib_fp = os.path.join(tuto_dir, "data-library.yaml") + # test prepare_data_library_from_zenodo with no zenodo + links = training.prepare_data_library_from_zenodo(kwds, tuto_dir) + assert len(links) == 0 + assert not os.path.exists(datalib_fp) + # test prepare_data_library_from_zenodo with a zenodo link + kwds['zenodo'] = ZENODO_LINK + links = training.prepare_data_library_from_zenodo(kwds, tuto_dir) + file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" + assert links[0].find(file_link_prefix) != -1 + assert os.path.exists(datalib_fp) + # clean + shutil.rmtree(topic_dir) + + +def test_get_tool_input(): + """Test :func:`planemo.training.get_tool_input`.""" + tool_desc = { + 'inputs': [ + {'name': "name1", 'content': 'c'}, + {'name': "name2", 'content': 'c'} + ] + } + tool_inp = training.get_tool_input(tool_desc) + assert "name1" in tool_inp + assert 'content' in tool_inp["name1"] + assert tool_inp["name1"]['content'] == 'c' + + +def check_tools(tools): + """Test the tool return from get_wf_tool_description""" + assert 'FastQC' in tools + assert 'input_file' in tools['FastQC'] + + +def test_get_wf_tool_description(): + """Test :func:`planemo.training.get_wf_tool_description`.""" + kwds, topic_dir, tuto_dir = prepare_test() + assert is_galaxy_engine(**kwds) + with engine_context(CTX, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + workflow_id = config.workflow_id(WF_FP) + wf = config.gi.workflows.export_workflow_dict(workflow_id) + wf['steps']['10'] = { + 'input_connections': [], + 'tool_id': 'no_input', + 'name': 'with_no_input' + } + wf['steps']['11'] = { + 'input_connections': [1], + 'tool_id': 'no_tool', + 'name': 'with_no_tool' + } + tools = training.get_wf_tool_description(wf, config.gi) + check_tools(tools) + assert 'with_no_input' not in tools + assert 'with_no_tool' in tools + + +def check_workflow(wf): + """Test the worflow return""" + assert 'steps' in wf + assert '1' in wf['steps'] + assert 'name' in wf['steps']['1'] + + +def test_get_wf_tool_from_local_galaxy(): + """Test :func:`planemo.training.get_wf_tool_from_local_galaxy`.""" + kwds, topic_dir, tuto_dir = prepare_test() + wf, tools = training.get_wf_tool_from_local_galaxy(kwds, WF_FP, CTX) + check_tools(tools) + check_workflow(wf) + + +def test_get_wf_tools_from_running_galaxy(): + """Test :func:`planemo.training.get_wf_tools_from_running_galaxy`.""" + kwds, topic_dir, tuto_dir = prepare_test() + assert is_galaxy_engine(**kwds) + kwds['galaxy_url'] = 'http://%s:%s' % (kwds['host'], kwds['port']) + with engine_context(CTX, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + workflow_id = config.workflow_id(WF_FP) + kwds['workflow_id'] = workflow_id + kwds['galaxy_api_key'] = config.user_api_key + wf = config.gi.workflows.export_workflow_dict(workflow_id) + wf, tools = training.get_wf_tools_from_running_galaxy(kwds) + check_tools(tools) + check_workflow(wf) + + +def test_get_input_tool_name(): + """Test :func:`planemo.training.get_input_tool_name`.""" + steps = {'1': {'name': 'Input dataset'}} + # test if step not found + tool_name = training.get_input_tool_name(2, steps) + assert tool_name == '' + # test if tool is input + assert training.get_input_tool_name(1, steps) == '(Input dataset)' + # test if tool is input + steps['1']['name'] = 'Input dataset collection' + assert training.get_input_tool_name(1, steps) == '(Input dataset collection)' + # test if other case + steps['1']['name'] = 'Tool name' + assert training.get_input_tool_name(1, steps) == '(output of **Tool name** {% icon tool %})' + + +def get_wf_a_tools(): + """Get workflow and tool of a workflow""" + kwds, topic_dir, tuto_dir = prepare_test() + assert is_galaxy_engine(**kwds) + with engine_context(CTX, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + workflow_id = config.workflow_id(WF_FP) + wf = config.gi.workflows.export_workflow_dict(workflow_id) + tools = training.get_wf_tool_description(wf, config.gi) + return (wf, tools) + + +def test_format_inputs(): + """Test :func:`planemo.training.format_inputs`.""" + wf, tools = get_wf_a_tools() + step = wf['steps']['3'] + step_inputs = step['input_connections'] + tool = tools[step['name']] + inputlist = training.format_inputs(step_inputs, tool['input_file'], wf['steps'], 1) + assert inputlist.find('param-collection ') != -1 + assert inputlist.find('Input dataset collection') != -1 + inputlist = training.format_inputs(step_inputs, tool['contaminants'], wf['steps'], 1) + assert inputlist.find('param-file ') != -1 + + +def test_get_wf_step_inputs(): + """Test :func:`planemo.training.get_wf_step_inputs`.""" + step_inp = { + 'tables_1|table': {'output_name': 'output', 'id': 2}, + 'add_to_database|withdb': {'output_name': 'output', 'id': 0}, + 'tables_0|table': {'output_name': 'output', 'id': 1}, + 'add_to_database|tab_0|tt': {'output_name': 'output', 'id': 0}, + 'tables_2|section|sect': {'output_name': 'output', 'id': 1}, + 'tables_3|tables_0|sect': {'output_name': 'output', 'id': 1} + } + step_inputs = training.get_wf_step_inputs(step_inp) + assert 'tables' in step_inputs + assert '0' in step_inputs['tables'] + assert 'table' in step_inputs['tables']['0'] + assert '2' in step_inputs['tables'] + assert 'section' in step_inputs['tables']['2'] + assert 'sect' in step_inputs['tables']['2']['section'] + assert 'output_name' in step_inputs['tables']['2']['section']['sect'] + assert 'add_to_database' in step_inputs + assert 'withdb' in step_inputs['add_to_database'] + assert 'tab' in step_inputs['add_to_database'] + assert '0' in step_inputs['add_to_database']['tab'] + assert 'tt' in step_inputs['add_to_database']['tab']['0'] + + +def test_json_load(): + """Test :func:`planemo.training.json_load`.""" + assert isinstance(training.json_load('{"name": "1"}'), dict) + assert isinstance(training.json_load("name"), str) + + +def test_get_lower_params(): + """Test :func:`planemo.training.get_lower_params`.""" + step_params = {'name': '1'} + assert 'name' in training.get_lower_params(step_params, 'n1') + assert training.get_lower_params(step_params, 'name') == '1' + print(training.get_lower_params('{"name": "1"}', 'n1')) + assert 'name' in training.get_lower_params('{"name": "1"}', 'n1') + + +def test_get_lower_inputs(): + """Test :func:`planemo.training.get_lower_inputs`.""" + step_inputs = {'name': '1'} + assert 'name' in training.get_lower_inputs(step_inputs, 'n1') + assert training.get_lower_inputs(step_inputs, 'name') == '1' + + +def test_format_section_param_desc(): + """Test :func:`planemo.training.format_section_param_desc`.""" + wf, tools = get_wf_a_tools() + step = wf['steps']['4'] + step_inputs = training.get_wf_step_inputs(step['input_connections']) + step_params = json.loads(step['tool_state']) + tp_desc = tools[step['name']]['add_to_database'] + section_paramlist = training.format_section_param_desc( + step_params, + step_inputs, + tp_desc, + 0, + wf['steps']) + assert section_paramlist.find('In *"Add tables to an existing database"*') != -1 + assert section_paramlist.find('icon param-collection') != -1 + assert section_paramlist.find('Input dataset collection') != -1 + + +def test_format_conditional_param_desc(): + """Test :func:`planemo.training.format_conditional_param_desc`.""" + wf, tools = get_wf_a_tools() + step = wf['steps']['4'] + step_inputs = training.get_wf_step_inputs(step['input_connections']) + step_params = json.loads(step['tool_state']) + tp_desc = tools[step['name']]['query_result'] + section_paramlist = training.format_conditional_param_desc( + step_params, + step_inputs, + tp_desc, + 0, + wf['steps']) + print(section_paramlist) + assert section_paramlist.find('column headers') != -1 + assert section_paramlist.find('`Yes`') != -1 + assert section_paramlist.find('column_header line') != -1 + + +def test_format_repeat_param_desc(): + """Test :func:`planemo.training.format_repeat_param_desc`.""" + wf, tools = get_wf_a_tools() + step = wf['steps']['4'] + step_inputs = training.get_wf_step_inputs(step['input_connections']) + step_params = json.loads(step['tool_state']) + tp_desc = tools[step['name']]['tables'] + repeat_paramlist = training.format_repeat_param_desc( + step_params, + step_inputs, + tp_desc, + 0, + wf['steps']) + print(repeat_paramlist) + assert repeat_paramlist.find('Click on *"Insert Database Table"*') != -1 + assert repeat_paramlist.find('In *"1: Database Table"*') != -1 + assert repeat_paramlist.find('In *"1: Database Table"*') != -1 + assert repeat_paramlist.find('Click on *"Insert Filter Tabular Input Lines"*') != -1 + assert repeat_paramlist.find('In *"1: Filter Tabular Input Lines"*:') != -1 + assert repeat_paramlist.find('In *"2: Database Table"*:') != -1 + + +def test_get_param_value(): + """Test :func:`planemo.training.get_param_value`.""" + # test same value + tp_desc = {'type': 'boolean', 'value': 'same'} + assert training.get_param_value('same', tp_desc) is None + # test boolean + tp_desc = {'type': 'boolean', 'value': 'True'} + assert training.get_param_value(True, tp_desc) is None + assert training.get_param_value(False, tp_desc) == 'No' + # test select + tp_desc = {'type': 'select', 'options': [['opt1', 'val1'], ['opt2', 'val2']], 'value': ''} + assert training.get_param_value('val1', tp_desc) == 'opt1' + # test data_column + tp_desc = {'type': 'data_column', 'value': ''} + assert training.get_param_value('1', tp_desc) == 'c1' + # test integer + tp_desc = {'type': 'integer', 'value': ''} + assert training.get_param_value('1', tp_desc) == '1' + + +def test_format_param_desc(): + """Test :func:`planemo.training.format_param_desc`.""" + wf, tools = get_wf_a_tools() + step = wf['steps']['4'] + step_inputs = training.get_wf_step_inputs(step['input_connections']) + step_params = json.loads(step['tool_state']) + # test section (add_to_database) + n = 'add_to_database' + tp_desc = tools[step['name']][n] + step_param = training.get_lower_params(step_params, n) + paramlist = training.format_param_desc( + step_param, + step_inputs, + tp_desc, + 0, + wf['steps'], + force_default=False) + assert paramlist.find('In *"Add tables to an existing database"*') != -1 + # test repeat (tables) + n = 'tables' + tp_desc = tools[step['name']][n] + step_param = training.get_lower_params(step_params, n) + paramlist = training.format_param_desc( + step_param, + step_inputs, + tp_desc, + 0, + wf['steps'], + force_default=False) + assert paramlist.find('In *"1: Filter Tabular Input Lines"*:') != -1 + # test boolean (save_db) + n = 'save_db' + tp_desc = tools[step['name']][n] + step_param = training.get_lower_params(step_params, n) + paramlist = training.format_param_desc( + step_param, + step_inputs, + tp_desc, + 0, + wf['steps'], + force_default=False) + assert '`Yes`' in paramlist + # test conditional (query_result) + n = 'query_result' + tp_desc = tools[step['name']][n] + step_param = training.get_lower_params(step_params, n) + paramlist = training.format_param_desc( + step_param, + step_inputs, + tp_desc, + 0, + wf['steps'], + force_default=False) + assert paramlist.find('Prefix character') != -1 + # no type + exp_exception = "No type for the paramater name" + with assert_raises_regexp(Exception, exp_exception): + training.format_param_desc( + step_params, + step_inputs, + {'name': 'name'}, + 0, + wf['steps'], + force_default=False) + + +def test_get_param_desc(): + """Test :func:`planemo.training.get_param_desc`.""" + wf, tools = get_wf_a_tools() + step_3 = wf['steps']['3'] + step_inputs = training.get_wf_step_inputs(step_3['input_connections']) + step_params = json.loads(step_3['tool_state']) + # not in workflow and should be there + step_4 = wf['steps']['4'] + tp_desc = tools[step_4['name']] + exp_exception = "workdb not in workflow" + with assert_raises_regexp(Exception, exp_exception): + training.get_param_desc( + step_params, + step_inputs, + tp_desc, + 0, + wf['steps'], + should_be_there=True) + # not in workflow + step_4 = wf['steps']['4'] + tp_desc = tools[step_4['name']] + paramlist = training.get_param_desc( + step_params, + step_inputs, + tp_desc, + 0, + wf['steps']) + assert paramlist == '' + # correct one + tp_desc = tools[step_3['name']] + paramlist = training.get_param_desc( + step_params, + step_inputs, + tp_desc, + 0, + wf['steps']) + assert 'param-collection' in paramlist + assert 'param-file' in paramlist + + +def test_get_handson_box(): + """Test :func:`planemo.training.get_handson_box`.""" + wf, tools = get_wf_a_tools() + # test normal step + hand_boxes = training.get_handson_box(wf['steps']['3'], wf['steps'], tools) + assert '### {% icon hands_on %}' in hand_boxes + assert '{% icon tool %} with the following parameters:' in hand_boxes + assert ': .hands_on' in hand_boxes + # test input step + hand_boxes = training.get_handson_box(wf['steps']['1'], wf['steps'], tools) + assert hand_boxes == '' + + +def test_create_tutorial_from_workflow(): + """Test :func:`planemo.training.create_tutorial_from_workflow`.""" + kwds, topic_dir, tuto_dir = prepare_test() + os.makedirs(tuto_dir) + assert is_galaxy_engine(**kwds) + kwds['galaxy_url'] = 'http://%s:%s' % (kwds['host'], kwds['port']) + with engine_context(CTX, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + workflow_id = config.workflow_id(WF_FP) + kwds['workflow_id'] = workflow_id + kwds['galaxy_api_key'] = config.user_api_key + training.create_tutorial_from_workflow(kwds, '', tuto_dir, CTX) + # tests + tuto_path = os.path.join(tuto_dir, "tutorial.md") + assert os.path.exists(tuto_path) + with open(tuto_path, 'r') as tuto: + tuto_content = tuto.read() + assert 'topic_name: my_new_topic' in tuto_content + assert 'tutorial_name: new_tuto' in tuto_content + assert '> ### Agenda' in tuto_content + assert '## Get data' in tuto_content + assert '{% icon tool %} with the following parameters:' in tuto_content + assert 'no_toc' in tuto_content + assert '# Conclusion' in tuto_content + # clean after + shutil.rmtree(topic_dir) + + +def test_add_workflow_file(): + """Test :func:`planemo.training.add_workflow_file`.""" + kwds, topic_dir, tuto_dir = prepare_test() + wf_dir = os.path.join(tuto_dir, "workflows") + os.makedirs(wf_dir) + wf_path = os.path.join(wf_dir, "init_workflow.ga") + # test with workflow on a running instance + assert is_galaxy_engine(**kwds) + kwds['galaxy_url'] = 'http://%s:%s' % (kwds['host'], kwds['port']) + with engine_context(CTX, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + workflow_id = config.workflow_id(WF_FP) + kwds['workflow_id'] = workflow_id + kwds['galaxy_api_key'] = config.user_api_key + training.add_workflow_file(kwds, tuto_dir) + assert os.path.exists(wf_path) + os.remove(wf_path) + # test with local workflow + kwds["workflow"] = WF_FP + training.add_workflow_file(kwds, tuto_dir) + assert os.path.exists(wf_path) + # clean after + shutil.rmtree(topic_dir) + + +def test_create_tutorial(): + """Test :func:`planemo.training.create_tutorial`.""" + kwds, topic_dir, tuto_dir = prepare_test() + kwds["templates"] = TRAINING_TEMPLATE_DIR + topic_template_dir = training.get_template_dir(kwds) + metadata_fp = os.path.join(topic_dir, 'metadata.yaml') + tuto_fp = os.path.join(tuto_dir, 'tutorial.md') + data_library_fp = os.path.join(tuto_dir, 'data-library.yaml') + # wo zenodo and wo workflow + training.create_topic(kwds, topic_dir, topic_template_dir) + tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") + training.create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, CTX) + with open(metadata_fp, 'r') as metadata: + metadata_content = metadata.read() + assert 'name: new_tuto' in metadata_content + assert "zenodo_link: ''" in metadata_content + assert 'workflows: false' in metadata_content + assert '**My Tool** {% icon tool %}' in open(tuto_fp, 'r').read() + assert 'name: "Small test files"' in open(data_library_fp, 'r').read() + shutil.rmtree(topic_dir) + shutil.rmtree("metadata") + # w zenodo and wo workflow + kwds["zenodo"] = ZENODO_LINK + training.create_topic(kwds, topic_dir, topic_template_dir) + tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") + training.create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, CTX) + with open(metadata_fp, 'r') as metadata: + metadata_content = metadata.read() + assert 'name: new_tuto' in metadata_content + assert 'zenodo_link: %s' % ZENODO_LINK in metadata_content + assert 'workflows: false' in metadata_content + assert '**My Tool** {% icon tool %}' in open(tuto_fp, 'r').read() + assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() + shutil.rmtree(topic_dir) + shutil.rmtree("metadata") + # w zenodo and w workflow + kwds["workflow"] = WF_FP + training.create_topic(kwds, topic_dir, topic_template_dir) + tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") + training.create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, CTX) + with open(metadata_fp, 'r') as metadata: + metadata_content = metadata.read() + assert 'name: new_tuto' in metadata_content + assert 'zenodo_link: %s' % ZENODO_LINK in metadata_content + assert 'workflows: true' in metadata_content + assert '**FastQC** {% icon tool %} with the following parameters:' in open(tuto_fp, 'r').read() + assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() + assert os.path.exists(os.path.join(tuto_dir, 'workflows', 'init_workflow.ga')) + shutil.rmtree(topic_dir) + shutil.rmtree("metadata") + + +def test_init(): + """Test :func:`planemo.training.init`.""" + kwds, topic_dir, tuto_dir = prepare_test() + kwds["templates"] = TRAINING_TEMPLATE_DIR + topic_dir = os.path.join('topics', topic_dir) + tuto_dir = os.path.join('topics', tuto_dir) + metadata_fp = os.path.join(topic_dir, 'metadata.yaml') + tuto_name = kwds['tutorial_name'] + # new topic, no tutorial name but workflow + kwds['tutorial_name'] = None + kwds['workflow'] = WF_FP + exp_exception = "A tutorial name is needed to create the skeleton of the tutorial from a workflow" + with assert_raises_regexp(Exception, exp_exception): + training.init(CTX, kwds) + # no new topic, no tutorial name but zenodo + kwds['workflow'] = None + kwds['zenodo'] = ZENODO_LINK + exp_exception = "A tutorial name is needed to add Zenodo information" + with assert_raises_regexp(Exception, exp_exception): + training.init(CTX, kwds) + # no new topic, new tutorial + kwds['tutorial_name'] = tuto_name + kwds['workflow'] = None + kwds['zenodo'] = None + training.init(CTX, kwds) + assert kwds['tutorial_title'] in open(metadata_fp, 'r').read() + # no new topic, update tutorial + kwds['tutorial_title'] = 'Totally new tutorial title' + training.init(CTX, kwds) + assert 'Totally new tutorial title' in open(metadata_fp, 'r').read() + # clean after + shutil.rmtree(topic_dir) + shutil.rmtree("metadata") + + +def test_prepare_tuto_update(): + """Test :func:`planemo.training.prepare_tuto_update`.""" + kwds, topic_dir, tuto_dir = prepare_test() + new_topic_name = 'a_topic' + topic_dir = os.path.join('topics', new_topic_name) + # non existing topic + kwds['topic_name'] = new_topic_name + exp_exception = "The topic %s does not exists. It should be created" % new_topic_name + with assert_raises_regexp(Exception, exp_exception): + training.prepare_tuto_update(kwds) + # non existing tutorial + kwds["templates"] = TRAINING_TEMPLATE_DIR + topic_template_dir = training.get_template_dir(kwds) + training.create_topic(kwds, topic_dir, topic_template_dir) + exp_exception = "The tutorial new_tuto does not exists. It should be created" + with assert_raises_regexp(Exception, exp_exception): + training.prepare_tuto_update(kwds) + + +def test_fill_data_library(): + """Test :func:`planemo.training.fill_data_library`.""" + kwds, topic_dir, tuto_dir = prepare_test() + kwds["templates"] = TRAINING_TEMPLATE_DIR + topic_dir = os.path.join('topics', topic_dir) + tuto_dir = os.path.join(topic_dir, 'tutorials', 'new_tuto') + training.init(CTX, kwds) + metadata_fp = os.path.join(topic_dir, 'metadata.yaml') + data_library_fp = os.path.join(tuto_dir, 'data-library.yaml') + # no Zenodo link + kwds['zenodo'] = None + kwds['workflow'] = None + exp_exception = "A Zenodo link should be provided either in the metadata file or as argument of the command" + with assert_raises_regexp(Exception, exp_exception): + training.fill_data_library(CTX, kwds) + # with a given Zenodo link and no Zenodo in metadata + kwds['zenodo'] = ZENODO_LINK + training.fill_data_library(CTX, kwds) + assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() + assert 'zenodo_link: %s' % ZENODO_LINK in open(metadata_fp, 'r').read() + # with a given Zenodo link and Zenodo in metadata + new_z_link = 'https://zenodo.org/record/1324204' + kwds['zenodo'] = new_z_link + training.fill_data_library(CTX, kwds) + assert 'DOI: 10.5281/zenodo.1324204' in open(data_library_fp, 'r').read() + assert 'zenodo_link: %s' % new_z_link in open(metadata_fp, 'r').read() + # with no given Zenodo link + kwds['zenodo'] = None + training.fill_data_library(CTX, kwds) + assert 'DOI: 10.5281/zenodo.1324204' in open(data_library_fp, 'r').read() + assert 'zenodo_link: %s' % new_z_link in open(metadata_fp, 'r').read() + # clean after + shutil.rmtree(topic_dir) + shutil.rmtree("metadata") + + +def test_generate_tuto_from_wf(): + """Test :func:`planemo.training.generate_tuto_from_wf`.""" + kwds, topic_dir, tuto_dir = prepare_test() + topic_dir = os.path.join('topics', topic_dir) + tuto_dir = os.path.join(topic_dir, 'tutorials', 'new_tuto') + kwds["templates"] = TRAINING_TEMPLATE_DIR + training.init(CTX, kwds) + # no workflow + kwds['workflow'] = None + exp_exception = "A path to a local workflow or the id of a workflow on a running Galaxy instance should be provided" + with assert_raises_regexp(Exception, exp_exception): + training.generate_tuto_from_wf(CTX, kwds) + # with workflow + kwds['workflow'] = WF_FP + training.generate_tuto_from_wf(CTX, kwds) + tuto_fp = os.path.join(tuto_dir, 'tutorial.md') + metadata_fp = os.path.join(topic_dir, 'metadata.yaml') + assert 'workflows: true' in open(metadata_fp, 'r').read() + assert '**FastQC** {% icon tool %} with the following parameters:' in open(tuto_fp, 'r').read() + assert os.path.exists(os.path.join(tuto_dir, 'workflows', 'init_workflow.ga')) From 07b8fe33940598c322744fc98b4e8b328d4866da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Wed, 8 Aug 2018 17:43:29 +0200 Subject: [PATCH 16/26] Add command to generate skeleton of existing tutorial --- .../cmd_training_generate_tuto_from_wf.py | 18 ++++ planemo/options.py | 8 ++ ...test_cmd_training_generate_tuto_from_wf.py | 85 +++++++++++++++++++ 3 files changed, 111 insertions(+) create mode 100644 planemo/commands/cmd_training_generate_tuto_from_wf.py create mode 100644 tests/test_cmd_training_generate_tuto_from_wf.py diff --git a/planemo/commands/cmd_training_generate_tuto_from_wf.py b/planemo/commands/cmd_training_generate_tuto_from_wf.py new file mode 100644 index 000000000..51bf68a80 --- /dev/null +++ b/planemo/commands/cmd_training_generate_tuto_from_wf.py @@ -0,0 +1,18 @@ +"""Module describing the planemo ``training_generate_tuto_from_wf`` command.""" + +import click + +from planemo import options +from planemo import training +from planemo.cli import command_function + + +@click.command('training_generate_tuto_from_wf') +@options.optional_tools_arg(multiple=True, allow_uris=True) +@options.training_generate_tuto_from_wf_options() +@options.galaxy_serve_options() +@command_function +def cli(ctx, uris, **kwds): + """Create tutorial skeleton from workflow.""" + kwds["no_dependency_resolution"] = True + training.generate_tuto_from_wf(ctx, kwds) diff --git a/planemo/options.py b/planemo/options.py index c57202ac9..fcc3c2a27 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1244,6 +1244,14 @@ def training_fill_data_library_options(): ) +def training_generate_tuto_from_wf_options(): + return _compose( + training_topic_name_option(), + training_tutorial_name_req_option(), + training_tutorial_worflow_option() + ) + + def shed_fail_fast_option(): return planemo_option( "--fail_fast", diff --git a/tests/test_cmd_training_generate_tuto_from_wf.py b/tests/test_cmd_training_generate_tuto_from_wf.py new file mode 100644 index 000000000..559078a0a --- /dev/null +++ b/tests/test_cmd_training_generate_tuto_from_wf.py @@ -0,0 +1,85 @@ +"""Tests for the ``training_generate_tuto_from_wf`` command.""" +import os +import shutil + +from .test_utils import ( + CliTestCase, + TEST_DATA_DIR +) + + +def create_tutorial_dir(topic_n, tuto_n, metadata_n): + topic_dir = os.path.join("topics", topic_n) + tuto_dir = os.path.join(topic_dir, "tutorials", tuto_n) + metadata_path = os.path.join(topic_dir, "metadata.yaml") + if not os.path.isdir(topic_dir): + os.makedirs(topic_dir) + if not os.path.isdir(tuto_dir): + os.makedirs(tuto_dir) + if not os.path.exists(metadata_path): + metadata = os.path.join(TEST_DATA_DIR, metadata_n) + shutil.copy(metadata, metadata_path) + + +def remove_topics(): + shutil.rmtree("topics") + + +class CmdTrainingGenerateTutoFromWfTestCase(CliTestCase): + """Container class defining test cases for the ``training_generate_tuto_from_wf`` command.""" + + def test_training_generate_tuto_from_wf_command_empty(self): + with self._isolate(): + training_fill_data_library_command = [ + "training_generate_tuto_from_wf" + ] + self._check_exit_code(training_fill_data_library_command, exit_code=2) + + def test_training_generate_tuto_from_wf_command_topic(self): + with self._isolate(): + training_fill_data_library_command = [ + "training_generate_tuto_from_wf", + "--topic_name", "test" + ] + self._check_exit_code(training_fill_data_library_command, exit_code=2) + + def test_training_generate_tuto_from_wf_command_local_wf(self): + with self._isolate(): + topic_n = "test" + tuto_n = "test" + test_workflow = os.path.join(TEST_DATA_DIR, "test_workflow_1.ga") + # working test + create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + training_init_command = [ + "training_generate_tuto_from_wf", + "--topic_name", "test", + "--tutorial_name", "test", + "--workflow", test_workflow + ] + self._check_exit_code(training_init_command, exit_code=-1) + remove_topics() + + def test_training_generate_tuto_from_wf_command_remote_wf(self): + with self._isolate(): + topic_n = "test" + tuto_n = "test" + # not working test + training_init_command = [ + "training_generate_tuto_from_wf", + "--topic_name", "test", + "--tutorial_name", "test", + "--workflow_id", "ID" + ] + self._check_exit_code(training_init_command, exit_code=-1) + # not working test + create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + training_init_command = [ + "training_generate_tuto_from_wf", + "--topic_name", "test", + "--tutorial_name", "test", + "--workflow_id", "ID", + "--galaxy_url", "https://usegalaxy.eu/", + "--galaxy_api_key", "API" + ] + self._check_exit_code(training_init_command, exit_code=-1) + remove_topics() From 64ac6523ddeb2325a67b89edf9a8719a14623dac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Wed, 8 Aug 2018 17:44:18 +0200 Subject: [PATCH 17/26] Formatting of command testing --- tests/test_cmd_training_fill_data_library.py | 2 +- tests/test_cmd_training_init.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_cmd_training_fill_data_library.py b/tests/test_cmd_training_fill_data_library.py index 21a755cd8..ba7c2bafd 100644 --- a/tests/test_cmd_training_fill_data_library.py +++ b/tests/test_cmd_training_fill_data_library.py @@ -57,8 +57,8 @@ def test_training_fill_data_library_command_tutorial_topic(self): "--datatypes", datatype ] remove_topics() - # working self._check_exit_code(training_fill_data_library_command, exit_code=-1) + # working create_tutorial_dir(topic_n, tuto_n, "training_metadata_w_zenodo.yaml") training_fill_data_library_command = [ "training_fill_data_library", diff --git a/tests/test_cmd_training_init.py b/tests/test_cmd_training_init.py index 2bb8298aa..5fd1842f1 100644 --- a/tests/test_cmd_training_init.py +++ b/tests/test_cmd_training_init.py @@ -115,7 +115,7 @@ def test_training_init_command_tutorial_local_wf(self): "--workflow", test_workflow, "--templates", training_template ] - self._check_exit_code(training_init_command, exit_code=0) + self._check_exit_code(training_init_command, exit_code=-1) def test_training_init_command_tutorial_remote_wf(self): with self._isolate(): @@ -139,4 +139,4 @@ def test_training_init_command_tutorial_remote_wf(self): "--galaxy_api_key", "API", "--templates", training_template ] - self._check_exit_code(training_init_command, exit_code=0) + self._check_exit_code(training_init_command, exit_code=-1) From ef7a2e553908118190209955aa110fd589b684a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Thu, 9 Aug 2018 09:02:44 +0200 Subject: [PATCH 18/26] Rename generate tuto command --- ...wf.py => cmd_training_generate_from_wf.py} | 4 ++-- ... => test_cmd_training_generate_from_wf.py} | 22 +++++++++---------- 2 files changed, 13 insertions(+), 13 deletions(-) rename planemo/commands/{cmd_training_generate_tuto_from_wf.py => cmd_training_generate_from_wf.py} (77%) rename tests/{test_cmd_training_generate_tuto_from_wf.py => test_cmd_training_generate_from_wf.py} (80%) diff --git a/planemo/commands/cmd_training_generate_tuto_from_wf.py b/planemo/commands/cmd_training_generate_from_wf.py similarity index 77% rename from planemo/commands/cmd_training_generate_tuto_from_wf.py rename to planemo/commands/cmd_training_generate_from_wf.py index 51bf68a80..ceb10b7d5 100644 --- a/planemo/commands/cmd_training_generate_tuto_from_wf.py +++ b/planemo/commands/cmd_training_generate_from_wf.py @@ -1,4 +1,4 @@ -"""Module describing the planemo ``training_generate_tuto_from_wf`` command.""" +"""Module describing the planemo ``training_generate_from_wf`` command.""" import click @@ -7,7 +7,7 @@ from planemo.cli import command_function -@click.command('training_generate_tuto_from_wf') +@click.command('training_generate_from_wf') @options.optional_tools_arg(multiple=True, allow_uris=True) @options.training_generate_tuto_from_wf_options() @options.galaxy_serve_options() diff --git a/tests/test_cmd_training_generate_tuto_from_wf.py b/tests/test_cmd_training_generate_from_wf.py similarity index 80% rename from tests/test_cmd_training_generate_tuto_from_wf.py rename to tests/test_cmd_training_generate_from_wf.py index 559078a0a..d61c6b53b 100644 --- a/tests/test_cmd_training_generate_tuto_from_wf.py +++ b/tests/test_cmd_training_generate_from_wf.py @@ -1,4 +1,4 @@ -"""Tests for the ``training_generate_tuto_from_wf`` command.""" +"""Tests for the ``training_generate_from_wf`` command.""" import os import shutil @@ -25,25 +25,25 @@ def remove_topics(): shutil.rmtree("topics") -class CmdTrainingGenerateTutoFromWfTestCase(CliTestCase): - """Container class defining test cases for the ``training_generate_tuto_from_wf`` command.""" +class CmdTrainingGenerateFromWfTestCase(CliTestCase): + """Container class defining test cases for the ``training_generate_from_wf`` command.""" - def test_training_generate_tuto_from_wf_command_empty(self): + def test_training_generate_from_wf_command_empty(self): with self._isolate(): training_fill_data_library_command = [ - "training_generate_tuto_from_wf" + "training_generate_from_wf" ] self._check_exit_code(training_fill_data_library_command, exit_code=2) - def test_training_generate_tuto_from_wf_command_topic(self): + def test_training_generate_from_wf_command_topic(self): with self._isolate(): training_fill_data_library_command = [ - "training_generate_tuto_from_wf", + "training_generate_from_wf", "--topic_name", "test" ] self._check_exit_code(training_fill_data_library_command, exit_code=2) - def test_training_generate_tuto_from_wf_command_local_wf(self): + def test_training_generate_from_wf_command_local_wf(self): with self._isolate(): topic_n = "test" tuto_n = "test" @@ -59,13 +59,13 @@ def test_training_generate_tuto_from_wf_command_local_wf(self): self._check_exit_code(training_init_command, exit_code=-1) remove_topics() - def test_training_generate_tuto_from_wf_command_remote_wf(self): + def test_training_generate_from_wf_command_remote_wf(self): with self._isolate(): topic_n = "test" tuto_n = "test" # not working test training_init_command = [ - "training_generate_tuto_from_wf", + "training_generate_from_wf", "--topic_name", "test", "--tutorial_name", "test", "--workflow_id", "ID" @@ -74,7 +74,7 @@ def test_training_generate_tuto_from_wf_command_remote_wf(self): # not working test create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") training_init_command = [ - "training_generate_tuto_from_wf", + "training_generate_from_wf", "--topic_name", "test", "--tutorial_name", "test", "--workflow_id", "ID", From 10522675e765a459fd1b28c6c6573d539a2598fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Thu, 9 Aug 2018 09:06:29 +0200 Subject: [PATCH 19/26] Fix failing test --- tests/test_cmd_training_generate_from_wf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_cmd_training_generate_from_wf.py b/tests/test_cmd_training_generate_from_wf.py index d61c6b53b..0c36b068e 100644 --- a/tests/test_cmd_training_generate_from_wf.py +++ b/tests/test_cmd_training_generate_from_wf.py @@ -81,5 +81,5 @@ def test_training_generate_from_wf_command_remote_wf(self): "--galaxy_url", "https://usegalaxy.eu/", "--galaxy_api_key", "API" ] - self._check_exit_code(training_init_command, exit_code=-1) + self._check_exit_code(training_init_command, exit_code=0) remove_topics() From 83d2c7db0efe147b5fc27e855dd2b9176f325dad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Thu, 9 Aug 2018 11:28:24 +0200 Subject: [PATCH 20/26] Fix function docstrings --- planemo/training.py | 83 +++++++++++--------- tests/test_cmd_training_fill_data_library.py | 11 +-- tests/test_cmd_training_generate_from_wf.py | 13 +-- tests/test_cmd_training_init.py | 8 ++ tests/test_training.py | 15 ++-- 5 files changed, 74 insertions(+), 56 deletions(-) diff --git a/planemo/training.py b/planemo/training.py index e1842c844..5924dd51f 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -1,4 +1,4 @@ -""" gtdk: Galaxy training development kit """ +"""gtdk: Galaxy training development kit.""" import collections import json @@ -191,14 +191,14 @@ def load_yaml(filepath): - """Load the content of a YAML file to a dictionary""" + """Load the content of a YAML file to a dictionary.""" with open(filepath, "r") as m_file: content = yaml.load(m_file) return content def save_to_yaml(content, filepath): - """Save a dictionary to a YAML file""" + """Save a dictionary to a YAML file.""" with open(filepath, 'w') as stream: yaml.safe_dump(content, stream, @@ -211,7 +211,7 @@ def save_to_yaml(content, filepath): def get_template_dir(kwds): - """Check and return the templates directory""" + """Check and return the templates directory.""" if not kwds["templates"]: template_dir = "templates" if not os.path.isdir(template_dir): @@ -222,7 +222,7 @@ def get_template_dir(kwds): def update_top_metadata_file(filepath, topic_name, tuto_name="tutorial1", keep=True): - """Update metadata on the top or delete a (tutorial or index) file """ + """Update metadata on the top or delete a (tutorial or index) file.""" if keep: with open(filepath, "r") as in_f: content = in_f.read() @@ -238,7 +238,8 @@ def update_top_metadata_file(filepath, topic_name, tuto_name="tutorial1", keep=T def create_topic(kwds, topic_dir, template_dir): - """Create the skeleton of a new topic: + """ + Create the skeleton of a new topic. 1. copy templates 2. update the index.md to match your topic's name @@ -280,7 +281,7 @@ def create_topic(kwds, topic_dir, template_dir): def update_tutorial(kwds, tuto_dir, topic_dir): - """Update the metadata information of a tutorial and add it if not there""" + """Update the metadata information of a tutorial and add it if not there.""" # update the metadata file to add the new tutorial metadata_path = os.path.join(topic_dir, "metadata.yaml") @@ -327,7 +328,7 @@ def update_tutorial(kwds, tuto_dir, topic_dir): def get_zenodo_record(zenodo_link): - """Get the content of a Zenodo record""" + """Get the content of a Zenodo record.""" # get the record in the Zenodo link if 'doi' in zenodo_link: z_record = zenodo_link.split('.')[-1] @@ -346,7 +347,7 @@ def get_zenodo_record(zenodo_link): def get_galaxy_datatype(z_ext, datatype_fp): - """Get the Galaxy datatype corresponding to a Zenodo file type""" + """Get the Galaxy datatype corresponding to a Zenodo file type.""" g_datatype = '' datatypes = load_yaml(datatype_fp) if z_ext in datatypes: @@ -358,8 +359,9 @@ def get_galaxy_datatype(z_ext, datatype_fp): def get_files_from_zenodo(z_link, datatype_fp): - """Extract a list of URLs and dictionary describing the files from the JSON """ - """output of the Zenodo API""" + """ + Extract a list of URLs and dictionary describing the files from the JSON output of the Zenodo API. + """ z_record, req_res = get_zenodo_record(z_link) links = [] @@ -381,7 +383,7 @@ def get_files_from_zenodo(z_link, datatype_fp): def init_data_lib(data_lib_filepath): - """Init the data library dictionary""" + """Init the data library dictionary.""" if os.path.exists(data_lib_filepath): data_lib = load_yaml(data_lib_filepath) else: @@ -398,7 +400,7 @@ def init_data_lib(data_lib_filepath): def prepare_data_library(files, kwds, z_record, tuto_dir): - """Fill or update the data library file""" + """Fill or update the data library file.""" data_lib_filepath = os.path.join(tuto_dir, "data-library.yaml") data_lib = init_data_lib(data_lib_filepath) # get topic or create new one @@ -446,7 +448,7 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): def prepare_data_library_from_zenodo(kwds, tuto_dir): - """Get the list of URLs of the files on Zenodo and fill the data library file""" + """Get the list of URLs of the files on Zenodo and fill the data library file.""" links = [] if not kwds['zenodo']: return links @@ -456,8 +458,12 @@ def prepare_data_library_from_zenodo(kwds, tuto_dir): def get_tool_input(tool_desc): - """Get a dictionary with label being the tool parameter name and the value the description - of the parameter extracted from the show_tool function of bioblend""" + """ + Get a dictionary with the tool descriptions. + + The labels are the tool parameter name and the value the description + of the parameter extracted from the show_tool function of bioblend + """ tool_inp = collections.OrderedDict() for inp in tool_desc["inputs"]: tool_inp.setdefault(inp['name'], inp) @@ -465,7 +471,7 @@ def get_tool_input(tool_desc): def get_wf_tool_description(wf, gi): - """Get a dictionary with description of inputs of all tools in a workflow""" + """Get a dictionary with description of inputs of all tools in a workflow.""" tools = {} for s in wf['steps']: step = wf['steps'][s] @@ -480,7 +486,7 @@ def get_wf_tool_description(wf, gi): def get_wf_tool_from_local_galaxy(kwds, wf_filepath, ctx): - """Server local Galaxy and get the workflow dictionary""" + """Server local Galaxy and get the workflow dictionary.""" assert is_galaxy_engine(**kwds) runnable = for_path(wf_filepath) with engine_context(ctx, **kwds) as galaxy_engine: @@ -492,7 +498,7 @@ def get_wf_tool_from_local_galaxy(kwds, wf_filepath, ctx): def get_wf_tools_from_running_galaxy(kwds): - """Get the workflow dictionary from a running Galaxy instance with the workflow installed there""" + """Get the workflow dictionary from a running Galaxy instance with the workflow installed on it.""" gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) wf = gi.workflows.export_workflow_dict(kwds['workflow_id']) tools = get_wf_tool_description(wf, gi) @@ -500,7 +506,7 @@ def get_wf_tools_from_running_galaxy(kwds): def get_input_tool_name(step_id, steps): - """Get the string with the name of the tool that generated an input""" + """Get the string with the name of the tool that generated an input.""" inp_provenance = '' inp_prov_id = str(step_id) if inp_prov_id in steps: @@ -513,7 +519,7 @@ def get_input_tool_name(step_id, steps): def format_inputs(step_inputs, tp_desc, wf_steps, level): - """Format the inputs of a step""" + """Format the inputs of a step.""" inputlist = '' for inp_n, inp in step_inputs.items(): if inp_n != tp_desc['name']: @@ -547,7 +553,7 @@ def format_inputs(step_inputs, tp_desc, wf_steps, level): def get_wf_step_inputs(step_inp): - """Get the inputs from a workflow step and format them""" + """Get the inputs from a workflow step and format them.""" step_inputs = {} for inp_n, inp in step_inp.items(): if inp_n.find('|') != -1: @@ -570,15 +576,15 @@ def get_wf_step_inputs(step_inp): def json_load(string): - """Transform a string into a dictionary""" - if ":" in string: + """Transform a string into a dictionary.""" + if ":" in string and '{' in string: return json.loads(string) else: return string def get_lower_params(step_params, name): - """Get the parameters from workflow that are below name in the hierarchy""" + """Get the parameters from workflow that are below name in the hierarchy.""" params = json_load(step_params) if name in params: params = json_load(params[name]) @@ -586,7 +592,7 @@ def get_lower_params(step_params, name): def get_lower_inputs(step_inputs, name): - """Get the inputs from workflow that are below name in the hierarchy""" + """Get the inputs from workflow that are below name in the hierarchy.""" inputs = {} if name in step_inputs: inputs = step_inputs[name] @@ -596,7 +602,7 @@ def get_lower_inputs(step_inputs, name): def format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): - """Format the description (label and value) for parameters in a section""" + """Format the description (label and value) for parameters in a section.""" section_paramlist = '' # get section description context = {'space': SPACE * level, 'section_label': tp_desc['title']} @@ -612,7 +618,7 @@ def format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): - """Format the description (label and value) for parameters in a conditional""" + """Format the description (label and value) for parameters in a conditional.""" conditional_paramlist = '' # Get conditional parameter test_param = tp_desc['test_param'] @@ -638,7 +644,7 @@ def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_s def format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): - """Format the description (label and value) for parameters in a repeat""" + """Format the description (label and value) for parameters in a repeat.""" repeat_inp_desc = get_tool_input(tp_desc) params = get_lower_params(step_params, tp_desc['name']) inputs = get_lower_inputs(step_inputs, tp_desc['name']) @@ -663,7 +669,7 @@ def format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) def get_param_value(step_params, tp_desc, force_default=False): - """Get value of a 'simple' parameter if different from the default value, None otherwise""" + """Get value of a 'simple' parameter if different from the default value, None otherwise.""" param_value = '' if tp_desc['value'] == step_params and not force_default: param_value = None @@ -685,7 +691,7 @@ def get_param_value(step_params, tp_desc, force_default=False): def format_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, force_default=False): - """Format the parameter description (label and value) given the type of parameter""" + """Format the parameter description (label and value) given the type of parameter.""" paramlist = '' if 'type' not in tp_desc: raise ValueError("No type for the paramater %s" % tp_desc['name']) @@ -710,8 +716,7 @@ def format_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, force_ def get_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, should_be_there=False): - """Parse the parameters of the tool and return the formatted list of the - parameters and values set in the workflow""" + """Parse the parameters of the tool and return a formatted list with the values set in the workflow.""" paramlist = '' for n, tp_d in tp_desc.items(): if n not in step_params: @@ -726,7 +731,7 @@ def get_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, should_be def get_handson_box(step, steps, tools): - """Get the string for an hands-on box based on a step in a workflow""" + """Get the string for an hands-on box based on a step in a workflow.""" # get input (if none: input step) step_inputs = get_wf_step_inputs(step['input_connections']) if not step_inputs: @@ -743,7 +748,7 @@ def get_handson_box(step, steps, tools): def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): - """Create tutorial structure from the workflow file""" + """Create tutorial structure from the workflow file.""" # load workflow if kwds['workflow_id']: if not kwds['galaxy_url']: @@ -797,7 +802,7 @@ def add_workflow_file(kwds, tuto_dir): def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): - """Create the skeleton of a new tutorial""" + """Create the skeleton of a new tutorial.""" # copy or rename templates template_tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") if os.path.isdir(template_tuto_path): @@ -853,7 +858,7 @@ def init(ctx, kwds): def prepare_tuto_update(kwds): - """Prepare the update of a tutorial""" + """Prepare the update of a tutorial.""" topics_dir = "topics" if not os.path.isdir(topics_dir): os.makedirs(topics_dir) @@ -877,7 +882,7 @@ def prepare_tuto_update(kwds): def fill_data_library(ctx, kwds): - """Fill a data library for a tutorial""" + """Fill a data library for a tutorial.""" topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata = prepare_tuto_update(kwds) # get the zenodo link @@ -913,7 +918,7 @@ def fill_data_library(ctx, kwds): def generate_tuto_from_wf(ctx, kwds): - """Generate the skeleton of a tutorial from a workflow""" + """Generate the skeleton of a tutorial from a workflow.""" topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata = prepare_tuto_update(kwds) if kwds["workflow"] or kwds['workflow_id']: kwds["zenodo"] = '' diff --git a/tests/test_cmd_training_fill_data_library.py b/tests/test_cmd_training_fill_data_library.py index ba7c2bafd..f1031ec91 100644 --- a/tests/test_cmd_training_fill_data_library.py +++ b/tests/test_cmd_training_fill_data_library.py @@ -9,6 +9,7 @@ def create_tutorial_dir(topic_n, tuto_n, metadata_n): + """Create the tutorial directory structure.""" topic_dir = os.path.join("topics", topic_n) tuto_dir = os.path.join(topic_dir, "tutorials", tuto_n) metadata_path = os.path.join(topic_dir, "metadata.yaml") @@ -21,14 +22,11 @@ def create_tutorial_dir(topic_n, tuto_n, metadata_n): shutil.copy(metadata, metadata_path) -def remove_topics(): - shutil.rmtree("topics") - - class CmdTrainingFillDataLibraryTestCase(CliTestCase): """Container class defining test cases for the ``training_fill_data_library`` command.""" def test_training_fill_data_library_command_empty(self): + """Test training_fill_data_library command with no arguments.""" with self._isolate(): training_fill_data_library_command = [ "training_fill_data_library" @@ -36,6 +34,7 @@ def test_training_fill_data_library_command_empty(self): self._check_exit_code(training_fill_data_library_command, exit_code=2) def test_training_fill_data_library_command_topic(self): + """Test training_fill_data_library command with only topic name.""" with self._isolate(): training_fill_data_library_command = [ "training_fill_data_library", @@ -44,6 +43,7 @@ def test_training_fill_data_library_command_topic(self): self._check_exit_code(training_fill_data_library_command, exit_code=2) def test_training_fill_data_library_command_tutorial_topic(self): + """Test training_fill_data_library command with tutorial name.""" with self._isolate(): topic_n = "test" tuto_n = "test" @@ -56,7 +56,7 @@ def test_training_fill_data_library_command_tutorial_topic(self): "--tutorial_name", tuto_n, "--datatypes", datatype ] - remove_topics() + shutil.rmtree("topics") self._check_exit_code(training_fill_data_library_command, exit_code=-1) # working create_tutorial_dir(topic_n, tuto_n, "training_metadata_w_zenodo.yaml") @@ -69,6 +69,7 @@ def test_training_fill_data_library_command_tutorial_topic(self): self._check_exit_code(training_fill_data_library_command, exit_code=0) def test_training_fill_data_library_command_tutorial_zenodo(self): + """Test training_fill_data_library command with zenodo link.""" with self._isolate(): topic_n = "test" tuto_n = "test" diff --git a/tests/test_cmd_training_generate_from_wf.py b/tests/test_cmd_training_generate_from_wf.py index 0c36b068e..e3533a7ec 100644 --- a/tests/test_cmd_training_generate_from_wf.py +++ b/tests/test_cmd_training_generate_from_wf.py @@ -9,6 +9,7 @@ def create_tutorial_dir(topic_n, tuto_n, metadata_n): + """Create the tutorial directory structure.""" topic_dir = os.path.join("topics", topic_n) tuto_dir = os.path.join(topic_dir, "tutorials", tuto_n) metadata_path = os.path.join(topic_dir, "metadata.yaml") @@ -21,14 +22,11 @@ def create_tutorial_dir(topic_n, tuto_n, metadata_n): shutil.copy(metadata, metadata_path) -def remove_topics(): - shutil.rmtree("topics") - - class CmdTrainingGenerateFromWfTestCase(CliTestCase): """Container class defining test cases for the ``training_generate_from_wf`` command.""" def test_training_generate_from_wf_command_empty(self): + """Test training_generate_from_wf command with no arguments.""" with self._isolate(): training_fill_data_library_command = [ "training_generate_from_wf" @@ -36,6 +34,7 @@ def test_training_generate_from_wf_command_empty(self): self._check_exit_code(training_fill_data_library_command, exit_code=2) def test_training_generate_from_wf_command_topic(self): + """Test training_generate_from_wf command with only topic name.""" with self._isolate(): training_fill_data_library_command = [ "training_generate_from_wf", @@ -44,6 +43,7 @@ def test_training_generate_from_wf_command_topic(self): self._check_exit_code(training_fill_data_library_command, exit_code=2) def test_training_generate_from_wf_command_local_wf(self): + """Test training_generate_from_wf command with local workflow.""" with self._isolate(): topic_n = "test" tuto_n = "test" @@ -57,9 +57,10 @@ def test_training_generate_from_wf_command_local_wf(self): "--workflow", test_workflow ] self._check_exit_code(training_init_command, exit_code=-1) - remove_topics() + shutil.rmtree("topics") def test_training_generate_from_wf_command_remote_wf(self): + """Test training_generate_from_wf command with workflow on running instance.""" with self._isolate(): topic_n = "test" tuto_n = "test" @@ -82,4 +83,4 @@ def test_training_generate_from_wf_command_remote_wf(self): "--galaxy_api_key", "API" ] self._check_exit_code(training_init_command, exit_code=0) - remove_topics() + shutil.rmtree("topics") diff --git a/tests/test_cmd_training_init.py b/tests/test_cmd_training_init.py index 5fd1842f1..2f04991d6 100644 --- a/tests/test_cmd_training_init.py +++ b/tests/test_cmd_training_init.py @@ -12,6 +12,7 @@ class CmdTrainingInitTestCase(CliTestCase): """Container class defining test cases for the ``training_init`` command.""" def test_training_init_command_by_default(self): + """Test training_init command with only topic name.""" with self._isolate(): training_init_command = [ "training_init", @@ -20,6 +21,7 @@ def test_training_init_command_by_default(self): self._check_exit_code(training_init_command, exit_code=-1) def test_training_init_command_templates(self): + """Test training_init command with template path.""" with self._isolate(): training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") training_init_command = [ @@ -30,6 +32,7 @@ def test_training_init_command_templates(self): self._check_exit_code(training_init_command, exit_code=0) def test_training_init_command_topic(self): + """Test training_init command to create new topic.""" with self._isolate(): training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # working test @@ -54,6 +57,7 @@ def test_training_init_command_topic(self): self._check_exit_code(training_init_command, exit_code=2) def test_training_init_command_tutorial_no_topic(self): + """Test training_init command with tutorial but no topic.""" with self._isolate(): training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # working test @@ -65,6 +69,7 @@ def test_training_init_command_tutorial_no_topic(self): self._check_exit_code(training_init_command, exit_code=2) def test_training_init_command_tutorial(self): + """Test training_init command to create new tutorial.""" with self._isolate(): training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # working test @@ -80,6 +85,7 @@ def test_training_init_command_tutorial(self): self._check_exit_code(training_init_command, exit_code=0) def test_training_init_command_tutorial_zenodo(self): + """Test training_init command to create new tutorial with zenodo.""" with self._isolate(): training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") @@ -104,6 +110,7 @@ def test_training_init_command_tutorial_zenodo(self): self._check_exit_code(training_init_command, exit_code=0) def test_training_init_command_tutorial_local_wf(self): + """Test training_init command to create new tutorial with local workflow.""" with self._isolate(): training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") test_workflow = os.path.join(TEST_DATA_DIR, "test_workflow_1.ga") @@ -118,6 +125,7 @@ def test_training_init_command_tutorial_local_wf(self): self._check_exit_code(training_init_command, exit_code=-1) def test_training_init_command_tutorial_remote_wf(self): + """Test training_init command to create new tutorial with workflow on running instance.""" with self._isolate(): training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # not working test diff --git a/tests/test_training.py b/tests/test_training.py index 789eb6701..0615465d0 100644 --- a/tests/test_training.py +++ b/tests/test_training.py @@ -1,3 +1,5 @@ +"""Training training functions.""" + import json import os import shutil @@ -29,6 +31,7 @@ def prepare_test(): + """Prepare kwds, topic_dir and tuto_dir.""" topic_name = 'my_new_topic' topic_dir = topic_name tuto_name = "new_tuto" @@ -128,7 +131,7 @@ def test_save_to_yaml(): def test_get_template_dir_1(): - """Test :func:`planemo.training.get_template_dir`: test exception raising""" + """Test :func:`planemo.training.get_template_dir`: test exception raising.""" kwds = {"templates": None} exp_exception = "This script needs to be run in the training material repository" with assert_raises_regexp(Exception, exp_exception): @@ -136,7 +139,7 @@ def test_get_template_dir_1(): def test_get_template_dir_2(): - """Test :func:`planemo.training.get_template_dir`: test default return value""" + """Test :func:`planemo.training.get_template_dir`: test default return value.""" kwds = {"templates": None} os.makedirs("templates") assert training.get_template_dir(kwds) == "templates" @@ -144,7 +147,7 @@ def test_get_template_dir_2(): def test_get_template_dir_3(): - """Test :func:`planemo.training.get_template_dir`: test return value""" + """Test :func:`planemo.training.get_template_dir`: test return value.""" template_path = "temp" kwds = {"templates": template_path} assert training.get_template_dir(kwds) == template_path @@ -367,7 +370,7 @@ def test_get_tool_input(): def check_tools(tools): - """Test the tool return from get_wf_tool_description""" + """Test the tool return from get_wf_tool_description.""" assert 'FastQC' in tools assert 'input_file' in tools['FastQC'] @@ -397,7 +400,7 @@ def test_get_wf_tool_description(): def check_workflow(wf): - """Test the worflow return""" + """Test the worflow return.""" assert 'steps' in wf assert '1' in wf['steps'] assert 'name' in wf['steps']['1'] @@ -444,7 +447,7 @@ def test_get_input_tool_name(): def get_wf_a_tools(): - """Get workflow and tool of a workflow""" + """Get workflow and tool of a workflow.""" kwds, topic_dir, tuto_dir = prepare_test() assert is_galaxy_engine(**kwds) with engine_context(CTX, **kwds) as galaxy_engine: From 0415b765bf0307db86d2234788e1ddeaea659a68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Thu, 9 Aug 2018 13:12:51 +0200 Subject: [PATCH 21/26] Fix parameters with extra quotes and sub inputs for conditionals --- planemo/training.py | 6 ++++-- tests/test_training.py | 6 +++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/planemo/training.py b/planemo/training.py index 5924dd51f..c6c506bf6 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -623,6 +623,7 @@ def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_s # Get conditional parameter test_param = tp_desc['test_param'] params = get_lower_params(step_params, tp_desc['name']) + inputs = get_lower_inputs(step_inputs, tp_desc['name']) conditional_paramlist += format_param_desc( params[test_param['name']], step_inputs, @@ -636,7 +637,7 @@ def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_s if len(case['inputs']) > 0: conditional_paramlist += get_param_desc( params, - step_inputs, + inputs, get_tool_input(case), level+1, wf_steps) @@ -671,6 +672,8 @@ def format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) def get_param_value(step_params, tp_desc, force_default=False): """Get value of a 'simple' parameter if different from the default value, None otherwise.""" param_value = '' + if '"' in step_params: + step_params = step_params.replace('"', '') if tp_desc['value'] == step_params and not force_default: param_value = None elif tp_desc['type'] == 'boolean': @@ -697,7 +700,6 @@ def format_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, force_ raise ValueError("No type for the paramater %s" % tp_desc['name']) if tp_desc['type'] == 'data' or tp_desc['type'] == 'data_collection': paramlist += format_inputs(step_inputs, tp_desc, wf_steps, level) - # info("data_collection parameters are currently not supported") elif tp_desc['type'] == 'section': paramlist += format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) elif tp_desc['type'] == 'conditional': diff --git a/tests/test_training.py b/tests/test_training.py index 0615465d0..a208e5f56 100644 --- a/tests/test_training.py +++ b/tests/test_training.py @@ -508,7 +508,8 @@ def test_get_lower_params(): assert 'name' in training.get_lower_params(step_params, 'n1') assert training.get_lower_params(step_params, 'name') == '1' print(training.get_lower_params('{"name": "1"}', 'n1')) - assert 'name' in training.get_lower_params('{"name": "1"}', 'n1') + assert 'name' in training.get_lower_params('{"name": "1"}', 'name') + assert 'name' in training.get_lower_params('name:1', 'name') def test_get_lower_inputs(): @@ -589,6 +590,9 @@ def test_get_param_value(): # test select tp_desc = {'type': 'select', 'options': [['opt1', 'val1'], ['opt2', 'val2']], 'value': ''} assert training.get_param_value('val1', tp_desc) == 'opt1' + # test select with extra quotes + tp_desc = {'type': 'select', 'options': [['opt1', 'val1'], ['opt2', 'val2']], 'value': ''} + assert training.get_param_value('"val1"', tp_desc) == 'opt1' # test data_column tp_desc = {'type': 'data_column', 'value': ''} assert training.get_param_value('1', tp_desc) == 'c1' From cf02ea8bdce55157183d3f1bf3cf659a91e8cf44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Thu, 16 Aug 2018 17:48:58 +0200 Subject: [PATCH 22/26] Add test for none or empty parameters --- planemo/training.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/planemo/training.py b/planemo/training.py index c6c506bf6..5e1811613 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -577,7 +577,7 @@ def get_wf_step_inputs(step_inp): def json_load(string): """Transform a string into a dictionary.""" - if ":" in string and '{' in string: + if string is not None and ":" in string and '{' in string: return json.loads(string) else: return string @@ -624,8 +624,9 @@ def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_s test_param = tp_desc['test_param'] params = get_lower_params(step_params, tp_desc['name']) inputs = get_lower_inputs(step_inputs, tp_desc['name']) + cond_param = step_params[test_param['name']] conditional_paramlist += format_param_desc( - params[test_param['name']], + cond_param, step_inputs, test_param, level, @@ -633,7 +634,7 @@ def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_s force_default=True) # Get parameters in the when for case in tp_desc['cases']: - if case['value'] == params[test_param['name']]: + if case['value'] == cond_param: if len(case['inputs']) > 0: conditional_paramlist += get_param_desc( params, @@ -728,6 +729,8 @@ def get_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, should_be raise ValueError("%s not in workflow" % n) else: step_param = get_lower_params(step_params, n) + if step_param is None: + continue paramlist += format_param_desc(step_param, step_inputs, tp_d, level, wf_steps) return paramlist @@ -914,7 +917,6 @@ def fill_data_library(ctx, kwds): 'datatypes': kwds['datatypes'] } prepare_data_library_from_zenodo(topic_kwds, tuto_dir) - print(metadata) # update the metadata save_to_yaml(metadata, metadata_path) From 1d902b9d4e56f989dc922508b85b155b88e9296e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Mon, 27 Aug 2018 19:24:33 +0200 Subject: [PATCH 23/26] Update metadata description, remove templates and fix tests --- planemo/options.py | 2 +- planemo/training.py | 569 +++++++++++------- project_templates/training/README.md | 4 - project_templates/training/docker/Dockerfile | 21 - .../training/images/template.graphml | 373 ------------ project_templates/training/index.md | 4 - project_templates/training/metadata.yaml | 50 -- .../training/slides/introduction.html | 53 -- .../tutorials/tutorial1/data-library.yaml | 7 - .../tutorials/tutorial1/data-manager.yaml | 18 - .../training/tutorials/tutorial1/slides.html | 14 - .../tutorials/tutorial1/tours/tour.yaml | 0 .../training/tutorials/tutorial1/tutorial.md | 128 ---- .../tutorial1/workflows/empty_workflow.ga | 8 - tests/data/training_metadata_wo_zenodo.yaml | 42 -- tests/data/training_tutorial.md | 35 ++ tests/data/tutorial.md | 35 ++ tests/test_training.py | 459 +++++++------- 18 files changed, 644 insertions(+), 1178 deletions(-) delete mode 100644 project_templates/training/README.md delete mode 100644 project_templates/training/docker/Dockerfile delete mode 100644 project_templates/training/images/template.graphml delete mode 100644 project_templates/training/index.md delete mode 100644 project_templates/training/metadata.yaml delete mode 100644 project_templates/training/slides/introduction.html delete mode 100644 project_templates/training/tutorials/tutorial1/data-library.yaml delete mode 100644 project_templates/training/tutorials/tutorial1/data-manager.yaml delete mode 100644 project_templates/training/tutorials/tutorial1/slides.html delete mode 100644 project_templates/training/tutorials/tutorial1/tours/tour.yaml delete mode 100644 project_templates/training/tutorials/tutorial1/tutorial.md delete mode 100644 project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga delete mode 100644 tests/data/training_metadata_wo_zenodo.yaml create mode 100644 tests/data/training_tutorial.md create mode 100644 tests/data/tutorial.md diff --git a/planemo/options.py b/planemo/options.py index fcc3c2a27..98fde53a3 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1182,7 +1182,7 @@ def training_datatype_option(): def training_zenodo_option(): return planemo_option( - "--zenodo", + "--zenodo_link", help="Zenodo URL with the input data") diff --git a/planemo/training.py b/planemo/training.py index 5e1811613..03660241c 100644 --- a/planemo/training.py +++ b/planemo/training.py @@ -19,6 +19,90 @@ from planemo.runnable import for_path +INDEX_FILE_TEMPLATE = """--- +layout: topic +topic_name: {{ topic }} +--- +""" + + +README_FILE_TEMPLATE = """ +{{ topic }} +========== + +Please refer to the [CONTRIBUTING.md](../../CONTRIBUTING.md) before adding or updating any material +""" + + +DOCKER_FILE_TEMPLATE = """ +# Galaxy - {{ topic_title }} +# +# to build the docker image, go to root of training repo and +# docker build -t {{ topic_name }} -f topics/{{ topic_name }}/docker/Dockerfile . +# +# to run image: +# docker run -p "8080:80" -t {{ topic_name }} + +FROM bgruening/galaxy-stable + +MAINTAINER Galaxy Training Material + +ENV GALAXY_CONFIG_BRAND "GTN: {{ topic_title }}" + +# prerequisites +RUN pip install ephemeris -U +ADD bin/galaxy-sleep.py /galaxy-sleep.py + +# copy the tutorials directory for your topic +ADD topics/{{ topic_name }}/tutorials/ /tutorials/ + +# install everything for tutorials +ADD bin/docker-install-tutorials.sh /setup-tutorials.sh +ADD bin/mergeyaml.py /mergeyaml.py +RUN /setup-tutorials.sh +""" + + +INTRO_SLIDES_FILE_TEMPLATE = """--- +layout: introduction_slides +logo: "GTN" + +title: {{ title }} +type: {{ type }} +contributors: +- contributor +--- + +### How to fill the slide decks? + +Please follow our +[tutorial to learn how to fill the slides]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) +""" + +TUTO_SLIDES_TEMPLATE = """--- +layout: tutorial_slides +logo: "GTN" + +{{ metadata }} +--- + +### How to fill the slide decks? + +Please follow our +[tutorial to learn how to fill the slides]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) +""" + + +TUTO_HAND_ON_TEMPLATE = """--- +layout: tutorial_hands_on + +{{ metadata }} +--- + +{{ body }} +""" + + INPUT_FILE_TEMPLATE = """ >{{space}}- {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} """ @@ -71,13 +155,7 @@ """ - -TUTORIAL_TEMPLATE = """--- -layout: tutorial_hands_on -topic_name: {{ topic_name }} -tutorial_name: {{ tutorial_name }} ---- - +TUTO_HAND_ON_BODY_TEMPLATE = """ # Introduction {:.no_toc} @@ -120,8 +198,8 @@ > ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Data upload > -> 1. Import the following files from [Zenodo]({{ zenodo_link }}) or from a data -> library named `TODO` if available (ask your instructor) +> 1. Create a new history for this tutorial +> 2. Import the files from [Zenodo]({{ zenodo_link }}) or from the shared data library > > ``` > {{ z_file_links }} @@ -130,26 +208,18 @@ > > ***TODO***: *Remove the useless files (if added)* > -> > ### {{ '{%' }} icon tip {{ '%}' }} Tip: Importing data via links -> > -> > * Copy the link location -> > * Open the Galaxy Upload Manager -> > * Select **Paste/Fetch Data** -> > * Paste the link into the text field -> > * Press **Start** -> > -> > By default, Galaxy uses the url as the name, so please rename them to something more pleasing. -> {: .tip} +> {{ '{%' }} include snippets/import_via_link.md {{ '%}' }} +> {{ '{%' }} include snippets/import_from_data_library.md {{ '%}' }} > -> > ### {{ '{%' }} icon tip {{ '%}' }} Tip: Importing data from a data library -> > -> > * Go into "Shared data" (top panel) then "Data libraries" -> > * Click on "Training data" and then "{{ topic_title }}" -> > * Select interesting file -> > * Click on "Import selected datasets into history" -> > * Import in a new history -> {: .tip} +> 3. Rename the datasets +> 4. Check that the datatype +> +> {{ '{%' }} include snippets/change_datatype.md datatype="datatypes" {{ '%}' }} > +> 5. Add to each database a tag corresponding to ... +> +> {{ '{%' }} include snippets/add_tag.md {{ '%}' }} +> {: .hands_on} # Title of the section usually corresponding to a big step in the analysis @@ -210,121 +280,87 @@ def save_to_yaml(content, filepath): allow_unicode=True) -def get_template_dir(kwds): - """Check and return the templates directory.""" - if not kwds["templates"]: - template_dir = "templates" - if not os.path.isdir(template_dir): - raise Exception("This script needs to be run in the training material repository") - else: - template_dir = kwds["templates"] - return template_dir - - -def update_top_metadata_file(filepath, topic_name, tuto_name="tutorial1", keep=True): - """Update metadata on the top or delete a (tutorial or index) file.""" - if keep: - with open(filepath, "r") as in_f: - content = in_f.read() - - content = content.replace("your_topic", topic_name) - content = content.replace("your_tutorial_name", tuto_name) - - with open(filepath, 'w') as out_f: - out_f.write(content) - - elif os.path.isfile(filepath): - os.remove(filepath) - - -def create_topic(kwds, topic_dir, template_dir): +def create_topic(kwds, topic_dir): """ Create the skeleton of a new topic. - 1. copy templates + 1. create the folder and its structure 2. update the index.md to match your topic's name 3. fill the metadata 4. add a symbolic link to the metadata.yaml from the metadata folder """ - # copy templates - shutil.copytree(template_dir, topic_dir) - - # update the index.md to match your topic's name - index_path = os.path.join(topic_dir, "index.md") - update_top_metadata_file(index_path, kwds["topic_name"]) - - # update the metadata file - metadata_path = os.path.join(topic_dir, "metadata.yaml") - - metadata = load_yaml(metadata_path) + # create the folder and its structure + os.makedirs(topic_dir) + img_folder = os.path.join(topic_dir, "images") + os.makedirs(img_folder) + tuto_folder = os.path.join(topic_dir, "tutorials") + os.makedirs(tuto_folder) + + # create the index.md and add the topic name + index_fp = os.path.join(topic_dir, "index.md") + with open(index_fp, 'w') as index_f: + index_f.write( + templates.render(INDEX_FILE_TEMPLATE, **{'topic': kwds["topic_name"]})) + + # create the README file + readme_fp = os.path.join(topic_dir, "README.md") + with open(readme_fp, 'w') as readme_f: + readme_f.write( + templates.render(README_FILE_TEMPLATE, **{'topic': kwds["topic_title"]})) + + # create the metadata file + metadata_fp = os.path.join(topic_dir, "metadata.yaml") + metadata = collections.OrderedDict() metadata['name'] = kwds["topic_name"] - metadata['title'] = kwds["topic_title"] metadata['type'] = kwds["topic_target"] + metadata['title'] = kwds["topic_title"] metadata['summary'] = kwds["topic_summary"] - - save_to_yaml(metadata, metadata_path) - - # update the metadata in top of tutorial.md and slides.html - tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") - hand_on_path = os.path.join(tuto_path, "tutorial.md") - update_top_metadata_file(hand_on_path, kwds["topic_name"]) - slides_path = os.path.join(tuto_path, "slides.html") - update_top_metadata_file(slides_path, kwds["topic_name"]) + metadata['requirements'] = [] + if metadata['type'] == 'use': + req = collections.OrderedDict() + req['title'] = "Galaxy introduction" + req['type'] = "internal" + req['link'] = "/introduction/" + metadata['requirements'].append(req) + metadata['docker_image'] = "" + metadata['maintainers'] = ["maintainer"] + if metadata['type'] == 'use': + metadata['references'] = [] + ref = collections.OrderedDict() + ref['authors'] = "authors et al" + ref['title'] = "the title" + ref['link'] = "link" + ref['summary'] = "A short explanation of why this reference is useful" + metadata['references'].append(ref) + save_to_yaml(metadata, metadata_fp) # add a symbolic link to the metadata.yaml metadata_dir = "metadata" if not os.path.isdir(metadata_dir): os.makedirs(metadata_dir) os.chdir(metadata_dir) - os.symlink(os.path.join("..", metadata_path), "%s.yaml" % kwds["topic_name"]) + os.symlink(os.path.join("..", metadata_fp), "%s.yaml" % kwds["topic_name"]) os.chdir("..") - -def update_tutorial(kwds, tuto_dir, topic_dir): - """Update the metadata information of a tutorial and add it if not there.""" - # update the metadata file to add the new tutorial - metadata_path = os.path.join(topic_dir, "metadata.yaml") - - metadata = load_yaml(metadata_path) - found = False - for mat in metadata["material"]: - if mat["name"] == kwds["tutorial_name"]: - mat["name"] = kwds["tutorial_name"] - mat["title"] = kwds["tutorial_title"] - mat["hands_on"] = kwds["hands_on"] - mat["slides"] = kwds["slides"] - mat["workflows"] = True if kwds["workflow"] or kwds["workflow_id"] else False - mat["zenodo_link"] = kwds["zenodo"] if kwds["zenodo"] else '' - found = True - elif mat["name"] == "tutorial1": - metadata["material"].remove(mat) - - if not found: - new_mat = collections.OrderedDict() - new_mat["title"] = kwds["tutorial_title"] - new_mat["name"] = kwds["tutorial_name"] - new_mat["type"] = 'tutorial' - new_mat["zenodo_link"] = kwds["zenodo"] if kwds["zenodo"] else '' - new_mat["hands_on"] = kwds["hands_on"] - new_mat["slides"] = kwds["slides"] - new_mat["workflows"] = True if kwds["workflow"] or kwds["workflow_id"] else False - new_mat["galaxy_tour"] = False - new_mat["questions"] = ['', ''] - new_mat["objectives"] = ['', ''] - new_mat["time_estimation"] = '1d/3h/6h' - new_mat["key_points"] = ['', ''] - new_mat["contributors"] = ['contributor1', 'contributor2'] - metadata["material"].append(new_mat) - - save_to_yaml(metadata, metadata_path) - - # update the metadata in top of tutorial.md or remove it if not needed - hand_on_path = os.path.join(tuto_dir, "tutorial.md") - update_top_metadata_file(hand_on_path, kwds["topic_name"], tuto_name=kwds["tutorial_name"], keep=kwds["hands_on"]) - - # update the metadata in top of slides.md or remove it if not needed - slides_path = os.path.join(tuto_dir, "slides.html") - update_top_metadata_file(slides_path, kwds["topic_name"], tuto_name=kwds["tutorial_name"], keep=kwds["slides"]) + # create Dockerfile + docker_folder = os.path.join(topic_dir, "docker") + os.makedirs(docker_folder) + dockerfile_fp = os.path.join(docker_folder, "Dockerfile") + with open(dockerfile_fp, 'w') as dockerfile: + dockerfile.write( + templates.render( + DOCKER_FILE_TEMPLATE, + **{'topic_name': kwds["topic_name"], 'topic_title': kwds["topic_title"]})) + + # create empty introduction slides + slides_folder = os.path.join(topic_dir, "slides") + os.makedirs(slides_folder) + intro_slide_fp = os.path.join(slides_folder, "introduction.html") + with open(intro_slide_fp, 'w') as intro_slide_f: + intro_slide_f.write( + templates.render( + INTRO_SLIDES_FILE_TEMPLATE, + **{'title': "Introduction to %s" % kwds["topic_title"], 'type': "introduction"})) def get_zenodo_record(zenodo_link): @@ -450,9 +486,9 @@ def prepare_data_library(files, kwds, z_record, tuto_dir): def prepare_data_library_from_zenodo(kwds, tuto_dir): """Get the list of URLs of the files on Zenodo and fill the data library file.""" links = [] - if not kwds['zenodo']: + if not kwds['zenodo_link']: return links - files, links, z_record = get_files_from_zenodo(kwds['zenodo'], kwds['datatypes']) + files, links, z_record = get_files_from_zenodo(kwds['zenodo_link'], kwds['datatypes']) prepare_data_library(files, kwds, z_record, tuto_dir) return links @@ -511,7 +547,7 @@ def get_input_tool_name(step_id, steps): inp_prov_id = str(step_id) if inp_prov_id in steps: name = steps[inp_prov_id]['name'] - if name.find('Input dataset') != -1: + if 'Input dataset' in name: inp_provenance = "(%s)" % name else: inp_provenance = "(output of **%s** {%% icon tool %%})" % name @@ -535,20 +571,19 @@ def format_inputs(step_inputs, tp_desc, wf_steps, level): else: # sinle input or collection inp_type = wf_steps[str(inp['id'])]['type'] - if inp_type.find('collection') != -1: + if 'collection' in inp_type: icon = 'param-collection' else: icon = 'param-file' inps = ['`%s` %s' % ( inp['output_name'], get_input_tool_name(inp['id'], wf_steps))] - context = { + inputlist += templates.render(INPUT_FILE_TEMPLATE, **{ "icon": icon, "input_name": tp_desc['label'], "input_value": ', '.join(inps), "space": SPACE * level - } - inputlist += templates.render(INPUT_FILE_TEMPLATE, **context) + }) return inputlist @@ -556,7 +591,7 @@ def get_wf_step_inputs(step_inp): """Get the inputs from a workflow step and format them.""" step_inputs = {} for inp_n, inp in step_inp.items(): - if inp_n.find('|') != -1: + if '|' in inp_n: repeat_regex = '(?P[^\|]*)_(?P\d+)\|(?P.+).+' repeat_search = re.search(repeat_regex, inp_n) hier_regex = '(?P[^\|]*)\|(?P.+)' @@ -586,7 +621,7 @@ def json_load(string): def get_lower_params(step_params, name): """Get the parameters from workflow that are below name in the hierarchy.""" params = json_load(step_params) - if name in params: + if isinstance(params, dict) and name in params: params = json_load(params[name]) return params @@ -594,7 +629,7 @@ def get_lower_params(step_params, name): def get_lower_inputs(step_inputs, name): """Get the inputs from workflow that are below name in the hierarchy.""" inputs = {} - if name in step_inputs: + if isinstance(step_inputs, dict) and name in step_inputs: inputs = step_inputs[name] else: inputs = step_inputs @@ -604,15 +639,15 @@ def get_lower_inputs(step_inputs, name): def format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): """Format the description (label and value) for parameters in a section.""" section_paramlist = '' - # get section description - context = {'space': SPACE * level, 'section_label': tp_desc['title']} # get sub params and inputs params = get_lower_params(step_params, tp_desc['name']) inputs = get_lower_inputs(step_inputs, tp_desc['name']) # get description of parameters in lower hierarchy sub_param_desc = get_param_desc(params, inputs, get_tool_input(tp_desc), level+1, wf_steps) if sub_param_desc != '': - section_paramlist += templates.render(INPUT_SECTION, **context) + section_paramlist += templates.render(INPUT_SECTION, **{ + 'space': SPACE * level, + 'section_label': tp_desc['title']}) section_paramlist += sub_param_desc return section_paramlist @@ -624,7 +659,12 @@ def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_s test_param = tp_desc['test_param'] params = get_lower_params(step_params, tp_desc['name']) inputs = get_lower_inputs(step_inputs, tp_desc['name']) - cond_param = step_params[test_param['name']] + cond_param = get_lower_params(params, test_param['name']) + print("-") + print(cond_param) + print("-") + print(test_param) + print("-") conditional_paramlist += format_param_desc( cond_param, step_inputs, @@ -652,28 +692,30 @@ def format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) inputs = get_lower_inputs(step_inputs, tp_desc['name']) repeat_paramlist = '' for r in range(len(params)): - r_inputs = inputs[str(r)] if str(r) in inputs else inputs - paramlist_in_repeat = get_param_desc(params[r], r_inputs, repeat_inp_desc, level+2, wf_steps) + r_inputs = get_lower_inputs(inputs, str(r)) + r_params = params[r] + paramlist_in_repeat = get_param_desc(r_params, r_inputs, repeat_inp_desc, level+2, wf_steps) if paramlist_in_repeat != '': # add first click - context = {'space': SPACE * (level+1), 'repeat_label': tp_desc['title']} - repeat_paramlist += templates.render(INPUT_ADD_REPEAT, **context) + repeat_paramlist += templates.render(INPUT_ADD_REPEAT, **{ + 'space': SPACE * (level+1), + 'repeat_label': tp_desc['title']}) # add description of parameters in the repeat - context = { + repeat_paramlist += templates.render(INPUT_SECTION, **{ 'space': SPACE * (level+1), - 'section_label': "%s: %s" % (r+1, tp_desc['title'])} - repeat_paramlist += templates.render(INPUT_SECTION, **context) + 'section_label': "%s: %s" % (r+1, tp_desc['title'])}) repeat_paramlist += paramlist_in_repeat if repeat_paramlist != '': - context = {'space': SPACE * level, 'section_label': tp_desc['title']} - repeat_paramlist = templates.render(INPUT_SECTION, **context) + repeat_paramlist + repeat_paramlist = templates.render(INPUT_SECTION, **{ + 'space': SPACE * level, + 'section_label': tp_desc['title']}) + repeat_paramlist return repeat_paramlist def get_param_value(step_params, tp_desc, force_default=False): """Get value of a 'simple' parameter if different from the default value, None otherwise.""" param_value = '' - if '"' in step_params: + if isinstance(step_params, str) and '"' in step_params: step_params = step_params.replace('"', '') if tp_desc['value'] == step_params and not force_default: param_value = None @@ -710,11 +752,10 @@ def format_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, force_ else: param_value = get_param_value(step_params, tp_desc, force_default) if param_value is not None: - context = { + paramlist += templates.render(INPUT_PARAM, **{ 'space': SPACE * level, 'param_label': tp_desc['label'], - 'param_value': param_value} - paramlist += templates.render(INPUT_PARAM, **context) + 'param_value': param_value}) return paramlist @@ -752,7 +793,72 @@ def get_handson_box(step, steps, tools): return templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **context) -def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): +def init_tuto_metadata(kwds): + """Init tutorial metadata""" + metadata = collections.OrderedDict() + metadata['title'] = kwds["tutorial_title"] + metadata['zenodo_link'] = kwds["zenodo_link"] if kwds["zenodo_link"] else '' + metadata['questions'] = [ + "Which biological questions are addressed by the tutorial?", + "Which bioinformatics techniques is important to know for this type of data?"] + metadata['objectives'] = [ + "The learning objectives are the goals of the tutorial", + "They will be informed by your audience and will communicate to them and to yourself what you should focus on during the course", + "They are single sentence describing what a learner will be able to do once they have done the tutorial", + "You can use the Bloom's Taxonomy to write effective learning objectives"] + metadata['time'] = "3H" + metadata['key_points'] = [ + "The take-home messages", + "They will appear at the end of the tutorial"] + metadata['contributors'] = ["contributor1", "contributor2"] + return metadata + + +def format_tuto_metadata(metadata): + """Return the string corresponding to the tutorial metadata""" + return yaml.safe_dump(metadata, + indent=2, + default_flow_style=False, + default_style='', + explicit_start=False) + + +def write_hands_on_tutorial(metadata, body, tuto_dir): + """Write the tutorial hands-on""" + m_str = format_tuto_metadata(metadata) + template = templates.render(TUTO_HAND_ON_TEMPLATE, **{ + "metadata": m_str, + "body": body + }) + + md_path = os.path.join(tuto_dir, "tutorial.md") + with open(md_path, 'w') as md: + md.write(template) + + +def get_tuto_body(z_file_links, body = None): + """Get the body for a tutorial""" + if body is None: + body = templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **{ + 'tool_name': "My Tool", + 'inputlist': templates.render(INPUT_FILE_TEMPLATE, **{ + 'space': 1*SPACE, + 'icon': 'param-file', + 'input_name': 'Input file', + 'input_value': 'File' + }), + 'paramlist': templates.render(INPUT_PARAM, **{ + 'space': 1*SPACE, + 'param_label': 'Parameter', + 'param_value': 'a value' + }) + }) + return templates.render(TUTO_HAND_ON_BODY_TEMPLATE, **{ + "z_file_links": "\n> ".join(z_file_links), + "body": body}) + + +def create_hands_on_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx, metadata=None): """Create tutorial structure from the workflow file.""" # load workflow if kwds['workflow_id']: @@ -763,29 +869,20 @@ def create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx): wf, tools = get_wf_tools_from_running_galaxy(kwds) else: wf, tools = get_wf_tool_from_local_galaxy(kwds, kwds["workflow"], ctx) - save_to_yaml(tools, 'tools.yaml') + # get hands-on body from the workflow body = '' for step_id in range(len(wf['steps'].keys())): step = wf['steps'][str(step_id)] if not step['tool_state']: continue body += get_handson_box(step, wf['steps'], tools) + body = get_tuto_body(z_file_links, body) - context = { - "topic_name": kwds["topic_name"], - "topic_title": kwds["topic_title"], - "tutorial_name": kwds["tutorial_name"], - "zenodo_link": kwds["zenodo"] if kwds["zenodo"] else '', - "z_file_links": "\n> ".join(z_file_links), - "body": body - } - template = templates.render(TUTORIAL_TEMPLATE, **context) - - # create the tutorial markdown file - md_path = os.path.join(tuto_dir, "tutorial.md") - with open(md_path, 'w') as md: - md.write(template) + # write in the tutorial file with the metadata on the top + if not metadata: + metadata = init_tuto_metadata(kwds) + write_hands_on_tutorial(metadata, body, tuto_dir) def add_workflow_file(kwds, tuto_dir): @@ -806,133 +903,143 @@ def add_workflow_file(kwds, tuto_dir): os.remove(empty_wf_filepath) -def create_tutorial(kwds, tuto_dir, topic_dir, template_dir, ctx): +def create_tutorial(kwds, tuto_dir, ctx): """Create the skeleton of a new tutorial.""" - # copy or rename templates - template_tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") - if os.path.isdir(template_tuto_path): - os.rename(template_tuto_path, tuto_dir) - else: - shutil.copytree(template_dir, tuto_dir) + # create tuto folder and empty files + os.makedirs(tuto_dir) + tour_folder = os.path.join(tuto_dir, "tours") + os.makedirs(tour_folder) + workflow_folder = os.path.join(tuto_dir, "workflows") + os.makedirs(workflow_folder) + + metadata = init_tuto_metadata(kwds) # extract the data library from Zenodo and the links for the tutorial z_file_links = '' - if kwds["zenodo"]: + if kwds["zenodo_link"]: info("Create the data library from Zenodo") z_file_links = prepare_data_library_from_zenodo(kwds, tuto_dir) # create tutorial skeleton from workflow and copy workflow file - if kwds["workflow"] or kwds['workflow_id']: - info("Create tutorial skeleton from workflow") - create_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx) - add_workflow_file(kwds, tuto_dir) + if kwds["hands_on"]: + if kwds["workflow"] or kwds['workflow_id']: + info("Create tutorial skeleton from workflow") + create_hands_on_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx) + add_workflow_file(kwds, tuto_dir) + else: + body = get_tuto_body(z_file_links) + print(body) + write_hands_on_tutorial(metadata, body, tuto_dir) - # fill the metadata of the new tutorial - update_tutorial(kwds, tuto_dir, topic_dir) + # create slide skeleton + if kwds["slides"]: + slide_path = os.path.join(tuto_dir, 'slides.html') + m_str = format_tuto_metadata(metadata) + with open(slide_path, 'w') as slide_f: + slide_f.write( + templates.render(TUTO_SLIDES_TEMPLATE, **{"metadata": m_str})) def init(ctx, kwds): """Create/update a topic/tutorial""" - topic_template_dir = get_template_dir(kwds) - topic_dir = os.path.join("topics", kwds['topic_name']) if not os.path.isdir(topic_dir): info("The topic %s does not exist. It will be created" % kwds['topic_name']) - create_topic(kwds, topic_dir, topic_template_dir) - else: - metadata_path = os.path.join(topic_dir, "metadata.yaml") - metadata = load_yaml(metadata_path) - kwds['topic_title'] = metadata['title'] - kwds['topic_summary'] = metadata['summary'] + create_topic(kwds, topic_dir) if not kwds['tutorial_name']: + if kwds["slides"]: + raise Exception("A tutorial name is needed to create the skeleton of a tutorial slide deck") if kwds['workflow'] or kwds['workflow_id']: raise Exception("A tutorial name is needed to create the skeleton of the tutorial from a workflow") - if kwds['zenodo']: + if kwds['zenodo_link']: raise Exception("A tutorial name is needed to add Zenodo information") else: tuto_dir = os.path.join(topic_dir, "tutorials", kwds['tutorial_name']) if not os.path.isdir(tuto_dir): - tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") info("The tutorial %s in topic %s does not exist. It will be created." % (kwds['tutorial_name'], kwds['topic_name'])) - create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, ctx) - else: - info("The tutorial %s in topic %s already exists. It will be updated with the other arguments" % ( - kwds['tutorial_name'], kwds['topic_name'])) - update_tutorial(kwds, tuto_dir, topic_dir) + create_tutorial(kwds, tuto_dir, ctx) + + +def get_tuto_info(tuto_dir): + """Extract the metadata front matter on the top of the tutorial file and its body""" + tuto_fp = os.path.join(tuto_dir, "tutorial.md") + with open(tuto_fp, "r") as tuto_f: + tuto_content = tuto_f.read() + regex = '^---\n(?P[\s\S]*)\n---(?P[\s\S]*)' + tuto_split_regex = re.search(regex, tuto_content) + if not tuto_split_regex: + raise Exception("No metadata found at the top of the tutorial") -def prepare_tuto_update(kwds): - """Prepare the update of a tutorial.""" - topics_dir = "topics" - if not os.path.isdir(topics_dir): - os.makedirs(topics_dir) + metadata = yaml.load(tuto_split_regex.group("metadata")) + body = tuto_split_regex.group("body") - topic_dir = os.path.join(topics_dir, kwds['topic_name']) + return metadata, body + + +def check_topic_tuto_exist(kwds): + """Check that the topic and tutorial are already there.""" + topic_dir = os.path.join("topics", kwds['topic_name']) if not os.path.isdir(topic_dir): raise Exception("The topic %s does not exists. It should be created" % kwds['topic_name']) tuto_dir = os.path.join(topic_dir, "tutorials", kwds['tutorial_name']) if not os.path.isdir(tuto_dir): raise Exception("The tutorial %s does not exists. It should be created" % kwds['tutorial_name']) - # get metadata - metadata_path = os.path.join(topic_dir, "metadata.yaml") - metadata = load_yaml(metadata_path) - tuto_metadata = collections.OrderedDict() - for mat in metadata['material']: - if mat['name'] == kwds['tutorial_name']: - tuto_metadata = mat - return (topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata) + return topic_dir, tuto_dir def fill_data_library(ctx, kwds): """Fill a data library for a tutorial.""" - topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata = prepare_tuto_update(kwds) + topic_dir, tuto_dir = check_topic_tuto_exist(kwds) + metadata, body = get_tuto_info(tuto_dir) # get the zenodo link z_link = '' - if 'zenodo_link' in tuto_metadata and tuto_metadata['zenodo_link'] != '': - if kwds['zenodo']: + if 'zenodo_link' in metadata and metadata['zenodo_link'] != '': + if kwds['zenodo_link']: info("The data library and the metadata will be updated with the new Zenodo link") - z_link = kwds['zenodo'] - tuto_metadata['zenodo_link'] = z_link + z_link = kwds['zenodo_link'] + metadata['zenodo_link'] = z_link else: info("The data library will be extracted using the Zenodo link in the metadata") - z_link = tuto_metadata['zenodo_link'] - elif kwds['zenodo']: + z_link = metadata['zenodo_link'] + elif kwds['zenodo_link']: info("The data library will be created and the metadata will be filled with the new Zenodo link") - z_link = kwds['zenodo'] - tuto_metadata['zenodo_link'] = z_link + z_link = kwds['zenodo_link'] + metadata['zenodo_link'] = z_link if z_link == '' or z_link is None: raise Exception("A Zenodo link should be provided either in the metadata file or as argument of the command") + # get the topic metadata + topic_metadata_fp = os.path.join(topic_dir, "metadata.yaml") + topic_metadata = load_yaml(topic_metadata_fp) + # extract the data library from Zenodo topic_kwds = { - 'topic_title': metadata['title'], - 'topic_summary': metadata['summary'], - 'tutorial_title': tuto_metadata['title'], - 'zenodo': z_link, + 'topic_title': topic_metadata['title'], + 'topic_summary': topic_metadata['summary'], + 'tutorial_title': metadata['title'], + 'zenodo_link': z_link, 'datatypes': kwds['datatypes'] } prepare_data_library_from_zenodo(topic_kwds, tuto_dir) + # update the metadata - save_to_yaml(metadata, metadata_path) + write_hands_on_tutorial(metadata, body, tuto_dir) def generate_tuto_from_wf(ctx, kwds): """Generate the skeleton of a tutorial from a workflow.""" - topic_dir, tuto_dir, metadata, metadata_path, tuto_metadata = prepare_tuto_update(kwds) if kwds["workflow"] or kwds['workflow_id']: - kwds["zenodo"] = '' - kwds["topic_title"] = metadata['title'] + topic_dir, tuto_dir = check_topic_tuto_exist(kwds) + metadata, body = get_tuto_info(tuto_dir) info("Create tutorial skeleton from workflow") - create_tutorial_from_workflow(kwds, [], tuto_dir, ctx) + create_hands_on_tutorial_from_workflow(kwds, [], tuto_dir, ctx, metadata) add_workflow_file(kwds, tuto_dir) else: exc = "A path to a local workflow or the id of a workflow on a running Galaxy instance should be provided" raise Exception(exc) - # update the metadata - tuto_metadata['workflows'] = True - save_to_yaml(metadata, metadata_path) diff --git a/project_templates/training/README.md b/project_templates/training/README.md deleted file mode 100644 index 03a7d058c..000000000 --- a/project_templates/training/README.md +++ /dev/null @@ -1,4 +0,0 @@ -Topic name -========== - -Please refer to the [CONTRIBUTING.md](../CONTRIBUTING.md) before adding or updating any material \ No newline at end of file diff --git a/project_templates/training/docker/Dockerfile b/project_templates/training/docker/Dockerfile deleted file mode 100644 index 31f674d85..000000000 --- a/project_templates/training/docker/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -# Galaxy - metagenomics training material -# -# to build the docker image, go to root of training repo and -# docker build -t -f topics//docker/Dockerfile . -# -# to run image: -# docker run -p "8080:80" -t - -FROM bgruening/galaxy-stable - -MAINTAINER Galaxy Training Material - -ENV GALAXY_CONFIG_BRAND "GTN: " - -# copy the tutorials directory for your topic -ADD topics//tutorials/ /tutorials/ - -# install everything for tutorials -ADD bin/docker-install-tutorials.sh /setup-tutorials.sh -ADD bin/mergeyaml.py /mergeyaml.py -RUN /setup-tutorials.sh diff --git a/project_templates/training/images/template.graphml b/project_templates/training/images/template.graphml deleted file mode 100644 index 4164882a9..000000000 --- a/project_templates/training/images/template.graphml +++ /dev/null @@ -1,373 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - Part - - - - - - - - - - - - - - - - - Part - - - - - - - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - Part - - - - - - - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - subPart - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/project_templates/training/index.md b/project_templates/training/index.md deleted file mode 100644 index 0720e4154..000000000 --- a/project_templates/training/index.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -layout: topic -topic_name: your_topic ---- diff --git a/project_templates/training/metadata.yaml b/project_templates/training/metadata.yaml deleted file mode 100644 index 49f4280c0..000000000 --- a/project_templates/training/metadata.yaml +++ /dev/null @@ -1,50 +0,0 @@ ---- -name: "Name of the topic" -type: "use" -title: "topic_name" -summary: "Description of the topic" -edam_ontology: "" -requirements: - - - title: "Galaxy introduction" - type: "internal" - link: "/introduction/" - -material: - - - title: "Introduction to the topic" - type: "introduction" - name: "introduction" - slides: "yes" - contributors: - - contributor1 - - contributor2 - - - title: "tutorial1" - type: "tutorial" - name: "tutorial1" - zenodo_link: "" - galaxy_tour: no - hands_on: yes - slides: no - workflows: no - questions: - - "" - - "" - objectives: - - "" - - "" - - "" - time_estimation: "1d/3h/6h" - key_points: - - "" - - "" - - "" - - "..." - contributors: - - contributor1 - - contributor2 - -maintainers: - - maintainer1 - - maintainer2 diff --git a/project_templates/training/slides/introduction.html b/project_templates/training/slides/introduction.html deleted file mode 100644 index a1ec16762..000000000 --- a/project_templates/training/slides/introduction.html +++ /dev/null @@ -1,53 +0,0 @@ ---- -layout: introduction_slides -topic_name: "your_topic" -logo: "GTN" ---- - -# What is the topic? - ---- - -### Topic - -Blabla - -- Blabla -- Blabla - -Blabla - -- Blabla -- Blabla - ---- - -### Second slide - -![](../images/RNA_seq_zang2016.png) - -[*Zang and Mortazavi, Nature, 2012*](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4138050/) - ---- - -### A Third slide - -Some text - -??? - -Slide notes --> Pressing **P** will toggle presenter mode. - - ---- - -# Part 2 - ---- - -### An other slide - -With text and a resized image - -.image-25[![](../images/ecker_2012.jpg)] diff --git a/project_templates/training/tutorials/tutorial1/data-library.yaml b/project_templates/training/tutorials/tutorial1/data-library.yaml deleted file mode 100644 index de1e313fd..000000000 --- a/project_templates/training/tutorials/tutorial1/data-library.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -libraries: - # This needs to be changed to reference your Zenodo data - - name: "Small test files" - files: - - url: "http://raw.githubusercontent.com/bgruening/galaxytools/master/tools/rna_tools/sortmerna/test-data/read_small.fasta" - file_type: "fasta" diff --git a/project_templates/training/tutorials/tutorial1/data-manager.yaml b/project_templates/training/tutorials/tutorial1/data-manager.yaml deleted file mode 100644 index e9420766d..000000000 --- a/project_templates/training/tutorials/tutorial1/data-manager.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# configuration for fetch and index genomes ---- -data_managers: - # Data manager ID - - id: url to data manager on ToolShed - # tool parameters, nested parameters should be specified using a pipe (|) - params: - - 'param1': '{{ item }}' - - 'param2': 'value' - # Items refer to a list of variables you want to run this data manager. You can use them inside the param field with {{ item }} - # In case of genome for example you can run this DM with multiple genomes, or you could give multiple URLs. - items: - - item1 - - item2 - # Name of the data-tables you want to reload after your DM are finished. This can be important for subsequent data managers - data_table_reload: - - all_fasta - - __dbkeys__ diff --git a/project_templates/training/tutorials/tutorial1/slides.html b/project_templates/training/tutorials/tutorial1/slides.html deleted file mode 100644 index 42e37eb0e..000000000 --- a/project_templates/training/tutorials/tutorial1/slides.html +++ /dev/null @@ -1,14 +0,0 @@ ---- -layout: tutorial_slides -topic_name: your_topic -tutorial_name: your_tutorial_name -logo: "GTN" ---- - -# What is the topic? - ---- - -### How to fill the slide decks? - -Please follow our [tutorial to learn how to fill the slides]({{ site.baseurl }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) diff --git a/project_templates/training/tutorials/tutorial1/tours/tour.yaml b/project_templates/training/tutorials/tutorial1/tours/tour.yaml deleted file mode 100644 index e69de29bb..000000000 diff --git a/project_templates/training/tutorials/tutorial1/tutorial.md b/project_templates/training/tutorials/tutorial1/tutorial.md deleted file mode 100644 index 8303e8afe..000000000 --- a/project_templates/training/tutorials/tutorial1/tutorial.md +++ /dev/null @@ -1,128 +0,0 @@ ---- -layout: tutorial_hands_on -topic_name: your_topic -tutorial_name: your_tutorial_name ---- - -# Introduction -{:.no_toc} - - - -General introduction about the topic and then an introduction of the tutorial (the questions and the objectives). It is nice also to have a scheme to sum up the pipeline used during the tutorial. The idea is to give to trainees insight into the content of the tutorial and the (theoretical and technical) key concepts they will learn. - -**Please follow our [tutorial to learn how to fill the Markdown]({{ site.baseurl }}/topics/contributing/tutorials/create-new-tutorial-content/tutorial.html)** - -> ### Agenda -> -> In this tutorial, we will deal with: -> -> 1. TOC -> {:toc} -> -{: .agenda} - -# Part 1 - -Introduction about this part - -## Subpart 1 - -Short introduction about this subpart. - - - -> ### {% icon hands_on %} Hands-on: Data upload -> -> 1. Step1 -> 2. Step2 -> -> > ### {% icon comment %} Comments -> > A comment -> {: .comment} -> -> > ### {% icon tip %}Tip: A tip -> > -> > * Step1 -> > * Step2 -> {: .tip} -{: .hands_on} - -## Subpart 2 - -Short introduction about this subpart. - -> ### {% icon hands_on %} Hands-on: Data upload -> -> 1. Step1 -> 2. Step2 -> -> > ### {% icon question %} Question -> > -> > Question? -> > -> > > ### {% icon solution %} Solution -> > > -> > > Answer to question -> > > -> > {: .solution} -> > -> {: .question} -{: .hands_on} - -Some blabla -> ### {% icon hands_on %} Hands-on: Data upload -> -> 1. Step1 -> 2. **My Tool** {% icon tool %} with the following parameters -> - *"param1"*: the file `myfile` -> - *"param2"*: `42` -> - *"param3"*: `Yes` -> -> 3. **My Tool** {% icon tool %} with the following parameters -> - {% icon param-text %} *"My text parameter"*: `my value` -> - {% icon param-file %} *"My input file"*: `my file` -> - {% icon param-files %} *"My multiple file input or collection"*: `my collection` -> - {% icon param-select %} *"My select menu"*: `my choice` -> - {% icon param-check %} *"My check box"*: `yes` -> -> > ### {% icon question %} Questions -> > -> > 1. Question1? -> > 2. Question2? -> > -> > > ### {% icon solution %} Solution -> > > -> > > 1. Answer for question1 -> > > 2. Answer for question2 -> > > -> > {: .solution} -> > -> {: .question} -> -> 3. Step3 -{: .hands_on} - -# Part 2 - -Short introduction about this subpart. - -> ### {% icon comment %} Comment -> -> Do you want to learn more about the principles behind mapping? Follow our [training](../../NGS-mapping) -{: .comment} - - -> ### {% icon details %} More details on the .... -> -> Add more details in Markdown. By default the box is collapsed. And is expanded when clicked -> -{: .details} - -# Conclusion -{:.no_toc} - -Conclusion about the technical key points. And then relation between the techniques and the biological question to end with a global view. diff --git a/project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga b/project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga deleted file mode 100644 index f80a4f6cd..000000000 --- a/project_templates/training/tutorials/tutorial1/workflows/empty_workflow.ga +++ /dev/null @@ -1,8 +0,0 @@ -{ - "a_galaxy_workflow": "true", - "annotation": "", - "format-version": "0.1", - "name": "empty workflow", - "steps": {}, - "uuid": "2b67537f-0546-4ffa-bc9b-51b55829f6d9" -} \ No newline at end of file diff --git a/tests/data/training_metadata_wo_zenodo.yaml b/tests/data/training_metadata_wo_zenodo.yaml deleted file mode 100644 index 80a1f7a99..000000000 --- a/tests/data/training_metadata_wo_zenodo.yaml +++ /dev/null @@ -1,42 +0,0 @@ ---- -name: test -type: use -title: Test -summary: 'Summary' -edam_ontology: '' -requirements: -- title: Galaxy introduction - type: internal - link: /introduction/ -material: -- title: Introduction to the topic - type: introduction - name: introduction - slides: 'yes' - contributors: - - contributor1 - - contributor2 -- title: Test - name: test - type: tutorial - zenodo_link: '' - hands_on: true - slides: false - workflows: true - galaxy_tour: false - questions: - - '' - - '' - objectives: - - '' - - '' - time_estimation: 1d/3h/6h - key_points: - - '' - - '' - contributors: - - contributor1 - - contributor2 -maintainers: -- maintainer1 -- maintainer2 \ No newline at end of file diff --git a/tests/data/training_tutorial.md b/tests/data/training_tutorial.md new file mode 100644 index 000000000..96553f63f --- /dev/null +++ b/tests/data/training_tutorial.md @@ -0,0 +1,35 @@ +--- +layout: tutorial_hands_on + +title: "A tutorial to test" +zenodo_link: "https://zenodo.org" +questions: + - "What is the purpose of the tutorial?" +objectives: + - "A learning objective" + - "Analysis of differentially expressed genes" + - "Identification of functional enrichment among differentially expressed genes" +time_estimation: "1H" +key_points: + - "Take home message" +contributors: + - the_best_contributor +--- + +# Introduction +{:.no_toc} + +The introduction + +> ### Agenda +> +> In this tutorial, we will deal with: +> +> 1. TOC +> {:toc} +> +{: .agenda} + +# First section + +# Second section \ No newline at end of file diff --git a/tests/data/tutorial.md b/tests/data/tutorial.md new file mode 100644 index 000000000..96553f63f --- /dev/null +++ b/tests/data/tutorial.md @@ -0,0 +1,35 @@ +--- +layout: tutorial_hands_on + +title: "A tutorial to test" +zenodo_link: "https://zenodo.org" +questions: + - "What is the purpose of the tutorial?" +objectives: + - "A learning objective" + - "Analysis of differentially expressed genes" + - "Identification of functional enrichment among differentially expressed genes" +time_estimation: "1H" +key_points: + - "Take home message" +contributors: + - the_best_contributor +--- + +# Introduction +{:.no_toc} + +The introduction + +> ### Agenda +> +> In this tutorial, we will deal with: +> +> 1. TOC +> {:toc} +> +{: .agenda} + +# First section + +# Second section \ No newline at end of file diff --git a/tests/test_training.py b/tests/test_training.py index a208e5f56..f2afedb41 100644 --- a/tests/test_training.py +++ b/tests/test_training.py @@ -14,14 +14,11 @@ ) from planemo.runnable import for_path from .test_utils import ( - PROJECT_TEMPLATES_DIR, TEST_DATA_DIR ) METADATA_FP = os.path.join(TEST_DATA_DIR, "training_metadata_w_zenodo.yaml") -TRAINING_TEMPLATE_DIR = os.path.join(PROJECT_TEMPLATES_DIR, "training") -TUTORIAL_FP = os.path.join("tutorials", "tutorial1", "tutorial.md") DATATYPE_FP = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") ZENODO_LINK = 'https://zenodo.org/record/1321885' WF_FP = os.path.join(TEST_DATA_DIR, "training_workflow.ga") @@ -32,8 +29,14 @@ def prepare_test(): """Prepare kwds, topic_dir and tuto_dir.""" + # clean before + if os.path.exists("topics"): + shutil.rmtree("topics") + if os.path.exists("metadata"): + shutil.rmtree("metadata") + # get info topic_name = 'my_new_topic' - topic_dir = topic_name + topic_dir = os.path.join("topics", topic_name) tuto_name = "new_tuto" tuto_dir = os.path.join(topic_dir, "tutorials", tuto_name) kwds = { @@ -47,7 +50,7 @@ def prepare_test(): 'slides': True, 'workflow': None, 'workflow_id': None, - 'zenodo': None, + 'zenodo_link': None, 'datatypes': DATATYPE_FP, 'templates': None, # planemo configuation @@ -126,116 +129,41 @@ def test_save_to_yaml(): new_metadata_fp = "metadata.yaml" training.save_to_yaml(metadata, new_metadata_fp) assert os.path.exists(new_metadata_fp) - assert open(new_metadata_fp, 'r').read().find('material') != -1 + assert 'material' in open(new_metadata_fp, 'r').read() os.remove(new_metadata_fp) -def test_get_template_dir_1(): - """Test :func:`planemo.training.get_template_dir`: test exception raising.""" - kwds = {"templates": None} - exp_exception = "This script needs to be run in the training material repository" - with assert_raises_regexp(Exception, exp_exception): - training.get_template_dir(kwds) - - -def test_get_template_dir_2(): - """Test :func:`planemo.training.get_template_dir`: test default return value.""" - kwds = {"templates": None} - os.makedirs("templates") - assert training.get_template_dir(kwds) == "templates" - shutil.rmtree("templates") - - -def test_get_template_dir_3(): - """Test :func:`planemo.training.get_template_dir`: test return value.""" - template_path = "temp" - kwds = {"templates": template_path} - assert training.get_template_dir(kwds) == template_path - - -def test_update_top_metadata_file_1(): - """Test :func:`planemo.training.update_top_metadata_file`: test topic change.""" - new_index_fp = "index.md" - topic_name = 'my_new_topic' - template_index_fp = os.path.join(TRAINING_TEMPLATE_DIR, "index.md") - shutil.copyfile(template_index_fp, new_index_fp) - training.update_top_metadata_file(new_index_fp, topic_name) - assert open(new_index_fp, 'r').read().find(topic_name) != -1 - os.remove(new_index_fp) - - -def test_update_top_metadata_file_2(): - """Test :func:`planemo.training.update_top_metadata_file`: test tutorial change.""" - new_tuto_fp = "tutorial.md" - topic_name = 'my_new_topic' - tuto_name = 'my_new_tuto' - template_tuto_fp = os.path.join(TRAINING_TEMPLATE_DIR, TUTORIAL_FP) - shutil.copyfile(template_tuto_fp, new_tuto_fp) - training.update_top_metadata_file(new_tuto_fp, topic_name, tuto_name=tuto_name) - assert open(new_tuto_fp, 'r').read().find(tuto_name) != -1 - os.remove(new_tuto_fp) - - -def test_update_top_metadata_file_3(): - """Test :func:`planemo.training.update_top_metadata_file`: test tutorial change.""" - new_tuto_fp = "tutorial.md" - topic_name = 'my_new_topic' - template_tuto_fp = os.path.join(TRAINING_TEMPLATE_DIR, TUTORIAL_FP) - shutil.copyfile(template_tuto_fp, new_tuto_fp) - training.update_top_metadata_file(new_tuto_fp, topic_name, keep=False) - assert not os.path.exists(new_tuto_fp) - - def test_create_topic(): """Test :func:`planemo.training.create_topic`.""" kwds, topic_dir, tuto_dir = prepare_test() topic_name = kwds['topic_name'] - training.create_topic(kwds, topic_dir, TRAINING_TEMPLATE_DIR) - # check if files has been moved and updated with topic name + topic_title = kwds['topic_title'] + training.create_topic(kwds, topic_dir) + # check if files has been created and updated with topic name index_fp = os.path.join(topic_dir, "index.md") assert os.path.exists(index_fp) - assert open(index_fp, 'r').read().find(topic_name) != -1 - tuto_fp = os.path.join(topic_dir, TUTORIAL_FP) - assert os.path.exists(tuto_fp) - assert open(tuto_fp, 'r').read().find(topic_name) != -1 + assert topic_name in open(index_fp, 'r').read() + readme_fp = os.path.join(topic_dir, "README.md") + assert os.path.exists(readme_fp) + assert topic_title in open(readme_fp, 'r').read() # check metadata content metadata = training.load_yaml(os.path.join(topic_dir, "metadata.yaml")) assert metadata['name'] == topic_name # check in metadata directory assert os.path.exists(os.path.join("metadata", "%s.yaml" % topic_name)) + # check dockerfile + docker_folder = os.path.join(topic_dir, "docker") + dockerfile_fp = os.path.join(docker_folder, "Dockerfile") + assert os.path.exists(dockerfile_fp) + assert topic_name in open(dockerfile_fp, 'r').read() + assert topic_title in open(dockerfile_fp, 'r').read() + # check introduction slide + slides_folder = os.path.join(topic_dir, "slides") + intro_slide_fp = os.path.join(slides_folder, "introduction.html") + assert os.path.exists(intro_slide_fp) + assert topic_title in open(intro_slide_fp, 'r').read() # clean - shutil.rmtree(topic_dir) - shutil.rmtree("metadata") - - -def test_update_tutorial(): - """Test :func:`planemo.training.update_tutorial`.""" - kwds, topic_dir, tuto_dir = prepare_test() - tuto_title = kwds['tutorial_title'] - metadata_fp = os.path.join(topic_dir, "metadata.yaml") - tuto_fp = os.path.join(tuto_dir, "tutorial.md") - slides_fp = os.path.join(tuto_dir, "slides.html") - # create a topic and prepare the tutorial - training.create_topic(kwds, topic_dir, TRAINING_TEMPLATE_DIR) - template_tuto_path = os.path.join(topic_dir, "tutorials", "tutorial1") - os.rename(template_tuto_path, tuto_dir) - assert open(metadata_fp, 'r').read().find("tutorial1") != -1 - # test update a new tutorial - training.update_tutorial(kwds, tuto_dir, topic_dir) - assert open(metadata_fp, 'r').read().find("tutorial1") == -1 - assert open(metadata_fp, 'r').read().find(tuto_title) != -1 - assert os.path.exists(tuto_fp) - assert os.path.exists(slides_fp) - # test update an existing tutorial - new_tuto_title = "A totally new title" - kwds['tutorial_title'] = new_tuto_title - kwds['slides'] = False - training.update_tutorial(kwds, tuto_dir, topic_dir) - assert open(metadata_fp, 'r').read().find(tuto_title) == -1 - assert open(metadata_fp, 'r').read().find(new_tuto_title) != -1 - assert not os.path.exists(slides_fp) - # clean - shutil.rmtree(topic_dir) + shutil.rmtree("topics") shutil.rmtree("metadata") @@ -246,7 +174,7 @@ def test_get_zenodo_record(): assert z_record == "1321885" assert 'files' in req_res assert req_res['files'][0]['type'] in ['rdata', 'csv'] - assert req_res['files'][0]['links']['self'].find(file_link_prefix) != -1 + assert file_link_prefix in req_res['files'][0]['links']['self'] # check with wrong zenodo link z_record, req_res = training.get_zenodo_record('https://zenodo.org/api/records/zenodooo') assert z_record is None @@ -262,14 +190,14 @@ def test_get_zenodo_record_with_doi(): assert z_record == "1321885" assert 'files' in req_res assert req_res['files'][0]['type'] in ['rdata', 'csv'] - assert req_res['files'][0]['links']['self'].find(file_link_prefix) != -1 + assert file_link_prefix in req_res['files'][0]['links']['self'] def test_get_galaxy_datatype(): """Test :func:`planemo.training.get_galaxy_datatype`.""" assert training.get_galaxy_datatype("csv", DATATYPE_FP) == "csv" assert training.get_galaxy_datatype("test", DATATYPE_FP) == "strange_datatype" - assert training.get_galaxy_datatype("unknown", DATATYPE_FP).find("# Please add") != -1 + assert "# Please add" in training.get_galaxy_datatype("unknown", DATATYPE_FP) def test_get_files_from_zenodo(): @@ -278,12 +206,12 @@ def test_get_files_from_zenodo(): assert z_record == "1321885" # test links file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" - assert links[0].find(file_link_prefix) != -1 + assert file_link_prefix in links[0] # test files dict - assert files[0]['url'].find(file_link_prefix) != -1 + assert file_link_prefix in files[0]['url'] assert files[0]['src'] == 'url' assert files[0]['info'] == ZENODO_LINK - assert files[0]['ext'].find("# Please add") != -1 + assert "# Please add" in files[0]['ext'] assert files[1]['ext'] == 'csv' @@ -333,8 +261,8 @@ def test_prepare_data_library(): assert datalib['items'][1]['items'][0]['name'] == new_tuto_title assert datalib['items'][1]['items'][0]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % z_record # clean - shutil.rmtree(topic_dir) - + shutil.rmtree("topics") +# def test_prepare_data_library_from_zenodo(): """Test :func:`planemo.training.prepare_data_library_from_zenodo`.""" @@ -346,13 +274,13 @@ def test_prepare_data_library_from_zenodo(): assert len(links) == 0 assert not os.path.exists(datalib_fp) # test prepare_data_library_from_zenodo with a zenodo link - kwds['zenodo'] = ZENODO_LINK + kwds['zenodo_link'] = ZENODO_LINK links = training.prepare_data_library_from_zenodo(kwds, tuto_dir) file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" - assert links[0].find(file_link_prefix) != -1 + assert file_link_prefix in links[0] assert os.path.exists(datalib_fp) # clean - shutil.rmtree(topic_dir) + shutil.rmtree("topics") def test_get_tool_input(): @@ -465,10 +393,10 @@ def test_format_inputs(): step_inputs = step['input_connections'] tool = tools[step['name']] inputlist = training.format_inputs(step_inputs, tool['input_file'], wf['steps'], 1) - assert inputlist.find('param-collection ') != -1 - assert inputlist.find('Input dataset collection') != -1 + assert 'param-collection ' in inputlist + assert 'Input dataset collection' in inputlist inputlist = training.format_inputs(step_inputs, tool['contaminants'], wf['steps'], 1) - assert inputlist.find('param-file ') != -1 + assert 'param-file ' in inputlist def test_get_wf_step_inputs(): @@ -507,8 +435,7 @@ def test_get_lower_params(): step_params = {'name': '1'} assert 'name' in training.get_lower_params(step_params, 'n1') assert training.get_lower_params(step_params, 'name') == '1' - print(training.get_lower_params('{"name": "1"}', 'n1')) - assert 'name' in training.get_lower_params('{"name": "1"}', 'name') + assert 'name' not in training.get_lower_params('{"name": "1"}', 'name') assert 'name' in training.get_lower_params('name:1', 'name') @@ -524,7 +451,7 @@ def test_format_section_param_desc(): wf, tools = get_wf_a_tools() step = wf['steps']['4'] step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = json.loads(step['tool_state']) + step_params = training.get_lower_params(step, 'tool_state') tp_desc = tools[step['name']]['add_to_database'] section_paramlist = training.format_section_param_desc( step_params, @@ -532,9 +459,9 @@ def test_format_section_param_desc(): tp_desc, 0, wf['steps']) - assert section_paramlist.find('In *"Add tables to an existing database"*') != -1 - assert section_paramlist.find('icon param-collection') != -1 - assert section_paramlist.find('Input dataset collection') != -1 + assert 'In *"Add tables to an existing database"*' in section_paramlist + assert 'icon param-collection' in section_paramlist + assert 'Input dataset collection' in section_paramlist def test_format_conditional_param_desc(): @@ -542,40 +469,41 @@ def test_format_conditional_param_desc(): wf, tools = get_wf_a_tools() step = wf['steps']['4'] step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = json.loads(step['tool_state']) + step_params = training.get_lower_params(step, 'tool_state') tp_desc = tools[step['name']]['query_result'] - section_paramlist = training.format_conditional_param_desc( + conditional_paramlist = training.format_conditional_param_desc( step_params, step_inputs, tp_desc, 0, wf['steps']) - print(section_paramlist) - assert section_paramlist.find('column headers') != -1 - assert section_paramlist.find('`Yes`') != -1 - assert section_paramlist.find('column_header line') != -1 + print(step_params) + print(tp_desc) + print(conditional_paramlist) + assert 'column headers' in conditional_paramlist + assert '`Yes`' in conditional_paramlist + assert 'column_header line' in conditional_paramlist def test_format_repeat_param_desc(): """Test :func:`planemo.training.format_repeat_param_desc`.""" wf, tools = get_wf_a_tools() step = wf['steps']['4'] - step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = json.loads(step['tool_state']) tp_desc = tools[step['name']]['tables'] + step_inputs = training.get_wf_step_inputs(step['input_connections']) + step_params = training.get_lower_params(step, 'tool_state') repeat_paramlist = training.format_repeat_param_desc( step_params, step_inputs, tp_desc, 0, wf['steps']) - print(repeat_paramlist) - assert repeat_paramlist.find('Click on *"Insert Database Table"*') != -1 - assert repeat_paramlist.find('In *"1: Database Table"*') != -1 - assert repeat_paramlist.find('In *"1: Database Table"*') != -1 - assert repeat_paramlist.find('Click on *"Insert Filter Tabular Input Lines"*') != -1 - assert repeat_paramlist.find('In *"1: Filter Tabular Input Lines"*:') != -1 - assert repeat_paramlist.find('In *"2: Database Table"*:') != -1 + assert 'Click on *"Insert Database Table"*' in repeat_paramlist + assert 'In *"1: Database Table"*' in repeat_paramlist + assert 'In *"1: Database Table"*' in repeat_paramlist + assert 'Click on *"Insert Filter Tabular Input Lines"*' in repeat_paramlist + assert 'In *"1: Filter Tabular Input Lines"*:' in repeat_paramlist + assert 'In *"2: Database Table"*:' in repeat_paramlist def test_get_param_value(): @@ -606,7 +534,7 @@ def test_format_param_desc(): wf, tools = get_wf_a_tools() step = wf['steps']['4'] step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = json.loads(step['tool_state']) + step_params = training.get_lower_params(step, 'tool_state') # test section (add_to_database) n = 'add_to_database' tp_desc = tools[step['name']][n] @@ -618,7 +546,7 @@ def test_format_param_desc(): 0, wf['steps'], force_default=False) - assert paramlist.find('In *"Add tables to an existing database"*') != -1 + assert 'In *"Add tables to an existing database"*' in paramlist # test repeat (tables) n = 'tables' tp_desc = tools[step['name']][n] @@ -630,11 +558,11 @@ def test_format_param_desc(): 0, wf['steps'], force_default=False) - assert paramlist.find('In *"1: Filter Tabular Input Lines"*:') != -1 + assert 'In *"1: Filter Tabular Input Lines"*:' in paramlist # test boolean (save_db) n = 'save_db' tp_desc = tools[step['name']][n] - step_param = training.get_lower_params(step_params, n) + step_param = 'true' paramlist = training.format_param_desc( step_param, step_inputs, @@ -654,7 +582,7 @@ def test_format_param_desc(): 0, wf['steps'], force_default=False) - assert paramlist.find('Prefix character') != -1 + assert 'Prefix character' in paramlist # no type exp_exception = "No type for the paramater name" with assert_raises_regexp(Exception, exp_exception): @@ -672,7 +600,7 @@ def test_get_param_desc(): wf, tools = get_wf_a_tools() step_3 = wf['steps']['3'] step_inputs = training.get_wf_step_inputs(step_3['input_connections']) - step_params = json.loads(step_3['tool_state']) + step_params = training.get_lower_params(step_3, 'tool_state') # not in workflow and should be there step_4 = wf['steps']['4'] tp_desc = tools[step_4['name']] @@ -720,8 +648,49 @@ def test_get_handson_box(): assert hand_boxes == '' -def test_create_tutorial_from_workflow(): - """Test :func:`planemo.training.create_tutorial_from_workflow`.""" +def test_init_tuto_metadata(): + """Test :func:`planemo.training.init_tuto_metadata`.""" + kwds, topic_dir, tuto_dir = prepare_test() + metadata = training.init_tuto_metadata(kwds) + assert metadata['title'] == kwds["tutorial_title"] + assert "contributor1" in metadata['contributors'] +# + +def test_get_tuto_body(): + """Test :func:`planemo.training.get_tuto_body`.""" + z_file_links = ['URL1', 'URL2'] + # with body + body = 'the body' + init_body = training.get_tuto_body(z_file_links, body) + assert "General introduction about the topic" in init_body + assert body in init_body + assert "> URL1" in init_body + # without body + init_body = training.get_tuto_body(z_file_links) + assert "Sub-step with **My Tool**" in init_body + assert "> 1. **My Tool** {% icon tool %} with the following parameters:" in init_body + assert '> - {% icon param-file %} *"Input file"*: File' in init_body + assert '> - *"Parameter"*: `a value`' in init_body + + +def test_write_hands_on_tutorial(): + """Test :func:`planemo.training.write_hands_on_tutorial`.""" + kwds, topic_dir, tuto_dir = prepare_test() + os.makedirs(tuto_dir) + metadata = training.init_tuto_metadata(kwds) + body = '' + training.write_hands_on_tutorial(metadata, body, tuto_dir) + + tuto_fp = os.path.join(tuto_dir, "tutorial.md") + assert os.path.exists(tuto_fp) + with open(tuto_fp, "r") as tuto_f: + tuto_content = tuto_f.read() + assert "layout: tutorial_hands_on" in tuto_content + assert kwds["tutorial_title"] in tuto_content + + +def test_create_hands_on_tutorial_from_workflow(): + """Test :func:`planemo.training.create_hands_on_tutorial_from_workflow`.""" kwds, topic_dir, tuto_dir = prepare_test() os.makedirs(tuto_dir) assert is_galaxy_engine(**kwds) @@ -731,14 +700,13 @@ def test_create_tutorial_from_workflow(): workflow_id = config.workflow_id(WF_FP) kwds['workflow_id'] = workflow_id kwds['galaxy_api_key'] = config.user_api_key - training.create_tutorial_from_workflow(kwds, '', tuto_dir, CTX) + training.create_hands_on_tutorial_from_workflow(kwds, '', tuto_dir, CTX) # tests tuto_path = os.path.join(tuto_dir, "tutorial.md") assert os.path.exists(tuto_path) with open(tuto_path, 'r') as tuto: tuto_content = tuto.read() - assert 'topic_name: my_new_topic' in tuto_content - assert 'tutorial_name: new_tuto' in tuto_content + assert kwds["tutorial_title"] in tuto_content assert '> ### Agenda' in tuto_content assert '## Get data' in tuto_content assert '{% icon tool %} with the following parameters:' in tuto_content @@ -776,152 +744,195 @@ def test_add_workflow_file(): def test_create_tutorial(): """Test :func:`planemo.training.create_tutorial`.""" kwds, topic_dir, tuto_dir = prepare_test() - kwds["templates"] = TRAINING_TEMPLATE_DIR - topic_template_dir = training.get_template_dir(kwds) - metadata_fp = os.path.join(topic_dir, 'metadata.yaml') tuto_fp = os.path.join(tuto_dir, 'tutorial.md') + slide_fp = os.path.join(tuto_dir, 'slides.html') data_library_fp = os.path.join(tuto_dir, 'data-library.yaml') + tour_folder = os.path.join(tuto_dir, "tours") + workflow_folder = os.path.join(tuto_dir, "workflows") # wo zenodo and wo workflow - training.create_topic(kwds, topic_dir, topic_template_dir) - tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") - training.create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, CTX) - with open(metadata_fp, 'r') as metadata: - metadata_content = metadata.read() - assert 'name: new_tuto' in metadata_content - assert "zenodo_link: ''" in metadata_content - assert 'workflows: false' in metadata_content - assert '**My Tool** {% icon tool %}' in open(tuto_fp, 'r').read() - assert 'name: "Small test files"' in open(data_library_fp, 'r').read() - shutil.rmtree(topic_dir) + kwds["workflow"] = None + kwds["workflow_id"] = None + kwds["zenodo_link"] = None + kwds["slides"] = False + training.create_topic(kwds, topic_dir) + training.create_tutorial(kwds, tuto_dir, CTX) + assert os.path.exists(tuto_fp) + assert os.path.exists(tour_folder) + assert os.path.exists(workflow_folder) + assert not os.path.exists(data_library_fp) + with open(tuto_fp, 'r') as tuto_f: + tuto_content = tuto_f.read() + assert kwds["tutorial_title"] in tuto_content + assert "zenodo_link: ''" in tuto_content + assert '**My Tool** {% icon tool %}' in tuto_content + shutil.rmtree("topics") shutil.rmtree("metadata") # w zenodo and wo workflow - kwds["zenodo"] = ZENODO_LINK - training.create_topic(kwds, topic_dir, topic_template_dir) - tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") - training.create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, CTX) - with open(metadata_fp, 'r') as metadata: - metadata_content = metadata.read() - assert 'name: new_tuto' in metadata_content - assert 'zenodo_link: %s' % ZENODO_LINK in metadata_content - assert 'workflows: false' in metadata_content - assert '**My Tool** {% icon tool %}' in open(tuto_fp, 'r').read() + kwds["workflow"] = None + kwds["workflow_id"] = None + kwds["zenodo_link"] = ZENODO_LINK + kwds["slides"] = False + training.create_topic(kwds, topic_dir) + training.create_tutorial(kwds, tuto_dir, CTX) + with open(tuto_fp, 'r') as tuto_f: + tuto_content = tuto_f.read() + assert kwds["tutorial_title"] in tuto_content + assert 'zenodo_link: %s' % ZENODO_LINK in tuto_content + assert '**My Tool** {% icon tool %}' in tuto_content + assert os.path.exists(data_library_fp) assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() - shutil.rmtree(topic_dir) + shutil.rmtree("topics") shutil.rmtree("metadata") # w zenodo and w workflow kwds["workflow"] = WF_FP - training.create_topic(kwds, topic_dir, topic_template_dir) - tuto_template_dir = os.path.join(topic_template_dir, "tutorials", "tutorial1") - training.create_tutorial(kwds, tuto_dir, topic_dir, tuto_template_dir, CTX) - with open(metadata_fp, 'r') as metadata: - metadata_content = metadata.read() - assert 'name: new_tuto' in metadata_content - assert 'zenodo_link: %s' % ZENODO_LINK in metadata_content - assert 'workflows: true' in metadata_content - assert '**FastQC** {% icon tool %} with the following parameters:' in open(tuto_fp, 'r').read() + kwds["workflow_id"] = None + kwds["zenodo_link"] = ZENODO_LINK + kwds["slides"] = False + training.create_topic(kwds, topic_dir) + training.create_tutorial(kwds, tuto_dir, CTX) + with open(tuto_fp, 'r') as tuto_f: + tuto_content = tuto_f.read() + assert kwds["tutorial_title"] in tuto_content + assert 'zenodo_link: %s' % ZENODO_LINK in tuto_content + assert '**FastQC** {% icon tool %} with the following parameters:' in tuto_content assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() assert os.path.exists(os.path.join(tuto_dir, 'workflows', 'init_workflow.ga')) - shutil.rmtree(topic_dir) + shutil.rmtree("topics") + shutil.rmtree("metadata") + # w slides + kwds["hands_on"] = False + kwds["workflow"] = None + kwds["workflow_id"] = None + kwds["zenodo_link"] = None + kwds["slides"] = True + training.create_topic(kwds, topic_dir) + training.create_tutorial(kwds, tuto_dir, CTX) + assert not os.path.exists(tuto_fp) + assert os.path.exists(slide_fp) + with open(slide_fp, 'r') as slide_f: + slide_content = slide_f.read() + assert kwds["tutorial_title"] in slide_content + assert 'layout: tutorial_slides' in slide_content + shutil.rmtree("topics") shutil.rmtree("metadata") def test_init(): """Test :func:`planemo.training.init`.""" kwds, topic_dir, tuto_dir = prepare_test() - kwds["templates"] = TRAINING_TEMPLATE_DIR - topic_dir = os.path.join('topics', topic_dir) - tuto_dir = os.path.join('topics', tuto_dir) metadata_fp = os.path.join(topic_dir, 'metadata.yaml') + tuto_fp = os.path.join(tuto_dir, "tutorial.md") tuto_name = kwds['tutorial_name'] - # new topic, no tutorial name but workflow + # new topic kwds['tutorial_name'] = None + kwds['slides'] = False + kwds['workflow'] = None + kwds['zenodo_link'] = None + kwds['workflow_id'] = None + training.init(CTX, kwds) + assert os.path.exists(metadata_fp) + assert not os.path.exists(tuto_fp) + # no new topic, no tutorial name but hands-on + kwds['slides'] = True + exp_exception = "A tutorial name is needed to create the skeleton of a tutorial slide deck" + with assert_raises_regexp(Exception, exp_exception): + training.init(CTX, kwds) + # no new topic, no tutorial name but workflow kwds['workflow'] = WF_FP + kwds['slides'] = False exp_exception = "A tutorial name is needed to create the skeleton of the tutorial from a workflow" with assert_raises_regexp(Exception, exp_exception): training.init(CTX, kwds) # no new topic, no tutorial name but zenodo kwds['workflow'] = None - kwds['zenodo'] = ZENODO_LINK + kwds['zenodo_link'] = ZENODO_LINK exp_exception = "A tutorial name is needed to add Zenodo information" with assert_raises_regexp(Exception, exp_exception): training.init(CTX, kwds) # no new topic, new tutorial kwds['tutorial_name'] = tuto_name kwds['workflow'] = None - kwds['zenodo'] = None - training.init(CTX, kwds) - assert kwds['tutorial_title'] in open(metadata_fp, 'r').read() - # no new topic, update tutorial - kwds['tutorial_title'] = 'Totally new tutorial title' + kwds['zenodo_link'] = None training.init(CTX, kwds) - assert 'Totally new tutorial title' in open(metadata_fp, 'r').read() + assert os.path.exists(tuto_fp) + assert kwds['tutorial_title'] in open(tuto_fp, 'r').read() # clean after - shutil.rmtree(topic_dir) + shutil.rmtree("topics") shutil.rmtree("metadata") -def test_prepare_tuto_update(): - """Test :func:`planemo.training.prepare_tuto_update`.""" +def test_get_tuto_info(): + """Test :func:`planemo.training.get_tuto_info`.""" + metadata, body = training.get_tuto_info(TEST_DATA_DIR) + assert "A tutorial to test" in metadata["title"] + assert "https://zenodo.org" in metadata["zenodo_link"] + assert "What is the purpose of the tutorial?" in metadata["questions"][0] + assert "A learning objective" in metadata["objectives"][0] + assert "Take home message" in metadata["key_points"][0] + assert "the_best_contributor" in metadata["contributors"][0] + assert "# First section" in body + + +def test_check_topic_tuto_exist(): + """Test :func:`planemo.training.check_topic_tuto_exist`.""" kwds, topic_dir, tuto_dir = prepare_test() - new_topic_name = 'a_topic' - topic_dir = os.path.join('topics', new_topic_name) - # non existing topic - kwds['topic_name'] = new_topic_name - exp_exception = "The topic %s does not exists. It should be created" % new_topic_name + # no topic + exp_exception = "The topic my_new_topic does not exists. It should be created" with assert_raises_regexp(Exception, exp_exception): - training.prepare_tuto_update(kwds) - # non existing tutorial - kwds["templates"] = TRAINING_TEMPLATE_DIR - topic_template_dir = training.get_template_dir(kwds) - training.create_topic(kwds, topic_dir, topic_template_dir) + training.check_topic_tuto_exist(kwds) + # no tutorial + training.create_topic(kwds, topic_dir) exp_exception = "The tutorial new_tuto does not exists. It should be created" with assert_raises_regexp(Exception, exp_exception): - training.prepare_tuto_update(kwds) + training.check_topic_tuto_exist(kwds) + # both exist + training.create_tutorial(kwds, tuto_dir, CTX) + n_topic_dir, n_tuto_dir = training.check_topic_tuto_exist(kwds) + assert n_topic_dir == topic_dir + assert n_tuto_dir == tuto_dir + # clean after + shutil.rmtree("topics") + shutil.rmtree("metadata") def test_fill_data_library(): """Test :func:`planemo.training.fill_data_library`.""" kwds, topic_dir, tuto_dir = prepare_test() - kwds["templates"] = TRAINING_TEMPLATE_DIR - topic_dir = os.path.join('topics', topic_dir) - tuto_dir = os.path.join(topic_dir, 'tutorials', 'new_tuto') training.init(CTX, kwds) - metadata_fp = os.path.join(topic_dir, 'metadata.yaml') data_library_fp = os.path.join(tuto_dir, 'data-library.yaml') + tuto_fp = os.path.join(tuto_dir, 'tutorial.md') # no Zenodo link - kwds['zenodo'] = None + kwds['zenodo_link'] = None kwds['workflow'] = None exp_exception = "A Zenodo link should be provided either in the metadata file or as argument of the command" with assert_raises_regexp(Exception, exp_exception): training.fill_data_library(CTX, kwds) # with a given Zenodo link and no Zenodo in metadata - kwds['zenodo'] = ZENODO_LINK + kwds['zenodo_link'] = ZENODO_LINK training.fill_data_library(CTX, kwds) assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() - assert 'zenodo_link: %s' % ZENODO_LINK in open(metadata_fp, 'r').read() + assert 'zenodo_link: %s' % ZENODO_LINK in open(tuto_fp, 'r').read() # with a given Zenodo link and Zenodo in metadata new_z_link = 'https://zenodo.org/record/1324204' - kwds['zenodo'] = new_z_link + kwds['zenodo_link'] = new_z_link training.fill_data_library(CTX, kwds) assert 'DOI: 10.5281/zenodo.1324204' in open(data_library_fp, 'r').read() - assert 'zenodo_link: %s' % new_z_link in open(metadata_fp, 'r').read() + assert 'zenodo_link: %s' % new_z_link in open(tuto_fp, 'r').read() # with no given Zenodo link - kwds['zenodo'] = None + kwds['zenodo_link'] = None training.fill_data_library(CTX, kwds) assert 'DOI: 10.5281/zenodo.1324204' in open(data_library_fp, 'r').read() - assert 'zenodo_link: %s' % new_z_link in open(metadata_fp, 'r').read() + assert 'zenodo_link: %s' % new_z_link in open(tuto_fp, 'r').read() # clean after - shutil.rmtree(topic_dir) + shutil.rmtree("topics") shutil.rmtree("metadata") def test_generate_tuto_from_wf(): """Test :func:`planemo.training.generate_tuto_from_wf`.""" kwds, topic_dir, tuto_dir = prepare_test() - topic_dir = os.path.join('topics', topic_dir) - tuto_dir = os.path.join(topic_dir, 'tutorials', 'new_tuto') - kwds["templates"] = TRAINING_TEMPLATE_DIR training.init(CTX, kwds) + tuto_fp = os.path.join(tuto_dir, 'tutorial.md') # no workflow kwds['workflow'] = None exp_exception = "A path to a local workflow or the id of a workflow on a running Galaxy instance should be provided" @@ -930,8 +941,8 @@ def test_generate_tuto_from_wf(): # with workflow kwds['workflow'] = WF_FP training.generate_tuto_from_wf(CTX, kwds) - tuto_fp = os.path.join(tuto_dir, 'tutorial.md') - metadata_fp = os.path.join(topic_dir, 'metadata.yaml') - assert 'workflows: true' in open(metadata_fp, 'r').read() assert '**FastQC** {% icon tool %} with the following parameters:' in open(tuto_fp, 'r').read() assert os.path.exists(os.path.join(tuto_dir, 'workflows', 'init_workflow.ga')) + # clean after + shutil.rmtree("topics") + shutil.rmtree("metadata") From 6713909fca64dd8069e11c18d62006e6024ee55d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 4 Sep 2018 17:19:18 +0200 Subject: [PATCH 24/26] Restructuration of the code with classes --- .../cmd_training_fill_data_library.py | 5 +- .../commands/cmd_training_generate_from_wf.py | 5 +- planemo/commands/cmd_training_init.py | 5 +- planemo/options.py | 7 +- planemo/training.py | 1045 ---------------- planemo/training/__init__.py | 94 ++ planemo/training/tool_input.py | 234 ++++ planemo/training/topic.py | 240 ++++ planemo/training/tutorial.py | 601 +++++++++ planemo/training/utils.py | 76 ++ setup.py | 1 + tests/data/training_metadata.yaml | 13 + tests/data/training_metadata_w_zenodo.yaml | 42 - tests/data/training_query_tabular.json | 893 ++++++++++++++ tests/data/training_tutorial.md | 2 +- tests/data/training_tutorial_wo_zenodo.md | 35 + tests/data/training_wf_param_values.json | 65 + tests/test_cmd_training_fill_data_library.py | 25 +- tests/test_cmd_training_generate_from_wf.py | 29 +- tests/test_cmd_training_init.py | 45 +- tests/test_training.py | 1069 +++-------------- tests/test_training_tool_input.py | 292 +++++ tests/test_training_topic.py | 156 +++ tests/test_training_tutorial.py | 440 +++++++ tests/test_training_utils.py | 109 ++ 25 files changed, 3471 insertions(+), 2057 deletions(-) delete mode 100644 planemo/training.py create mode 100644 planemo/training/__init__.py create mode 100644 planemo/training/tool_input.py create mode 100644 planemo/training/topic.py create mode 100644 planemo/training/tutorial.py create mode 100644 planemo/training/utils.py create mode 100644 tests/data/training_metadata.yaml delete mode 100644 tests/data/training_metadata_w_zenodo.yaml create mode 100644 tests/data/training_query_tabular.json create mode 100644 tests/data/training_tutorial_wo_zenodo.md create mode 100644 tests/data/training_wf_param_values.json create mode 100644 tests/test_training_tool_input.py create mode 100644 tests/test_training_topic.py create mode 100644 tests/test_training_tutorial.py create mode 100644 tests/test_training_utils.py diff --git a/planemo/commands/cmd_training_fill_data_library.py b/planemo/commands/cmd_training_fill_data_library.py index 77ca5d76f..d0d03db1a 100644 --- a/planemo/commands/cmd_training_fill_data_library.py +++ b/planemo/commands/cmd_training_fill_data_library.py @@ -2,8 +2,8 @@ import click from planemo import options -from planemo import training from planemo.cli import command_function +from planemo.training import Training @click.command('training_fill_data_library') @@ -13,4 +13,5 @@ def cli(ctx, uris, **kwds): """Build training template from workflow.""" kwds["no_dependency_resolution"] = True - training.fill_data_library(ctx, kwds) + training = Training(kwds) + training.fill_data_library(ctx) diff --git a/planemo/commands/cmd_training_generate_from_wf.py b/planemo/commands/cmd_training_generate_from_wf.py index ceb10b7d5..8896de3dd 100644 --- a/planemo/commands/cmd_training_generate_from_wf.py +++ b/planemo/commands/cmd_training_generate_from_wf.py @@ -3,8 +3,8 @@ import click from planemo import options -from planemo import training from planemo.cli import command_function +from planemo.training import Training @click.command('training_generate_from_wf') @@ -15,4 +15,5 @@ def cli(ctx, uris, **kwds): """Create tutorial skeleton from workflow.""" kwds["no_dependency_resolution"] = True - training.generate_tuto_from_wf(ctx, kwds) + training = Training(kwds) + training.generate_tuto_from_wf(ctx) diff --git a/planemo/commands/cmd_training_init.py b/planemo/commands/cmd_training_init.py index eec41e2bd..bb105ab0d 100644 --- a/planemo/commands/cmd_training_init.py +++ b/planemo/commands/cmd_training_init.py @@ -3,8 +3,8 @@ import click from planemo import options -from planemo import training from planemo.cli import command_function +from planemo.training import Training @click.command('training_init') @@ -16,4 +16,5 @@ def cli(ctx, uris, **kwds): """Build training template from workflow.""" kwds["no_dependency_resolution"] = True - training.init(ctx, kwds) + training = Training(kwds) + training.init_training(ctx) diff --git a/planemo/options.py b/planemo/options.py index 98fde53a3..9dbec8f64 100644 --- a/planemo/options.py +++ b/planemo/options.py @@ -1147,12 +1147,7 @@ def training_topic_option(): "--topic_target", type=click.Choice(['use', 'admin-dev', 'instructors']), default="use", - help="Target audience for the topic"), - planemo_option( - "--templates", - type=click.Path(file_okay=True, resolve_path=True), - default="templates", - help="Directory with the training templates") + help="Target audience for the topic") ) diff --git a/planemo/training.py b/planemo/training.py deleted file mode 100644 index 03660241c..000000000 --- a/planemo/training.py +++ /dev/null @@ -1,1045 +0,0 @@ -"""gtdk: Galaxy training development kit.""" - -import collections -import json -import os -import re -import shutil - -import oyaml as yaml -import requests - -from planemo import templates -from planemo.bioblend import galaxy -from planemo.engine import ( - engine_context, - is_galaxy_engine, -) -from planemo.io import info -from planemo.runnable import for_path - - -INDEX_FILE_TEMPLATE = """--- -layout: topic -topic_name: {{ topic }} ---- -""" - - -README_FILE_TEMPLATE = """ -{{ topic }} -========== - -Please refer to the [CONTRIBUTING.md](../../CONTRIBUTING.md) before adding or updating any material -""" - - -DOCKER_FILE_TEMPLATE = """ -# Galaxy - {{ topic_title }} -# -# to build the docker image, go to root of training repo and -# docker build -t {{ topic_name }} -f topics/{{ topic_name }}/docker/Dockerfile . -# -# to run image: -# docker run -p "8080:80" -t {{ topic_name }} - -FROM bgruening/galaxy-stable - -MAINTAINER Galaxy Training Material - -ENV GALAXY_CONFIG_BRAND "GTN: {{ topic_title }}" - -# prerequisites -RUN pip install ephemeris -U -ADD bin/galaxy-sleep.py /galaxy-sleep.py - -# copy the tutorials directory for your topic -ADD topics/{{ topic_name }}/tutorials/ /tutorials/ - -# install everything for tutorials -ADD bin/docker-install-tutorials.sh /setup-tutorials.sh -ADD bin/mergeyaml.py /mergeyaml.py -RUN /setup-tutorials.sh -""" - - -INTRO_SLIDES_FILE_TEMPLATE = """--- -layout: introduction_slides -logo: "GTN" - -title: {{ title }} -type: {{ type }} -contributors: -- contributor ---- - -### How to fill the slide decks? - -Please follow our -[tutorial to learn how to fill the slides]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) -""" - -TUTO_SLIDES_TEMPLATE = """--- -layout: tutorial_slides -logo: "GTN" - -{{ metadata }} ---- - -### How to fill the slide decks? - -Please follow our -[tutorial to learn how to fill the slides]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) -""" - - -TUTO_HAND_ON_TEMPLATE = """--- -layout: tutorial_hands_on - -{{ metadata }} ---- - -{{ body }} -""" - - -INPUT_FILE_TEMPLATE = """ ->{{space}}- {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} -""" - -INPUT_SECTION = """ ->{{space}}- In *"{{section_label}}"*: -""" - -INPUT_ADD_REPEAT = """ ->{{space}}- Click on *"Insert {{repeat_label}}"*: -""" - -INPUT_PARAM = """ ->{{space}}- *"{{param_label}}"*: `{{param_value}}` -""" - -HANDS_ON_TOOL_BOX_TEMPLATE = """ -## Sub-step with **{{tool_name}}** - -> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Task description -> -> 1. **{{tool_name}}** {{ '{%' }} icon tool {{ '%}' }} with the following parameters:{{inputlist}}{{paramlist}} -> -> ***TODO***: *Check parameter descriptions* -> -> ***TODO***: *Consider adding a comment or tip box* -> -> > ### {{ '{%' }} icon comment {{ '%}' }} Comment -> > -> > A comment about the tool or something else. This box can also be in the main text -> {: .comment} -> -{: .hands_on} - -***TODO***: *Consider adding a question to test the learners understanding of the previous exercise* - -> ### {{ '{%' }} icon question {{ '%}' }} Questions -> -> 1. Question1? -> 2. Question2? -> -> > ### {{ '{%' }} icon solution {{ '%}' }} Solution -> > -> > 1. Answer for question1 -> > 2. Answer for question2 -> > -> {: .solution} -> -{: .question} - -""" - -TUTO_HAND_ON_BODY_TEMPLATE = """ -# Introduction -{:.no_toc} - - - -General introduction about the topic and then an introduction of the -tutorial (the questions and the objectives). It is nice also to have a -scheme to sum up the pipeline used during the tutorial. The idea is to -give to trainees insight into the content of the tutorial and the (theoretical -and technical) key concepts they will learn. - -**Please follow our -[tutorial to learn how to fill the Markdown]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/\ -create-new-tutorial-content/tutorial.html)** - -> ### Agenda -> -> In this tutorial, we will cover: -> -> 1. TOC -> {:toc} -> -{: .agenda} - -# Title for your first section - -Give some background about what the trainees will be doing in the section. - -Below are a series of hand-on boxes, one for each tool in your workflow file. -Often you may wish to combine several boxes into one or make other adjustments such -as breaking the tutorial into sections, we encourage you to make such changes as you -see fit, this is just a starting point :) - -Anywhere you find the word "***TODO***", there is something that needs to be changed -depending on the specifics of your tutorial. - -have fun! - -## Get data - -> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Data upload -> -> 1. Create a new history for this tutorial -> 2. Import the files from [Zenodo]({{ zenodo_link }}) or from the shared data library -> -> ``` -> {{ z_file_links }} -> ``` -> ***TODO***: *Add the files by the ones on Zenodo here (if not added)* -> -> ***TODO***: *Remove the useless files (if added)* -> -> {{ '{%' }} include snippets/import_via_link.md {{ '%}' }} -> {{ '{%' }} include snippets/import_from_data_library.md {{ '%}' }} -> -> 3. Rename the datasets -> 4. Check that the datatype -> -> {{ '{%' }} include snippets/change_datatype.md datatype="datatypes" {{ '%}' }} -> -> 5. Add to each database a tag corresponding to ... -> -> {{ '{%' }} include snippets/add_tag.md {{ '%}' }} -> -{: .hands_on} - -# Title of the section usually corresponding to a big step in the analysis - -It comes first a description of the step: some background and some theory. -Some image can be added there to support the theory explanation: - -![Alternative text](../../images/image_name "Legend of the image") - -The idea is to keep the theory description before quite simple to focus more on the practical part. - -***TODO***: *Consider adding a detail box to expand the theory* - -> ### {{ '{%' }} icon details {{ '%}' }} More details about the theory -> -> But to describe more details, it is possible to use the detail boxes which are expandable -> -{: .details} - -A big step can have several subsections or sub steps: - -{{ body }} - -## Re-arrange - -To create the template, each step of the workflow had its own subsection. - -***TODO***: *Re-arrange the generated subsections into sections or other subsections. -Consider merging some hands-on boxes to have a meaningful flow of the analyses* - -# Conclusion -{:.no_toc} - -Sum up the tutorial and the key takeaways here. We encourage adding an overview image of the -pipeline used. -""" - -SPACE = ' ' - - -def load_yaml(filepath): - """Load the content of a YAML file to a dictionary.""" - with open(filepath, "r") as m_file: - content = yaml.load(m_file) - return content - - -def save_to_yaml(content, filepath): - """Save a dictionary to a YAML file.""" - with open(filepath, 'w') as stream: - yaml.safe_dump(content, - stream, - indent=2, - default_flow_style=False, - default_style='', - explicit_start=True, - encoding='utf-8', - allow_unicode=True) - - -def create_topic(kwds, topic_dir): - """ - Create the skeleton of a new topic. - - 1. create the folder and its structure - 2. update the index.md to match your topic's name - 3. fill the metadata - 4. add a symbolic link to the metadata.yaml from the metadata folder - """ - # create the folder and its structure - os.makedirs(topic_dir) - img_folder = os.path.join(topic_dir, "images") - os.makedirs(img_folder) - tuto_folder = os.path.join(topic_dir, "tutorials") - os.makedirs(tuto_folder) - - # create the index.md and add the topic name - index_fp = os.path.join(topic_dir, "index.md") - with open(index_fp, 'w') as index_f: - index_f.write( - templates.render(INDEX_FILE_TEMPLATE, **{'topic': kwds["topic_name"]})) - - # create the README file - readme_fp = os.path.join(topic_dir, "README.md") - with open(readme_fp, 'w') as readme_f: - readme_f.write( - templates.render(README_FILE_TEMPLATE, **{'topic': kwds["topic_title"]})) - - # create the metadata file - metadata_fp = os.path.join(topic_dir, "metadata.yaml") - metadata = collections.OrderedDict() - metadata['name'] = kwds["topic_name"] - metadata['type'] = kwds["topic_target"] - metadata['title'] = kwds["topic_title"] - metadata['summary'] = kwds["topic_summary"] - metadata['requirements'] = [] - if metadata['type'] == 'use': - req = collections.OrderedDict() - req['title'] = "Galaxy introduction" - req['type'] = "internal" - req['link'] = "/introduction/" - metadata['requirements'].append(req) - metadata['docker_image'] = "" - metadata['maintainers'] = ["maintainer"] - if metadata['type'] == 'use': - metadata['references'] = [] - ref = collections.OrderedDict() - ref['authors'] = "authors et al" - ref['title'] = "the title" - ref['link'] = "link" - ref['summary'] = "A short explanation of why this reference is useful" - metadata['references'].append(ref) - save_to_yaml(metadata, metadata_fp) - - # add a symbolic link to the metadata.yaml - metadata_dir = "metadata" - if not os.path.isdir(metadata_dir): - os.makedirs(metadata_dir) - os.chdir(metadata_dir) - os.symlink(os.path.join("..", metadata_fp), "%s.yaml" % kwds["topic_name"]) - os.chdir("..") - - # create Dockerfile - docker_folder = os.path.join(topic_dir, "docker") - os.makedirs(docker_folder) - dockerfile_fp = os.path.join(docker_folder, "Dockerfile") - with open(dockerfile_fp, 'w') as dockerfile: - dockerfile.write( - templates.render( - DOCKER_FILE_TEMPLATE, - **{'topic_name': kwds["topic_name"], 'topic_title': kwds["topic_title"]})) - - # create empty introduction slides - slides_folder = os.path.join(topic_dir, "slides") - os.makedirs(slides_folder) - intro_slide_fp = os.path.join(slides_folder, "introduction.html") - with open(intro_slide_fp, 'w') as intro_slide_f: - intro_slide_f.write( - templates.render( - INTRO_SLIDES_FILE_TEMPLATE, - **{'title': "Introduction to %s" % kwds["topic_title"], 'type': "introduction"})) - - -def get_zenodo_record(zenodo_link): - """Get the content of a Zenodo record.""" - # get the record in the Zenodo link - if 'doi' in zenodo_link: - z_record = zenodo_link.split('.')[-1] - else: - z_record = zenodo_link.split('/')[-1] - # get JSON corresponding to the record from Zenodo API - req = "https://zenodo.org/api/records/%s" % (z_record) - r = requests.get(req) - if r: - req_res = r.json() - else: - info("The Zenodo link (%s) seems invalid" % (zenodo_link)) - req_res = {'files': []} - z_record = None - return(z_record, req_res) - - -def get_galaxy_datatype(z_ext, datatype_fp): - """Get the Galaxy datatype corresponding to a Zenodo file type.""" - g_datatype = '' - datatypes = load_yaml(datatype_fp) - if z_ext in datatypes: - g_datatype = datatypes[z_ext] - if g_datatype == '': - g_datatype = '# Please add a Galaxy datatype or update the shared/datatypes.yaml file' - info("Get Galaxy datatypes: %s --> %s" % (z_ext, g_datatype)) - return g_datatype - - -def get_files_from_zenodo(z_link, datatype_fp): - """ - Extract a list of URLs and dictionary describing the files from the JSON output of the Zenodo API. - """ - z_record, req_res = get_zenodo_record(z_link) - - links = [] - if 'files' not in req_res: - raise ValueError("No files in the Zenodo record") - - files = [] - for f in req_res['files']: - file_dict = {'url': '', 'src': 'url', 'ext': '', 'info': z_link} - if 'type' in f: - file_dict['ext'] = get_galaxy_datatype(f['type'], datatype_fp) - if 'links' not in f and 'self' not in f['links']: - raise ValueError("No link for file %s" % f) - file_dict['url'] = f['links']['self'] - links.append(f['links']['self']) - files.append(file_dict) - - return (files, links, z_record) - - -def init_data_lib(data_lib_filepath): - """Init the data library dictionary.""" - if os.path.exists(data_lib_filepath): - data_lib = load_yaml(data_lib_filepath) - else: - data_lib = collections.OrderedDict() - # set default information - data_lib.setdefault('destination', collections.OrderedDict()) - data_lib['destination']['type'] = 'library' - data_lib['destination']['name'] = 'GTN - Material' - data_lib['destination']['description'] = 'Galaxy Training Network Material' - data_lib['destination']['synopsis'] = 'Galaxy Training Network Material. See https://training.galaxyproject.org' - data_lib.setdefault('items', []) - data_lib.pop('libraries', None) - return data_lib - - -def prepare_data_library(files, kwds, z_record, tuto_dir): - """Fill or update the data library file.""" - data_lib_filepath = os.path.join(tuto_dir, "data-library.yaml") - data_lib = init_data_lib(data_lib_filepath) - # get topic or create new one - topic = collections.OrderedDict() - for item in data_lib['items']: - if item['name'] == kwds['topic_title']: - topic = item - if not topic: - data_lib['items'].append(topic) - topic['name'] = kwds['topic_title'] - topic['description'] = kwds['topic_summary'] - topic['items'] = [] - # get tutorial or create new one - tuto = collections.OrderedDict() - for item in topic['items']: - if item['name'] == kwds['tutorial_title']: - tuto = item - if not tuto: - topic['items'].append(tuto) - tuto['name'] = kwds['tutorial_title'] - tuto['items'] = [] - # get current data library and/or previous data library for the tutorial - # remove the latest tag of any existing library - # remove the any other existing library - if z_record: - current_data_lib = collections.OrderedDict() - previous_data_lib = collections.OrderedDict() - for item in tuto['items']: - if item['name'] == "DOI: 10.5281/zenodo.%s" % z_record: - current_data_lib = item - elif item['description'] == 'latest': - previous_data_lib = item - previous_data_lib['description'] = '' - if not current_data_lib: - current_data_lib['name'] = "DOI: 10.5281/zenodo.%s" % z_record - current_data_lib['description'] = 'latest' - current_data_lib['items'] = [] - current_data_lib['items'] = files - - tuto['items'] = [current_data_lib] - if previous_data_lib: - tuto['items'].append(previous_data_lib) - - save_to_yaml(data_lib, data_lib_filepath) - - -def prepare_data_library_from_zenodo(kwds, tuto_dir): - """Get the list of URLs of the files on Zenodo and fill the data library file.""" - links = [] - if not kwds['zenodo_link']: - return links - files, links, z_record = get_files_from_zenodo(kwds['zenodo_link'], kwds['datatypes']) - prepare_data_library(files, kwds, z_record, tuto_dir) - return links - - -def get_tool_input(tool_desc): - """ - Get a dictionary with the tool descriptions. - - The labels are the tool parameter name and the value the description - of the parameter extracted from the show_tool function of bioblend - """ - tool_inp = collections.OrderedDict() - for inp in tool_desc["inputs"]: - tool_inp.setdefault(inp['name'], inp) - return tool_inp - - -def get_wf_tool_description(wf, gi): - """Get a dictionary with description of inputs of all tools in a workflow.""" - tools = {} - for s in wf['steps']: - step = wf['steps'][s] - if not step['input_connections']: - continue - try: - tool_desc = gi.tools.show_tool(step['tool_id'], io_details=True) - except Exception: - tool_desc = {'inputs': []} - tools.setdefault(step['name'], get_tool_input(tool_desc)) - return tools - - -def get_wf_tool_from_local_galaxy(kwds, wf_filepath, ctx): - """Server local Galaxy and get the workflow dictionary.""" - assert is_galaxy_engine(**kwds) - runnable = for_path(wf_filepath) - with engine_context(ctx, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([runnable]) as config: - workflow_id = config.workflow_id(wf_filepath) - wf = config.gi.workflows.export_workflow_dict(workflow_id) - tools = get_wf_tool_description(wf, config.gi) - return wf, tools - - -def get_wf_tools_from_running_galaxy(kwds): - """Get the workflow dictionary from a running Galaxy instance with the workflow installed on it.""" - gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) - wf = gi.workflows.export_workflow_dict(kwds['workflow_id']) - tools = get_wf_tool_description(wf, gi) - return wf, tools - - -def get_input_tool_name(step_id, steps): - """Get the string with the name of the tool that generated an input.""" - inp_provenance = '' - inp_prov_id = str(step_id) - if inp_prov_id in steps: - name = steps[inp_prov_id]['name'] - if 'Input dataset' in name: - inp_provenance = "(%s)" % name - else: - inp_provenance = "(output of **%s** {%% icon tool %%})" % name - return inp_provenance - - -def format_inputs(step_inputs, tp_desc, wf_steps, level): - """Format the inputs of a step.""" - inputlist = '' - for inp_n, inp in step_inputs.items(): - if inp_n != tp_desc['name']: - continue - inps = [] - if isinstance(inp, list): - # multiple input (not collection) - icon = 'param-files' - for i in inp: - inps.append('`%s` %s' % ( - i['output_name'], - get_input_tool_name(i['id'], wf_steps))) - else: - # sinle input or collection - inp_type = wf_steps[str(inp['id'])]['type'] - if 'collection' in inp_type: - icon = 'param-collection' - else: - icon = 'param-file' - inps = ['`%s` %s' % ( - inp['output_name'], - get_input_tool_name(inp['id'], wf_steps))] - inputlist += templates.render(INPUT_FILE_TEMPLATE, **{ - "icon": icon, - "input_name": tp_desc['label'], - "input_value": ', '.join(inps), - "space": SPACE * level - }) - return inputlist - - -def get_wf_step_inputs(step_inp): - """Get the inputs from a workflow step and format them.""" - step_inputs = {} - for inp_n, inp in step_inp.items(): - if '|' in inp_n: - repeat_regex = '(?P[^\|]*)_(?P\d+)\|(?P.+).+' - repeat_search = re.search(repeat_regex, inp_n) - hier_regex = '(?P[^\|]*)\|(?P.+)' - hier_regex = re.search(hier_regex, inp_n) - if repeat_search and repeat_search.start(0) <= hier_regex.start(0): - step_inputs.setdefault(repeat_search.group('prefix'), {}) - step_inputs[repeat_search.group('prefix')].setdefault( - repeat_search.group('nb'), - get_wf_step_inputs({hier_regex.group('suffix'): inp})) - else: - step_inputs.setdefault(hier_regex.group('prefix'), {}) - step_inputs[hier_regex.group('prefix')].update( - get_wf_step_inputs({hier_regex.group('suffix'): inp})) - else: - step_inputs.setdefault(inp_n, inp) - return step_inputs - - -def json_load(string): - """Transform a string into a dictionary.""" - if string is not None and ":" in string and '{' in string: - return json.loads(string) - else: - return string - - -def get_lower_params(step_params, name): - """Get the parameters from workflow that are below name in the hierarchy.""" - params = json_load(step_params) - if isinstance(params, dict) and name in params: - params = json_load(params[name]) - return params - - -def get_lower_inputs(step_inputs, name): - """Get the inputs from workflow that are below name in the hierarchy.""" - inputs = {} - if isinstance(step_inputs, dict) and name in step_inputs: - inputs = step_inputs[name] - else: - inputs = step_inputs - return inputs - - -def format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): - """Format the description (label and value) for parameters in a section.""" - section_paramlist = '' - # get sub params and inputs - params = get_lower_params(step_params, tp_desc['name']) - inputs = get_lower_inputs(step_inputs, tp_desc['name']) - # get description of parameters in lower hierarchy - sub_param_desc = get_param_desc(params, inputs, get_tool_input(tp_desc), level+1, wf_steps) - if sub_param_desc != '': - section_paramlist += templates.render(INPUT_SECTION, **{ - 'space': SPACE * level, - 'section_label': tp_desc['title']}) - section_paramlist += sub_param_desc - return section_paramlist - - -def format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): - """Format the description (label and value) for parameters in a conditional.""" - conditional_paramlist = '' - # Get conditional parameter - test_param = tp_desc['test_param'] - params = get_lower_params(step_params, tp_desc['name']) - inputs = get_lower_inputs(step_inputs, tp_desc['name']) - cond_param = get_lower_params(params, test_param['name']) - print("-") - print(cond_param) - print("-") - print(test_param) - print("-") - conditional_paramlist += format_param_desc( - cond_param, - step_inputs, - test_param, - level, - wf_steps, - force_default=True) - # Get parameters in the when - for case in tp_desc['cases']: - if case['value'] == cond_param: - if len(case['inputs']) > 0: - conditional_paramlist += get_param_desc( - params, - inputs, - get_tool_input(case), - level+1, - wf_steps) - return conditional_paramlist - - -def format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps): - """Format the description (label and value) for parameters in a repeat.""" - repeat_inp_desc = get_tool_input(tp_desc) - params = get_lower_params(step_params, tp_desc['name']) - inputs = get_lower_inputs(step_inputs, tp_desc['name']) - repeat_paramlist = '' - for r in range(len(params)): - r_inputs = get_lower_inputs(inputs, str(r)) - r_params = params[r] - paramlist_in_repeat = get_param_desc(r_params, r_inputs, repeat_inp_desc, level+2, wf_steps) - if paramlist_in_repeat != '': - # add first click - repeat_paramlist += templates.render(INPUT_ADD_REPEAT, **{ - 'space': SPACE * (level+1), - 'repeat_label': tp_desc['title']}) - # add description of parameters in the repeat - repeat_paramlist += templates.render(INPUT_SECTION, **{ - 'space': SPACE * (level+1), - 'section_label': "%s: %s" % (r+1, tp_desc['title'])}) - repeat_paramlist += paramlist_in_repeat - if repeat_paramlist != '': - repeat_paramlist = templates.render(INPUT_SECTION, **{ - 'space': SPACE * level, - 'section_label': tp_desc['title']}) + repeat_paramlist - return repeat_paramlist - - -def get_param_value(step_params, tp_desc, force_default=False): - """Get value of a 'simple' parameter if different from the default value, None otherwise.""" - param_value = '' - if isinstance(step_params, str) and '"' in step_params: - step_params = step_params.replace('"', '') - if tp_desc['value'] == step_params and not force_default: - param_value = None - elif tp_desc['type'] == 'boolean': - if bool(tp_desc['value']) == step_params: - param_value = None - else: - param_value = 'Yes' if step_params else 'No' - elif tp_desc['type'] == 'select': - param_value = '' - for opt in tp_desc['options']: - if opt[1] == step_params: - param_value = opt[0] - elif tp_desc['type'] == 'data_column': - param_value = "c%s" % step_params - else: - param_value = step_params - return param_value - - -def format_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, force_default=False): - """Format the parameter description (label and value) given the type of parameter.""" - paramlist = '' - if 'type' not in tp_desc: - raise ValueError("No type for the paramater %s" % tp_desc['name']) - if tp_desc['type'] == 'data' or tp_desc['type'] == 'data_collection': - paramlist += format_inputs(step_inputs, tp_desc, wf_steps, level) - elif tp_desc['type'] == 'section': - paramlist += format_section_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) - elif tp_desc['type'] == 'conditional': - paramlist += format_conditional_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) - elif tp_desc['type'] == 'repeat': - paramlist += format_repeat_param_desc(step_params, step_inputs, tp_desc, level, wf_steps) - else: - param_value = get_param_value(step_params, tp_desc, force_default) - if param_value is not None: - paramlist += templates.render(INPUT_PARAM, **{ - 'space': SPACE * level, - 'param_label': tp_desc['label'], - 'param_value': param_value}) - return paramlist - - -def get_param_desc(step_params, step_inputs, tp_desc, level, wf_steps, should_be_there=False): - """Parse the parameters of the tool and return a formatted list with the values set in the workflow.""" - paramlist = '' - for n, tp_d in tp_desc.items(): - if n not in step_params: - if not should_be_there: - info("%s not in workflow" % n) - else: - raise ValueError("%s not in workflow" % n) - else: - step_param = get_lower_params(step_params, n) - if step_param is None: - continue - paramlist += format_param_desc(step_param, step_inputs, tp_d, level, wf_steps) - return paramlist - - -def get_handson_box(step, steps, tools): - """Get the string for an hands-on box based on a step in a workflow.""" - # get input (if none: input step) - step_inputs = get_wf_step_inputs(step['input_connections']) - if not step_inputs: - return '' - # get params - step_params = json.loads(step['tool_state']) - # get tool - tool_name = step['name'] - tp_desc = tools[tool_name] - # get formatted param description - paramlist = get_param_desc(step_params, step_inputs, tp_desc, 1, steps, should_be_there=True) - context = {"tool_name": tool_name, "paramlist": paramlist} - return templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **context) - - -def init_tuto_metadata(kwds): - """Init tutorial metadata""" - metadata = collections.OrderedDict() - metadata['title'] = kwds["tutorial_title"] - metadata['zenodo_link'] = kwds["zenodo_link"] if kwds["zenodo_link"] else '' - metadata['questions'] = [ - "Which biological questions are addressed by the tutorial?", - "Which bioinformatics techniques is important to know for this type of data?"] - metadata['objectives'] = [ - "The learning objectives are the goals of the tutorial", - "They will be informed by your audience and will communicate to them and to yourself what you should focus on during the course", - "They are single sentence describing what a learner will be able to do once they have done the tutorial", - "You can use the Bloom's Taxonomy to write effective learning objectives"] - metadata['time'] = "3H" - metadata['key_points'] = [ - "The take-home messages", - "They will appear at the end of the tutorial"] - metadata['contributors'] = ["contributor1", "contributor2"] - return metadata - - -def format_tuto_metadata(metadata): - """Return the string corresponding to the tutorial metadata""" - return yaml.safe_dump(metadata, - indent=2, - default_flow_style=False, - default_style='', - explicit_start=False) - - -def write_hands_on_tutorial(metadata, body, tuto_dir): - """Write the tutorial hands-on""" - m_str = format_tuto_metadata(metadata) - template = templates.render(TUTO_HAND_ON_TEMPLATE, **{ - "metadata": m_str, - "body": body - }) - - md_path = os.path.join(tuto_dir, "tutorial.md") - with open(md_path, 'w') as md: - md.write(template) - - -def get_tuto_body(z_file_links, body = None): - """Get the body for a tutorial""" - if body is None: - body = templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **{ - 'tool_name': "My Tool", - 'inputlist': templates.render(INPUT_FILE_TEMPLATE, **{ - 'space': 1*SPACE, - 'icon': 'param-file', - 'input_name': 'Input file', - 'input_value': 'File' - }), - 'paramlist': templates.render(INPUT_PARAM, **{ - 'space': 1*SPACE, - 'param_label': 'Parameter', - 'param_value': 'a value' - }) - }) - return templates.render(TUTO_HAND_ON_BODY_TEMPLATE, **{ - "z_file_links": "\n> ".join(z_file_links), - "body": body}) - - -def create_hands_on_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx, metadata=None): - """Create tutorial structure from the workflow file.""" - # load workflow - if kwds['workflow_id']: - if not kwds['galaxy_url']: - raise ValueError("No Galaxy URL given") - if not kwds['galaxy_api_key']: - raise ValueError("No API key to access Galaxy given") - wf, tools = get_wf_tools_from_running_galaxy(kwds) - else: - wf, tools = get_wf_tool_from_local_galaxy(kwds, kwds["workflow"], ctx) - - # get hands-on body from the workflow - body = '' - for step_id in range(len(wf['steps'].keys())): - step = wf['steps'][str(step_id)] - if not step['tool_state']: - continue - body += get_handson_box(step, wf['steps'], tools) - body = get_tuto_body(z_file_links, body) - - # write in the tutorial file with the metadata on the top - if not metadata: - metadata = init_tuto_metadata(kwds) - write_hands_on_tutorial(metadata, body, tuto_dir) - - -def add_workflow_file(kwds, tuto_dir): - """Copy or extract workflow file and add it to the tutorial directory""" - wf_dir = os.path.join(tuto_dir, "workflows") - # copy / extract workflow - wf_filepath = os.path.join(wf_dir, "init_workflow.ga") - if kwds["workflow"]: - shutil.copy(kwds["workflow"], wf_filepath) - else: - gi = galaxy.GalaxyInstance(kwds['galaxy_url'], key=kwds['galaxy_api_key']) - gi.workflows.export_workflow_to_local_path(kwds['workflow_id'], - wf_filepath, - use_default_filename=False) - # remove empty workflow file if there - empty_wf_filepath = os.path.join(wf_dir, "empty_workflow.ga") - if os.path.exists(empty_wf_filepath): - os.remove(empty_wf_filepath) - - -def create_tutorial(kwds, tuto_dir, ctx): - """Create the skeleton of a new tutorial.""" - # create tuto folder and empty files - os.makedirs(tuto_dir) - tour_folder = os.path.join(tuto_dir, "tours") - os.makedirs(tour_folder) - workflow_folder = os.path.join(tuto_dir, "workflows") - os.makedirs(workflow_folder) - - metadata = init_tuto_metadata(kwds) - - # extract the data library from Zenodo and the links for the tutorial - z_file_links = '' - if kwds["zenodo_link"]: - info("Create the data library from Zenodo") - z_file_links = prepare_data_library_from_zenodo(kwds, tuto_dir) - - # create tutorial skeleton from workflow and copy workflow file - if kwds["hands_on"]: - if kwds["workflow"] or kwds['workflow_id']: - info("Create tutorial skeleton from workflow") - create_hands_on_tutorial_from_workflow(kwds, z_file_links, tuto_dir, ctx) - add_workflow_file(kwds, tuto_dir) - else: - body = get_tuto_body(z_file_links) - print(body) - write_hands_on_tutorial(metadata, body, tuto_dir) - - # create slide skeleton - if kwds["slides"]: - slide_path = os.path.join(tuto_dir, 'slides.html') - m_str = format_tuto_metadata(metadata) - with open(slide_path, 'w') as slide_f: - slide_f.write( - templates.render(TUTO_SLIDES_TEMPLATE, **{"metadata": m_str})) - - -def init(ctx, kwds): - """Create/update a topic/tutorial""" - topic_dir = os.path.join("topics", kwds['topic_name']) - if not os.path.isdir(topic_dir): - info("The topic %s does not exist. It will be created" % kwds['topic_name']) - create_topic(kwds, topic_dir) - - if not kwds['tutorial_name']: - if kwds["slides"]: - raise Exception("A tutorial name is needed to create the skeleton of a tutorial slide deck") - if kwds['workflow'] or kwds['workflow_id']: - raise Exception("A tutorial name is needed to create the skeleton of the tutorial from a workflow") - if kwds['zenodo_link']: - raise Exception("A tutorial name is needed to add Zenodo information") - else: - tuto_dir = os.path.join(topic_dir, "tutorials", kwds['tutorial_name']) - if not os.path.isdir(tuto_dir): - info("The tutorial %s in topic %s does not exist. It will be created." % (kwds['tutorial_name'], kwds['topic_name'])) - create_tutorial(kwds, tuto_dir, ctx) - - -def get_tuto_info(tuto_dir): - """Extract the metadata front matter on the top of the tutorial file and its body""" - tuto_fp = os.path.join(tuto_dir, "tutorial.md") - with open(tuto_fp, "r") as tuto_f: - tuto_content = tuto_f.read() - - regex = '^---\n(?P[\s\S]*)\n---(?P[\s\S]*)' - tuto_split_regex = re.search(regex, tuto_content) - if not tuto_split_regex: - raise Exception("No metadata found at the top of the tutorial") - - metadata = yaml.load(tuto_split_regex.group("metadata")) - body = tuto_split_regex.group("body") - - return metadata, body - - -def check_topic_tuto_exist(kwds): - """Check that the topic and tutorial are already there.""" - topic_dir = os.path.join("topics", kwds['topic_name']) - if not os.path.isdir(topic_dir): - raise Exception("The topic %s does not exists. It should be created" % kwds['topic_name']) - - tuto_dir = os.path.join(topic_dir, "tutorials", kwds['tutorial_name']) - if not os.path.isdir(tuto_dir): - raise Exception("The tutorial %s does not exists. It should be created" % kwds['tutorial_name']) - - return topic_dir, tuto_dir - - -def fill_data_library(ctx, kwds): - """Fill a data library for a tutorial.""" - topic_dir, tuto_dir = check_topic_tuto_exist(kwds) - metadata, body = get_tuto_info(tuto_dir) - - # get the zenodo link - z_link = '' - if 'zenodo_link' in metadata and metadata['zenodo_link'] != '': - if kwds['zenodo_link']: - info("The data library and the metadata will be updated with the new Zenodo link") - z_link = kwds['zenodo_link'] - metadata['zenodo_link'] = z_link - else: - info("The data library will be extracted using the Zenodo link in the metadata") - z_link = metadata['zenodo_link'] - elif kwds['zenodo_link']: - info("The data library will be created and the metadata will be filled with the new Zenodo link") - z_link = kwds['zenodo_link'] - metadata['zenodo_link'] = z_link - - if z_link == '' or z_link is None: - raise Exception("A Zenodo link should be provided either in the metadata file or as argument of the command") - - # get the topic metadata - topic_metadata_fp = os.path.join(topic_dir, "metadata.yaml") - topic_metadata = load_yaml(topic_metadata_fp) - - # extract the data library from Zenodo - topic_kwds = { - 'topic_title': topic_metadata['title'], - 'topic_summary': topic_metadata['summary'], - 'tutorial_title': metadata['title'], - 'zenodo_link': z_link, - 'datatypes': kwds['datatypes'] - } - prepare_data_library_from_zenodo(topic_kwds, tuto_dir) - - # update the metadata - write_hands_on_tutorial(metadata, body, tuto_dir) - - -def generate_tuto_from_wf(ctx, kwds): - """Generate the skeleton of a tutorial from a workflow.""" - if kwds["workflow"] or kwds['workflow_id']: - topic_dir, tuto_dir = check_topic_tuto_exist(kwds) - metadata, body = get_tuto_info(tuto_dir) - info("Create tutorial skeleton from workflow") - create_hands_on_tutorial_from_workflow(kwds, [], tuto_dir, ctx, metadata) - add_workflow_file(kwds, tuto_dir) - else: - exc = "A path to a local workflow or the id of a workflow on a running Galaxy instance should be provided" - raise Exception(exc) diff --git a/planemo/training/__init__.py b/planemo/training/__init__.py new file mode 100644 index 000000000..971734a74 --- /dev/null +++ b/planemo/training/__init__.py @@ -0,0 +1,94 @@ +"""Module contains code for gtdk: Galaxy training development kit.""" + +from planemo.io import info +from .topic import Topic +from .tutorial import Tutorial + + +class Training: + """Class to describe a training.""" + + def __init__(self, kwds): + """Init an instance of Training.""" + self.kwds = kwds + self.topics_dir = "topics" + self.topic = Topic(parent_dir=self.topics_dir, name=kwds['topic_name']) + self.galaxy_url = kwds['galaxy_url'] if 'galaxy_url' in kwds else '' + self.galaxy_api_key = kwds['galaxy_api_key'] if 'galaxy_api_key' in kwds else '' + self.tuto = None + + def init_training(self, ctx): + """Create/update a topic/tutorial.""" + if not self.topic.exists(): + info("The topic %s does not exist. It will be created" % self.topic.name) + self.topic.init_from_kwds(self.kwds) + self.topic.create_topic_structure() + + if not self.kwds['tutorial_name']: + if self.kwds["slides"]: + raise Exception("A tutorial name is needed to create the skeleton of a tutorial slide deck") + if self.kwds['workflow'] or self.kwds['workflow_id']: + raise Exception("A tutorial name is needed to create the skeleton of the tutorial from a workflow") + if self.kwds['zenodo_link']: + raise Exception("A tutorial name is needed to add Zenodo information") + else: + self.tuto = Tutorial(training=self, topic=self.topic) + self.tuto.init_from_kwds(self.kwds) + if not self.tuto.exists(): + info("The tutorial %s in topic %s does not exist. It will be created." % (self.tuto.name, self.topic.name)) + self.tuto.create_tutorial(ctx) + + def check_topic_init_tuto(self): + """Check that the topic and tutorial are already there and retrieve them.""" + # check topic + if not self.topic.exists(): + raise Exception("The topic %s does not exists. It should be created" % self.topic.name) + self.topic.init_from_metadata() + # initiate the tutorial + self.tuto = Tutorial(training=self, topic=self.topic) + self.tuto.init_from_existing_tutorial(self.kwds['tutorial_name']) + if 'datatypes' in self.kwds: + self.tuto.datatype_fp = self.kwds['datatypes'] + if 'workflow' in self.kwds: + self.tuto.init_wf_fp = self.kwds['workflow'] + if 'workflow_id' in self.kwds: + self.tuto.init_wf_id = self.kwds['workflow_id'] + + def fill_data_library(self, ctx): + """Fill a data library for a tutorial.""" + self.check_topic_init_tuto() + # get the zenodo link + z_link = '' + if self.tuto.zenodo_link != '': + if self.kwds['zenodo_link']: + info("The data library and the metadata will be updated with the new Zenodo link") + z_link = self.kwds['zenodo_link'] + self.tuto.zenodo_link = z_link + else: + info("The data library will be extracted using the Zenodo link in the metadata of the tutorial") + z_link = self.tuto.zenodo_link + elif self.kwds['zenodo_link']: + info("The data library will be created and the metadata will be filled with the new Zenodo link") + z_link = self.kwds['zenodo_link'] + self.tuto.zenodo_link = z_link + + if z_link == '' or z_link is None: + raise Exception("A Zenodo link should be provided either in the metadata file or as argument of the command") + + # extract the data library from Zenodo + self.tuto.prepare_data_library_from_zenodo() + + # update the metadata + self.tuto.write_hands_on_tutorial() + + def generate_tuto_from_wf(self, ctx): + """Generate the skeleton of a tutorial from a workflow.""" + self.check_topic_init_tuto() + if self.tuto.has_workflow(): + info("Create tutorial skeleton from workflow") + self.tuto.create_hands_on_tutorial(ctx) + self.tuto.export_workflow_file() + else: + raise Exception( + "A path to a local workflow or the id of a workflow on a running Galaxy instance should be provided" + ) diff --git a/planemo/training/tool_input.py b/planemo/training/tool_input.py new file mode 100644 index 000000000..2c9475cc7 --- /dev/null +++ b/planemo/training/tool_input.py @@ -0,0 +1,234 @@ +"""Module contains code for the ToolInput class, dealing with the description of tool in workflow and XML.""" + +from planemo import templates +from planemo.io import info + + +INPUT_PARAM = """ +>{{space}}- *"{{param_label}}"*: `{{param_value}}` +""" + +INPUT_FILE_TEMPLATE = """ +>{{space}}- {{ '{%' }} icon {{icon}} {{ '%}' }} *"{{input_name}}"*: {{input_value}} +""" + +INPUT_SECTION = """ +>{{space}}- In *"{{section_label}}"*: +""" + +INPUT_ADD_REPEAT = """ +>{{space}}- Click on *"Insert {{repeat_label}}"*: +""" + +SPACE = ' ' + + +class ToolInput(): + """Class to describe a tool input / parameter and its value from a workflow.""" + + def __init__(self, tool_inp_desc, wf_param_values, wf_steps, level, should_be_there=False, force_default=False): + """Init an instance of ToolInput.""" + self.name = tool_inp_desc['name'] + if 'type' not in tool_inp_desc: + raise ValueError("No type for the parameter %s" % tool_inp_desc['name']) + self.type = tool_inp_desc['type'] + self.tool_inp_desc = tool_inp_desc + self.level = level + self.wf_param_values = wf_param_values + self.wf_steps = wf_steps + self.formatted_desc = '' + self.force_default = force_default + + if self.name not in self.wf_param_values: + if not should_be_there: + info("%s not in workflow" % self.name) + else: + raise ValueError("%s not in workflow" % self.name) + else: + self.wf_param_values = self.wf_param_values[self.name] + + def get_formatted_inputs(self): + """Format the inputs of a step.""" + inputlist = '' + inps = [] + if isinstance(self.wf_param_values, list): + # multiple input (not collection) + icon = 'param-files' + for i in self.wf_param_values: + inps.append('`%s` %s' % ( + i['output_name'], + get_input_tool_name(i['id'], self.wf_steps))) + else: + inp = self.wf_param_values + if 'id' in inp: + # sinle input or collection + inp_type = self.wf_steps[str(inp['id'])]['type'] + if 'collection' in inp_type: + icon = 'param-collection' + else: + icon = 'param-file' + inps = ['`%s` %s' % ( + inp['output_name'], + get_input_tool_name(inp['id'], self.wf_steps))] + if len(inps) > 0: + inputlist += templates.render(INPUT_FILE_TEMPLATE, **{ + "icon": icon, + "input_name": self.tool_inp_desc['label'], + "input_value": ', '.join(inps), + "space": SPACE * self.level + }) + return inputlist + + def get_lower_param_desc(self): + """Get the formatted description of the paramaters in the 'inputs' of the tool description.""" + sub_param_desc = '' + for inp in self.tool_inp_desc["inputs"]: + tool_inp = ToolInput( + inp, + self.wf_param_values, + self.wf_steps, + self.level + 1) + sub_param_desc += tool_inp.get_formatted_desc() + return sub_param_desc + + def get_formatted_section_desc(self): + """Format the description (label and value) for parameters in a section.""" + section_paramlist = '' + sub_param_desc = self.get_lower_param_desc() + if sub_param_desc != '': + section_paramlist += templates.render(INPUT_SECTION, **{ + 'space': SPACE * self.level, + 'section_label': self.tool_inp_desc['title']}) + section_paramlist += sub_param_desc + return section_paramlist + + def get_formatted_conditional_desc(self): + """Format the description (label and value) for parameters in a conditional.""" + conditional_paramlist = '' + # Get conditional parameter + inp = ToolInput( + self.tool_inp_desc['test_param'], + self.wf_param_values, + self.wf_steps, + self.level, + should_be_there=True, + force_default=True) + conditional_paramlist = inp.get_formatted_desc() + cond_param = inp.wf_param_values + + # Get parameters in the when and their values + tmp_tool_inp_desc = self.tool_inp_desc + for case in tmp_tool_inp_desc['cases']: + if case['value'] == cond_param and len(case['inputs']) > 0: + self.tool_inp_desc = case + conditional_paramlist += self.get_lower_param_desc() + self.tool_inp_desc = tmp_tool_inp_desc + return conditional_paramlist + + def get_formatted_repeat_desc(self): + """Format the description (label and value) for parameters in a repeat.""" + tool_inp = {} + for inp in self.tool_inp_desc["inputs"]: + tool_inp.setdefault(inp['name'], inp) + repeat_paramlist = '' + tmp_wf_param_values = self.wf_param_values + cur_level = self.level + for ind, param in enumerate(tmp_wf_param_values): + self.wf_param_values = param + self.level = cur_level + 1 + paramlist_in_repeat = self.get_lower_param_desc() + if paramlist_in_repeat != '': + # add first click + repeat_paramlist += templates.render(INPUT_ADD_REPEAT, **{ + 'space': SPACE * (self.level), + 'repeat_label': self.tool_inp_desc['title']}) + # add description of parameters in the repeat + repeat_paramlist += templates.render(INPUT_SECTION, **{ + 'space': SPACE * (self.level), + 'section_label': "%s: %s" % (ind+1, self.tool_inp_desc['title'])}) + repeat_paramlist += paramlist_in_repeat + self.level = cur_level + self.wf_param_values = tmp_wf_param_values + + repeat_desc = '' + if repeat_paramlist != '': + repeat_desc += templates.render(INPUT_SECTION, **{ + 'space': SPACE * self.level, + 'section_label': self.tool_inp_desc['title']}) + repeat_paramlist + return repeat_desc + + def get_formatted_other_param_desc(self): + """Get value of a 'simple' parameter if different from the default value, None otherwise.""" + param_value = None + if self.tool_inp_desc['value'] == self.wf_param_values and not self.force_default: + param_value = None + elif self.type == 'boolean': + if bool(self.tool_inp_desc['value']) == self.wf_param_values: + param_value = None + else: + param_value = 'Yes' if self.wf_param_values else 'No' + elif self.type == 'select': + param_values = [] + for opt in self.tool_inp_desc['options']: + if opt[1] == self.wf_param_values: + param_values.append(opt[0]) + param_value = ', '.join(param_values) + elif self.type == 'data_column': + param_value = "c%s" % self.wf_param_values + else: + param_value = self.wf_param_values + + param_desc = '' + if param_value is not None: + param_desc = templates.render(INPUT_PARAM, **{ + 'space': SPACE * self.level, + 'param_label': self.tool_inp_desc['label'], + 'param_value': param_value}) + return param_desc + + def get_formatted_desc(self): + """Get the formatted description (ready for hands-on tutorial) of the parameter.""" + if self.wf_param_values: + if self.type == 'data' or self.type == 'data_collection': + self.formatted_desc += self.get_formatted_inputs() + elif self.type == 'section': + self.formatted_desc += self.get_formatted_section_desc() + elif self.type == 'conditional': + self.formatted_desc += self.get_formatted_conditional_desc() + elif self.type == 'repeat': + self.formatted_desc += self.get_formatted_repeat_desc() + else: + self.formatted_desc += self.get_formatted_other_param_desc() + return self.formatted_desc + + +def get_input_tool_name(step_id, steps): + """Get the string with the name of the tool that generated an input.""" + inp_provenance = '' + inp_prov_id = str(step_id) + if inp_prov_id in steps: + name = steps[inp_prov_id]['name'] + if 'Input dataset' in name: + inp_provenance = "(%s)" % name + else: + inp_provenance = "(output of **%s** {%% icon tool %%})" % name + return inp_provenance + + +def get_empty_input(): + """Get the string for an empty input.""" + return templates.render(INPUT_FILE_TEMPLATE, **{ + 'space': 1*SPACE, + 'icon': 'param-file', + 'input_name': 'Input file', + 'input_value': 'File' + }) + + +def get_empty_param(): + """Get the string for an empty param.""" + return templates.render(INPUT_PARAM, **{ + 'space': 1*SPACE, + 'param_label': 'Parameter', + 'param_value': 'a value' + }) diff --git a/planemo/training/topic.py b/planemo/training/topic.py new file mode 100644 index 000000000..1187dded6 --- /dev/null +++ b/planemo/training/topic.py @@ -0,0 +1,240 @@ +"""Module contains code for the Topic class, dealing with the creation of a training topic.""" + +import collections +import os + +from planemo import templates +from .utils import ( + load_yaml, + Reference, + Requirement, + save_to_yaml +) + + +INDEX_FILE_TEMPLATE = """--- +layout: topic +topic_name: {{ topic }} +--- +""" + +README_FILE_TEMPLATE = """ +{{ topic }} +========== + +Please refer to the [CONTRIBUTING.md](../../CONTRIBUTING.md) before adding or updating any material +""" + + +DOCKER_FILE_TEMPLATE = """ +# Galaxy - {{ topic_title }} +# +# to build the docker image, go to root of training repo and +# docker build -t {{ topic_name }} -f topics/{{ topic_name }}/docker/Dockerfile . +# +# to run image: +# docker run -p "8080:80" -t {{ topic_name }} + +FROM bgruening/galaxy-stable + +MAINTAINER Galaxy Training Material + +ENV GALAXY_CONFIG_BRAND "GTN: {{ topic_title }}" + +# prerequisites +RUN pip install ephemeris -U +ADD bin/galaxy-sleep.py /galaxy-sleep.py + +# copy the tutorials directory for your topic +ADD topics/{{ topic_name }}/tutorials/ /tutorials/ + +# install everything for tutorials +ADD bin/docker-install-tutorials.sh /setup-tutorials.sh +ADD bin/mergeyaml.py /mergeyaml.py +RUN /setup-tutorials.sh +""" + + +INTRO_SLIDES_FILE_TEMPLATE = """--- +layout: introduction_slides +logo: "GTN" + +title: {{ title }} +type: {{ type }} +contributors: +- contributor +--- + +### How to fill the slide decks? + +Please follow our +[tutorial to learn how to fill the slides]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) +""" + + +class Topic: + """Class to describe a training topic.""" + + def __init__(self, name="new_topic", target="use", title="The new topic", summary="Summary", parent_dir="topics"): + """Init a topic instance.""" + self.name = name + self.type = target + self.title = title + self.summary = summary + self.docker_image = "" + self.maintainers = ["maintainers"] + self.parent_dir = parent_dir + self.set_default_requirement() + self.set_default_reference() + self.set_paths() + + def init_from_kwds(self, kwds): + """Init a topic instance from a kwds dictionary.""" + self.name = kwds["topic_name"] + self.type = kwds["topic_target"] + self.title = kwds["topic_title"] + self.summary = kwds["topic_summary"] + self.set_default_requirement() + self.set_default_reference() + self.set_paths() + + def init_from_metadata(self): + """Init a topic instance from the metadata file.""" + metadata = load_yaml(self.metadata_fp) + self.name = metadata['name'] + self.type = metadata['type'] + self.title = metadata['title'] + self.summary = metadata['summary'] + self.requirements = [] + for r in metadata['requirements']: + req = Requirement() + req.init_from_dict(r) + self.requirements.append(req) + if 'docker_image' in metadata: + self.docker_image = metadata['docker_image'] + self.maintainers = metadata['maintainers'] + self.references = [] + if 'references' in metadata: + for r in metadata['references']: + ref = Reference() + ref.init_from_dict(r) + self.references.append(ref) + self.set_paths() + + # GETTERS + def get_requirements(self): + """Get the requirements as a list of ordered dictionaries.""" + reqs = [] + for req in self.requirements: + reqs.append(req.export_to_ordered_dict()) + return reqs + + def get_references(self): + """Get the references as a list of ordered dictionaries.""" + refs = [] + for ref in self.references: + refs.append(ref.export_to_ordered_dict()) + return refs + + def export_metadata_to_ordered_dict(self): + """Export the topic metadata into an ordered dictionary.""" + metadata = collections.OrderedDict() + metadata['name'] = self.name + metadata['type'] = self.type + metadata['title'] = self.title + metadata['summary'] = self.summary + metadata['requirements'] = self.get_requirements() + metadata['docker_image'] = self.docker_image + metadata['maintainers'] = self.maintainers + metadata['references'] = self.get_references() + return metadata + + # SETTERS + def set_default_requirement(self): + """Set default requirement: Galaxy introduction.""" + self.requirements = [] + if self.type == 'use': + self.requirements.append(Requirement()) + + def set_default_reference(self): + """Set default refences: no information.""" + self.references = [] + if self.type == 'use': + self.references.append(Reference()) + + def set_paths(self): + """Set the paths to folder and files.""" + self.dir = os.path.join(self.parent_dir, self.name) + self.img_folder = os.path.join(self.dir, "images") + self.tuto_folder = os.path.join(self.dir, "tutorials") + self.index_fp = os.path.join(self.dir, "index.md") + self.readme_fp = os.path.join(self.dir, "README.md") + self.metadata_fp = os.path.join(self.dir, "metadata.yaml") + self.docker_folder = os.path.join(self.dir, "docker") + self.dockerfile_fp = os.path.join(self.docker_folder, "Dockerfile") + self.slides_folder = os.path.join(self.dir, "slides") + + # TESTS + def exists(self): + """Test if the topic exists.""" + return os.path.isdir(self.dir) + + # OTHER METHODS + def create_topic_structure(self): + """Create the skeleton of a new topic. + + 1. create the folder and its structure + 2. update the index.md to match your topic's name + 3. fill the metadata + 4. add a symbolic link to the metadata.yaml from the metadata folder + """ + # create the folder and its structure + os.makedirs(self.dir) + self.img_folder = os.path.join(self.dir, "images") + os.makedirs(self.img_folder) + self.tuto_folder = os.path.join(self.dir, "tutorials") + os.makedirs(self.tuto_folder) + + # create the index.md and add the topic name + self.index_fp = os.path.join(self.dir, "index.md") + with open(self.index_fp, 'w') as index_f: + index_f.write( + templates.render(INDEX_FILE_TEMPLATE, **{'topic': self.name})) + + # create the README file + self.readme_fp = os.path.join(self.dir, "README.md") + with open(self.readme_fp, 'w') as readme_f: + readme_f.write( + templates.render(README_FILE_TEMPLATE, **{'topic': self.title})) + + # create the metadata file + self.metadata_fp = os.path.join(self.dir, "metadata.yaml") + save_to_yaml(self.export_metadata_to_ordered_dict(), self.metadata_fp) + + # create Dockerfile + self.docker_folder = os.path.join(self.dir, "docker") + os.makedirs(self.docker_folder) + self.dockerfile_fp = os.path.join(self.docker_folder, "Dockerfile") + with open(self.dockerfile_fp, 'w') as dockerfile: + dockerfile.write( + templates.render( + DOCKER_FILE_TEMPLATE, + **{'topic_name': self.name, 'topic_title': self.title})) + + # create empty introduction slides + self.slides_folder = os.path.join(self.dir, "slides") + os.makedirs(self.slides_folder) + self.intro_slide_fp = os.path.join(self.slides_folder, "introduction.html") + with open(self.intro_slide_fp, 'w') as intro_slide_f: + intro_slide_f.write( + templates.render( + INTRO_SLIDES_FILE_TEMPLATE, + **{'title': "Introduction to %s" % self.title, 'type': "introduction"})) + + # add a symbolic link to the metadata.yaml + metadata_dir = "metadata" + if not os.path.isdir(metadata_dir): + os.makedirs(metadata_dir) + os.chdir(metadata_dir) + os.symlink(os.path.join("..", self.metadata_fp), "%s.yaml" % self.name) + os.chdir("..") diff --git a/planemo/training/tutorial.py b/planemo/training/tutorial.py new file mode 100644 index 000000000..0eba650cb --- /dev/null +++ b/planemo/training/tutorial.py @@ -0,0 +1,601 @@ +"""Module contains code for the Tutorial class, dealing with the creation of a training tutorial.""" + +import collections +import json +import os +import re +import shutil + +import oyaml as yaml +import requests + +from planemo import templates +from planemo.bioblend import galaxy +from planemo.engine import ( + engine_context, + is_galaxy_engine, +) +from planemo.io import info +from planemo.runnable import for_path +from .tool_input import ( + get_empty_input, + get_empty_param, + ToolInput +) +from .utils import ( + load_yaml, + save_to_yaml +) + + +TUTO_HAND_ON_TEMPLATE = """--- +layout: tutorial_hands_on + +{{ metadata }} +--- + +{{ body }} +""" + +TUTO_SLIDES_TEMPLATE = """--- +layout: tutorial_slides +logo: "GTN" + +{{ metadata }} +--- + +### How to fill the slide decks? + +Please follow our +[tutorial to learn how to fill the slides]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/create-new-tutorial-slides/slides.html) +""" + + +HANDS_ON_TOOL_BOX_TEMPLATE = """ +## Sub-step with **{{tool_name}}** + +> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Task description +> +> 1. **{{tool_name}}** {{ '{%' }} icon tool {{ '%}' }} with the following parameters:{{inputlist}}{{paramlist}} +> +> ***TODO***: *Check parameter descriptions* +> +> ***TODO***: *Consider adding a comment or tip box* +> +> > ### {{ '{%' }} icon comment {{ '%}' }} Comment +> > +> > A comment about the tool or something else. This box can also be in the main text +> {: .comment} +> +{: .hands_on} + +***TODO***: *Consider adding a question to test the learners understanding of the previous exercise* + +> ### {{ '{%' }} icon question {{ '%}' }} Questions +> +> 1. Question1? +> 2. Question2? +> +> > ### {{ '{%' }} icon solution {{ '%}' }} Solution +> > +> > 1. Answer for question1 +> > 2. Answer for question2 +> > +> {: .solution} +> +{: .question} + +""" + +TUTO_HAND_ON_BODY_TEMPLATE = """ +# Introduction +{:.no_toc} + + + +General introduction about the topic and then an introduction of the +tutorial (the questions and the objectives). It is nice also to have a +scheme to sum up the pipeline used during the tutorial. The idea is to +give to trainees insight into the content of the tutorial and the (theoretical +and technical) key concepts they will learn. + +**Please follow our +[tutorial to learn how to fill the Markdown]({{ '{{' }} site.baseurl {{ '}}' }}/topics/contributing/tutorials/\ +create-new-tutorial-content/tutorial.html)** + +> ### Agenda +> +> In this tutorial, we will cover: +> +> 1. TOC +> {:toc} +> +{: .agenda} + +# Title for your first section + +Give some background about what the trainees will be doing in the section. + +Below are a series of hand-on boxes, one for each tool in your workflow file. +Often you may wish to combine several boxes into one or make other adjustments such +as breaking the tutorial into sections, we encourage you to make such changes as you +see fit, this is just a starting point :) + +Anywhere you find the word "***TODO***", there is something that needs to be changed +depending on the specifics of your tutorial. + +have fun! + +## Get data + +> ### {{ '{%' }} icon hands_on {{ '%}' }} Hands-on: Data upload +> +> 1. Create a new history for this tutorial +> 2. Import the files from [Zenodo]({{ zenodo_link }}) or from the shared data library +> +> ``` +> {{ z_file_links }} +> ``` +> ***TODO***: *Add the files by the ones on Zenodo here (if not added)* +> +> ***TODO***: *Remove the useless files (if added)* +> +> {{ '{%' }} include snippets/import_via_link.md {{ '%}' }} +> {{ '{%' }} include snippets/import_from_data_library.md {{ '%}' }} +> +> 3. Rename the datasets +> 4. Check that the datatype +> +> {{ '{%' }} include snippets/change_datatype.md datatype="datatypes" {{ '%}' }} +> +> 5. Add to each database a tag corresponding to ... +> +> {{ '{%' }} include snippets/add_tag.md {{ '%}' }} +> +{: .hands_on} + +# Title of the section usually corresponding to a big step in the analysis + +It comes first a description of the step: some background and some theory. +Some image can be added there to support the theory explanation: + +![Alternative text](../../images/image_name "Legend of the image") + +The idea is to keep the theory description before quite simple to focus more on the practical part. + +***TODO***: *Consider adding a detail box to expand the theory* + +> ### {{ '{%' }} icon details {{ '%}' }} More details about the theory +> +> But to describe more details, it is possible to use the detail boxes which are expandable +> +{: .details} + +A big step can have several subsections or sub steps: + +{{ body }} + +## Re-arrange + +To create the template, each step of the workflow had its own subsection. + +***TODO***: *Re-arrange the generated subsections into sections or other subsections. +Consider merging some hands-on boxes to have a meaningful flow of the analyses* + +# Conclusion +{:.no_toc} + +Sum up the tutorial and the key takeaways here. We encourage adding an overview image of the +pipeline used. +""" + + +class Tutorial: + """Class to describe a training tutorial.""" + + def __init__(self, training, topic, name="new_tuto", title="The new tutorial", zenodo_link=""): + """Init a tutorial instance.""" + self.training = training + self.topic = topic + self.name = name + self.title = title + self.zenodo_link = zenodo_link + self.zenodo_file_links = [] + self.questions = [] + self.objectives = [] + self.time = "" + self.key_points = [] + self.contributors = [] + self.body = "" + self.init_wf_fp = None + self.init_wf_id = None + self.hands_on = True + self.slides = False + self.datatype_fp = "" + self.set_dir_name() + self.init_data_lib() + self.body = templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **{ + 'tool_name': "My Tool", + 'inputlist': get_empty_input(), + 'paramlist': get_empty_param() + }) + + def init_from_kwds(self, kwds): + """Init a tutorial instance from a kwds dictionary.""" + self.name = kwds["tutorial_name"] + self.title = kwds["tutorial_title"] + self.zenodo_link = kwds["zenodo_link"] if kwds["zenodo_link"] else '' + self.questions = [ + "Which biological questions are addressed by the tutorial?", + "Which bioinformatics techniques is important to know for this type of data?"] + self.objectives = [ + "The learning objectives are the goals of the tutorial", + "They will be informed by your audience and will communicate to them and to yourself what you should focus on during the course", + "They are single sentence describing what a learner will be able to do once they have done the tutorial", + "You can use the Bloom's Taxonomy to write effective learning objectives"] + self.time = "3H" + self.key_points = [ + "The take-home messages", + "They will appear at the end of the tutorial"] + self.contributors = ["contributor1", "contributor2"] + self.init_wf_fp = kwds['workflow'] + self.init_wf_id = kwds['workflow_id'] + self.hands_on = kwds['hands_on'] + self.slides = kwds['slides'] + self.datatype_fp = kwds['datatypes'] + self.set_dir_name() + self.init_data_lib() + + def init_from_existing_tutorial(self, tuto_name): + """Init a tutorial instance from an existing tutorial (data library and tutorial.md).""" + self.name = tuto_name + self.set_dir_name() + + if not self.exists(): + raise Exception("The tutorial %s does not exists. It should be created" % self.name) + + # get the metadata information of the tutorial (from the top of the tutorial.md) + with open(self.tuto_fp, "r") as tuto_f: + tuto_content = tuto_f.read() + regex = '^---\n(?P[\s\S]*)\n---(?P[\s\S]*)' + tuto_split_regex = re.search(regex, tuto_content) + if not tuto_split_regex: + raise Exception("No metadata found at the top of the tutorial") + metadata = yaml.load(tuto_split_regex.group("metadata")) + self.title = metadata["title"] + self.zenodo_link = metadata["zenodo_link"] + self.questions = metadata["questions"] + self.objectives = metadata["objectives"] + self.time_estimation = metadata["time_estimation"] + self.key_points = metadata["key_points"] + self.contributors = metadata["contributors"] + + # the the tutorial content + self.body = tuto_split_regex.group("body") + + # get the data library + self.init_data_lib() + + def init_data_lib(self): + """Init the data library dictionary.""" + if os.path.exists(self.data_lib_fp): + self.data_lib = load_yaml(self.data_lib_fp) + else: + self.data_lib = collections.OrderedDict() + # set default information + self.data_lib.setdefault('destination', collections.OrderedDict()) + self.data_lib['destination']['type'] = 'library' + self.data_lib['destination']['name'] = 'GTN - Material' + self.data_lib['destination']['description'] = 'Galaxy Training Network Material' + self.data_lib['destination']['synopsis'] = 'Galaxy Training Network Material. See https://training.galaxyproject.org' + self.data_lib.setdefault('items', []) + self.data_lib.pop('libraries', None) + # get topic or create new one + topic = collections.OrderedDict() + for item in self.data_lib['items']: + if item['name'] == self.topic.title: + topic = item + if not topic: + self.data_lib['items'].append(topic) + topic['name'] = self.topic.title + topic['description'] = self.topic.summary + topic['items'] = [] + # get tutorial or create new one + self.tuto_data_lib = collections.OrderedDict() + for item in topic['items']: + if item['name'] == self.title: + self.tuto_data_lib = item + if not self.tuto_data_lib: + topic['items'].append(self.tuto_data_lib) + self.tuto_data_lib['name'] = self.title + self.tuto_data_lib['items'] = [] + + # GETTERS + def get_tuto_metata(self): + """Return the string corresponding to the tutorial metadata.""" + metadata = collections.OrderedDict() + metadata['title'] = self.title + metadata['zenodo_link'] = self.zenodo_link + metadata['questions'] = self.questions + metadata['objectives'] = self.objectives + metadata['time_estimation'] = self.time + metadata['key_points'] = self.key_points + metadata['contributors'] = self.contributors + return yaml.safe_dump( + metadata, + indent=2, + default_flow_style=False, + default_style='', + explicit_start=False) + + # SETTERS + def set_dir_name(self): + """Set the path to dir and files of a tutorial.""" + self.dir = os.path.join(self.topic.dir, "tutorials", self.name) + self.tuto_fp = os.path.join(self.dir, "tutorial.md") + self.slide_fp = os.path.join(self.dir, 'slides.html') + self.data_lib_fp = os.path.join(self.dir, "data_library.yaml") + self.wf_dir = os.path.join(self.dir, "workflows") + self.wf_fp = os.path.join(self.wf_dir, "main_workflow.ga") + self.tour_dir = os.path.join(self.dir, "tours") + # remove empty workflow file if there + empty_wf_filepath = os.path.join(self.wf_dir, "empty_workflow.ga") + if os.path.exists(empty_wf_filepath): + os.remove(empty_wf_filepath) + + # TEST METHODS + def exists(self): + """Test if the tutorial exists.""" + return os.path.isdir(self.dir) + + def has_workflow(self): + """Test if a workflow is provided for the tutorial.""" + return self.init_wf_fp or self.init_wf_id + + # EXPORT METHODS + def export_workflow_file(self): + """Copy or extract workflow file and add it to the tutorial directory.""" + if not os.path.exists(self.wf_dir): + os.makedirs(self.wf_dir) + if self.init_wf_fp: + shutil.copy(self.init_wf_fp, self.wf_fp) + elif self.init_wf_id: + gi = galaxy.GalaxyInstance(self.training.galaxy_url, key=self.training.galaxy_api_key) + gi.workflows.export_workflow_to_local_path( + self.init_wf_id, + self.wf_fp, + use_default_filename=False) + + # OTHER METHODS + def get_files_from_zenodo(self): + """Extract a list of URLs and dictionary describing the files from the JSON output of the Zenodo API.""" + z_record, req_res = get_zenodo_record(self.zenodo_link) + + self.zenodo_file_links = [] + if 'files' not in req_res: + raise ValueError("No files in the Zenodo record") + + files = [] + for f in req_res['files']: + file_dict = {'url': '', 'src': 'url', 'ext': '', 'info': self.zenodo_link} + if 'type' in f: + file_dict['ext'] = get_galaxy_datatype(f['type'], self.datatype_fp) + if 'links' not in f and 'self' not in f['links']: + raise ValueError("No link for file %s" % f) + file_dict['url'] = f['links']['self'] + self.zenodo_file_links.append(f['links']['self']) + files.append(file_dict) + + return (files, z_record) + + def prepare_data_library_from_zenodo(self): + """Get the list of URLs of the files on Zenodo, fill the data library, save it into the file.""" + self.zenodo_file_links = [] + if self.zenodo_link != '': + files, z_record = self.get_files_from_zenodo() + if z_record: + # get current data library and/or previous data library for the tutorial + # remove the latest tag of any existing library + # remove the any other existing library + current_data_lib = collections.OrderedDict() + previous_data_lib = collections.OrderedDict() + for item in self.tuto_data_lib['items']: + if item['name'] == "DOI: 10.5281/zenodo.%s" % z_record: + current_data_lib = item + elif item['description'] == 'latest': + previous_data_lib = item + previous_data_lib['description'] = '' + if not current_data_lib: + current_data_lib['name'] = "DOI: 10.5281/zenodo.%s" % z_record + current_data_lib['description'] = 'latest' + current_data_lib['items'] = [] + current_data_lib['items'] = files + + self.tuto_data_lib['items'] = [current_data_lib] + if previous_data_lib: + self.tuto_data_lib['items'].append(previous_data_lib) + save_to_yaml(self.data_lib, self.data_lib_fp) + + def write_hands_on_tutorial(self): + """Write the content of the hands-on tutorial in the corresponding file.""" + # add the zenodo links + self.body = templates.render(TUTO_HAND_ON_BODY_TEMPLATE, **{ + "z_file_links": "\n> ".join(self.zenodo_file_links), + "body": self.body + }) + # write in the tutorial file with the metadata on the top + metadata = self.get_tuto_metata() + with open(self.tuto_fp, 'w') as md: + md.write(templates.render(TUTO_HAND_ON_TEMPLATE, **{ + "metadata": metadata, + "body": self.body + })) + + def create_hands_on_tutorial(self, ctx): + """Create tutorial structure from the workflow file (if it is provided).""" + # load workflow and get hands-on body from the workflow + if self.init_wf_id: + if not self.training.galaxy_url: + raise ValueError("No Galaxy URL given") + if not self.training.galaxy_api_key: + raise ValueError("No API key to access the given Galaxy instance") + self.body = get_hands_on_boxes_from_running_galaxy(self.init_wf_id, self.training.galaxy_url, self.training.galaxy_api_key) + elif self.init_wf_fp: + self.body = get_hands_on_boxes_from_local_galaxy(self.training.kwds, self.init_wf_fp, ctx) + # write tutorial body + self.write_hands_on_tutorial() + + def create_tutorial(self, ctx): + """Create the skeleton of a new tutorial.""" + # create tuto folder and empty files + os.makedirs(self.dir) + os.makedirs(self.tour_dir) + os.makedirs(self.wf_dir) + + # extract the data library from Zenodo and the links for the tutorial + if self.zenodo_link != '': + info("Create the data library from Zenodo") + self.prepare_data_library_from_zenodo() + + # create tutorial skeleton from workflow and copy workflow file + if self.hands_on: + info("Create tutorial skeleton from workflow (if it is provided)") + self.create_hands_on_tutorial(ctx) + self.export_workflow_file() + + # create slide skeleton + if self.slides: + with open(self.slide_fp, 'w') as slide_f: + slide_f.write( + templates.render(TUTO_SLIDES_TEMPLATE, **{"metadata": self.get_tuto_metata()})) + + +def get_galaxy_datatype(z_ext, datatype_fp): + """Get the Galaxy datatype corresponding to a Zenodo file type.""" + g_datatype = '' + datatypes = load_yaml(datatype_fp) + if z_ext in datatypes: + g_datatype = datatypes[z_ext] + if g_datatype == '': + g_datatype = '# Please add a Galaxy datatype or update the shared/datatypes.yaml file' + info("Get Galaxy datatypes: %s --> %s" % (z_ext, g_datatype)) + return g_datatype + + +def get_zenodo_record(zenodo_link): + """Get the content of a Zenodo record.""" + # get the record in the Zenodo link + if 'doi' in zenodo_link: + z_record = zenodo_link.split('.')[-1] + else: + z_record = zenodo_link.split('/')[-1] + # get JSON corresponding to the record from Zenodo API + req = "https://zenodo.org/api/records/%s" % (z_record) + r = requests.get(req) + if r: + req_res = r.json() + else: + info("The Zenodo link (%s) seems invalid" % (zenodo_link)) + req_res = {'files': []} + z_record = None + return(z_record, req_res) + + +def get_wf_inputs(step_inp): + """Get the inputs from a workflow step and format them into a hierarchical dictionary.""" + inputs = {} + for inp_n, inp in step_inp.items(): + if '|' in inp_n: + repeat_regex = '(?P[^\|]*)_(?P\d+)\|(?P.+).+' + repeat_search = re.search(repeat_regex, inp_n) + hier_regex = '(?P[^\|]*)\|(?P.+)' + hier_regex = re.search(hier_regex, inp_n) + if repeat_search and repeat_search.start(0) <= hier_regex.start(0): + inputs.setdefault(repeat_search.group('prefix'), {}) + inputs[repeat_search.group('prefix')].setdefault( + repeat_search.group('nb'), + get_wf_inputs({hier_regex.group('suffix'): inp})) + else: + inputs.setdefault(hier_regex.group('prefix'), {}) + inputs[hier_regex.group('prefix')].update( + get_wf_inputs({hier_regex.group('suffix'): inp})) + else: + inputs.setdefault(inp_n, inp) + return inputs + + +def get_wf_param_values(init_params, inp_connections): + """Get the param values from a workflow step and format them into a hierarchical dictionary.""" + if not isinstance(init_params, str) or '\": \"' not in init_params: + form_params = init_params + else: + form_params = json.loads(init_params) + if isinstance(form_params, dict): + if '__class__' in form_params and form_params['__class__'] == 'RuntimeValue': + form_params = inp_connections + else: + json_params = form_params + form_params = {} + for p in json_params: + inp = inp_connections[p] if p in inp_connections else {} + form_params[p] = get_wf_param_values(json_params[p], inp) + elif isinstance(form_params, list): + json_params = form_params + form_params = [] + for i, p in enumerate(json_params): + inp = inp_connections[str(i)] if str(i) in inp_connections else {} + form_params.append(get_wf_param_values(p, inp)) + elif isinstance(form_params, str) and '"' in form_params: + form_params = form_params.replace('"', '') + return form_params + + +def format_wf_steps(wf, gi): + """Get a string with the hands-on boxes describing the different steps of the worklow.""" + body = '' + steps = wf['steps'] + + for s in range(len(steps)): + wf_step = steps[str(s)] + # get params in workflow + wf_param_values = {} + if wf_step['tool_state'] and wf_step['input_connections']: + wf_param_values = get_wf_param_values(wf_step['tool_state'], get_wf_inputs(wf_step['input_connections'])) + if not wf_param_values: + continue + # get tool description + try: + tool_desc = gi.tools.show_tool(wf_step['tool_id'], io_details=True) + except Exception: + tool_desc = {'inputs': []} + # get formatted param description + paramlist = '' + for inp in tool_desc["inputs"]: + tool_inp = ToolInput(inp, wf_param_values, steps, 1, should_be_there=True) + paramlist += tool_inp.get_formatted_desc() + # format the hands-on box + body += templates.render(HANDS_ON_TOOL_BOX_TEMPLATE, **{ + "tool_name": wf_step['name'], + "paramlist": paramlist}) + return body + + +def get_hands_on_boxes_from_local_galaxy(kwds, wf_filepath, ctx): + """Server local Galaxy and get the workflow dictionary.""" + assert is_galaxy_engine(**kwds) + runnable = for_path(wf_filepath) + tuto_body = '' + with engine_context(ctx, **kwds) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([runnable]) as config: + workflow_id = config.workflow_id(wf_filepath) + wf = config.gi.workflows.export_workflow_dict(workflow_id) + tuto_body = format_wf_steps(wf, config.gi) + return tuto_body + + +def get_hands_on_boxes_from_running_galaxy(wf_id, galaxy_url, galaxy_api_key): + """Get the workflow dictionary from a running Galaxy instance with the workflow installed on it.""" + gi = galaxy.GalaxyInstance(galaxy_url, key=galaxy_api_key) + wf = gi.workflows.export_workflow_dict(wf_id) + tuto_body = format_wf_steps(wf, gi) + return tuto_body diff --git a/planemo/training/utils.py b/planemo/training/utils.py new file mode 100644 index 000000000..bfcc69bc4 --- /dev/null +++ b/planemo/training/utils.py @@ -0,0 +1,76 @@ +"""Module contains code for the Requirement, Reference and some general functions for training.""" + +import collections + +import oyaml as yaml + + +class Requirement: + """Class to describe a training requirement.""" + + def __init__(self, title="", req_type="internal", link="/introduction/"): + """Init a Requirement instance.""" + self.title = title + self.type = req_type + self.link = link + + def init_from_dict(self, dict): + """Init from a dictionary generated by export_to_ordered_dict.""" + self.title = dict['title'] + self.type = dict['type'] + self.link = dict['link'] + + def export_to_ordered_dict(self): + """Export the requirement into an ordered dictionary.""" + req = collections.OrderedDict() + req['title'] = self.title + req['type'] = self.type + req['link'] = self.link + return req + + +class Reference: + """Class to describe a training reference.""" + + def __init__(self, authors="authors et al", title="the title", link="link", summary="Why this reference is useful"): + """Init a Reference instance.""" + self.authors = authors + self.title = title + self.link = link + self.summary = summary + + def init_from_dict(self, dict): + """Init from a dictionary generated by export_to_ordered_dict.""" + self.authors = dict['authors'] + self.title = dict['title'] + self.link = dict['link'] + self.summary = dict['summary'] + + def export_to_ordered_dict(self): + """Export the reference into an ordered dictionary.""" + ref = collections.OrderedDict() + ref['authors'] = self.authors + ref['title'] = self.title + ref['link'] = self.link + ref['summary'] = self.summary + return ref + + +def load_yaml(filepath): + """Load the content of a YAML file to a dictionary.""" + with open(filepath, "r") as m_file: + content = yaml.load(m_file) + return content + + +def save_to_yaml(content, filepath): + """Save a dictionary to a YAML file.""" + with open(filepath, 'w') as stream: + yaml.safe_dump(content, + stream, + indent=2, + default_flow_style=False, + default_style='', + explicit_start=True, + encoding='utf-8', + allow_unicode=True) diff --git a/setup.py b/setup.py index f441bcfcb..7a5bbfd6e 100644 --- a/setup.py +++ b/setup.py @@ -58,6 +58,7 @@ def get_var(var_name): 'planemo.shed', 'planemo.shed2tap', 'planemo.test', + 'planemo.training', 'planemo.xml', ] ENTRY_POINTS = ''' diff --git a/tests/data/training_metadata.yaml b/tests/data/training_metadata.yaml new file mode 100644 index 000000000..bfa13b156 --- /dev/null +++ b/tests/data/training_metadata.yaml @@ -0,0 +1,13 @@ +--- +name: test +type: use +title: Test +summary: 'Summary' +edam_ontology: '' +requirements: +- title: Galaxy introduction + type: internal + link: /introduction/ +maintainers: +- maintainer1 +- maintainer2 \ No newline at end of file diff --git a/tests/data/training_metadata_w_zenodo.yaml b/tests/data/training_metadata_w_zenodo.yaml deleted file mode 100644 index fe87389ad..000000000 --- a/tests/data/training_metadata_w_zenodo.yaml +++ /dev/null @@ -1,42 +0,0 @@ ---- -name: test -type: use -title: Test -summary: 'Summary' -edam_ontology: '' -requirements: -- title: Galaxy introduction - type: internal - link: /introduction/ -material: -- title: Introduction to the topic - type: introduction - name: introduction - slides: 'yes' - contributors: - - contributor1 - - contributor2 -- title: Test - name: test - type: tutorial - zenodo_link: 'https://zenodo.org/record/1321885' - hands_on: true - slides: false - workflows: true - galaxy_tour: false - questions: - - '' - - '' - objectives: - - '' - - '' - time_estimation: 1d/3h/6h - key_points: - - '' - - '' - contributors: - - contributor1 - - contributor2 -maintainers: -- maintainer1 -- maintainer2 \ No newline at end of file diff --git a/tests/data/training_query_tabular.json b/tests/data/training_query_tabular.json new file mode 100644 index 000000000..1507c75ab --- /dev/null +++ b/tests/data/training_query_tabular.json @@ -0,0 +1,893 @@ +{ + "model_class": "Tool", + "version": "2.0.0", + "id": "toolshed.g2.bx.psu.edu/repos/iuc/query_tabular/query_tabular/2.0.0", + "inputs": [ + { + "type": "hidden", + "model_class": "HiddenToolParameter", + "value": "workdb.sqlite", + "is_dynamic": false, + "refresh_on_change": false, + "label": "", + "name": "workdb", + "argument": null, + "help": "", + "hidden": true, + "optional": false + }, + { + "type": "section", + "expanded": false, + "inputs": [ + { + "type": "data", + "model_class": "DataToolParameter", + "value": null, + "edam": { + "edam_formats": [ + "format_3621" + ], + "edam_data": [ + "data_0006" + ] + }, + "extensions": [ + "sqlite" + ], + "argument": null, + "optional": true, + "hidden": false, + "help": "Make sure your added table names are not already in this database", + "name": "withdb", + "refresh_on_change": true, + "multiple": false, + "label": "Add tables to this Database", + "options": { + "hda": [], + "hdca": [] + }, + "is_dynamic": false + } + ], + "model_class": "Section", + "name": "add_to_database", + "title": "Add tables to an existing database", + "help": null + }, + { + "type": "repeat", + "model_class": "Repeat", + "default": 0, + "max": "__Infinity__", + "inputs": [ + { + "type": "data", + "model_class": "DataToolParameter", + "value": null, + "edam": { + "edam_formats": [ + "format_3475" + ], + "edam_data": [ + "data_0006" + ] + }, + "extensions": [ + "tabular" + ], + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "table", + "refresh_on_change": true, + "multiple": false, + "label": "Tabular Dataset for Table", + "options": { + "hda": [], + "hdca": [] + }, + "is_dynamic": false + }, + { + "type": "section", + "expanded": false, + "inputs": [ + { + "type": "repeat", + "model_class": "Repeat", + "default": 0, + "max": "__Infinity__", + "inputs": [ + { + "type": "conditional", + "model_class": "Conditional", + "name": "filter", + "test_param": { + "type": "select", + "model_class": "SelectToolParameter", + "value": "skip", + "is_dynamic": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "filter_type", + "refresh_on_change": true, + "multiple": false, + "textable": true, + "label": "Filter By", + "options": [ + [ + "skip leading lines", + "skip", + false + ], + [ + "comment char", + "comment", + false + ], + [ + "by regex expression matching", + "regex", + false + ], + [ + "select columns", + "select_columns", + false + ], + [ + "regex replace value in column", + "replace", + false + ], + [ + "prepend a line number column", + "prepend_line_num", + false + ], + [ + "append a line number column", + "append_line_num", + false + ], + [ + "prepend a column with the given text", + "prepend_text", + false + ], + [ + "append a column with the given text", + "append_text", + false + ], + [ + "normalize list columns, replicates row for each item in list", + "normalize", + false + ] + ], + "display": null + }, + "cases": [ + { + "model_class": "ConditionalWhen", + "value": "skip", + "inputs": [ + { + "type": "integer", + "model_class": "IntegerToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "help": "Leave blank to use the comment lines metadata for this dataset", + "hidden": false, + "optional": true, + "name": "skip_lines", + "min": 0, + "max": null, + "label": "Skip lines", + "datalist": [], + "is_dynamic": false + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "comment", + "inputs": [ + { + "type": "select", + "model_class": "SelectToolParameter", + "value": "35", + "is_dynamic": false, + "argument": null, + "optional": true, + "hidden": false, + "help": "lines beginning with these are skipped", + "name": "comment_char", + "refresh_on_change": false, + "multiple": true, + "textable": true, + "label": "Ignore lines beginning with these characters", + "options": [ + [ + ">", + "62", + false + ], + [ + "@", + "64", + false + ], + [ + "+", + "43", + false + ], + [ + "<", + "60", + false + ], + [ + "*", + "42", + false + ], + [ + "-", + "45", + false + ], + [ + "=", + "61", + false + ], + [ + "|", + "124", + false + ], + [ + "?", + "63", + false + ], + [ + "$", + "36", + false + ], + [ + ".", + "46", + false + ], + [ + ":", + "58", + false + ], + [ + "&", + "38", + false + ], + [ + "%", + "37", + false + ], + [ + "^", + "94", + false + ], + [ + "#", + "35", + true + ], + [ + "!", + "33", + false + ] + ], + "display": "checkboxes" + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "prepend_line_num", + "inputs": [] + }, + { + "model_class": "ConditionalWhen", + "value": "append_line_num", + "inputs": [] + }, + { + "model_class": "ConditionalWhen", + "value": "prepend_text", + "inputs": [ + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "column_text", + "label": "text for column", + "datalist": [], + "is_dynamic": false + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "append_text", + "inputs": [ + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "column_text", + "label": "text for column", + "datalist": [], + "is_dynamic": false + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "regex", + "inputs": [ + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "regex_pattern", + "label": "regex pattern", + "datalist": [], + "is_dynamic": false + }, + { + "type": "select", + "model_class": "SelectToolParameter", + "value": "exclude_match", + "is_dynamic": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "regex_action", + "refresh_on_change": false, + "multiple": false, + "textable": true, + "label": "action for regex match", + "options": [ + [ + "exclude line on pattern match", + "exclude_match", + false + ], + [ + "include line on pattern match", + "include_match", + false + ], + [ + "exclude line if pattern found", + "exclude_find", + false + ], + [ + "include line if pattern found", + "include_find", + false + ] + ], + "display": null + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "select_columns", + "inputs": [ + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "example: 1,4,2 or c1,c4,c2(selects the first,fourth, and second columns)", + "name": "columns", + "label": "enter column numbers to keep", + "datalist": [], + "is_dynamic": false + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "replace", + "inputs": [ + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "example: 1 or c1 (selects the first column)", + "name": "column", + "label": "enter column number to replace", + "datalist": [], + "is_dynamic": false + }, + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "regex_pattern", + "label": "regex pattern", + "datalist": [], + "is_dynamic": false + }, + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "regex_replace", + "label": "replacement expression", + "datalist": [], + "is_dynamic": false + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "normalize", + "inputs": [ + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "example: 2,4 or c2,c4 (selects the second, and fourth columns) If multiple columns are selected, they should have the same length and separator on each line", + "name": "columns", + "label": "enter column numbers to normalize", + "datalist": [], + "is_dynamic": false + }, + { + "type": "text", + "model_class": "TextToolParameter", + "value": ",", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "separator", + "label": "List item delimiter in column", + "datalist": [], + "is_dynamic": false + } + ] + } + ] + } + ], + "min": 0, + "name": "linefilters", + "title": "Filter Tabular Input Lines", + "help": null + } + ], + "model_class": "Section", + "name": "input_opts", + "title": "Filter Dataset Input", + "help": null + }, + { + "type": "section", + "expanded": false, + "inputs": [ + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": true, + "hidden": false, + "help": "By default, tables will be named: t1,t2,...,tn (table names must be unique)", + "name": "table_name", + "label": "Specify Name for Table", + "datalist": [], + "is_dynamic": false + }, + { + "type": "boolean", + "model_class": "BooleanToolParameter", + "value": "false", + "refresh_on_change": false, + "argument": null, + "help": "The names will be quoted if they are not valid SQLite column names.", + "hidden": false, + "optional": false, + "name": "column_names_from_first_line", + "falsevalue": "False", + "truevalue": "True", + "label": "Use first line as column names", + "is_dynamic": false + }, + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": true, + "hidden": false, + "help": "By default, table columns will be named: c1,c2,c3,...,cn (column names for a table must be unique) You can override the default names by entering a comma -separated list of names, e.g. ',name1,,,name2' would rename the second and fifth columns.", + "name": "col_names", + "label": "Specify Column Names (comma-separated list)", + "datalist": [], + "is_dynamic": false + }, + { + "type": "boolean", + "model_class": "BooleanToolParameter", + "value": "false", + "refresh_on_change": false, + "argument": null, + "help": "", + "hidden": false, + "optional": false, + "name": "load_named_columns", + "falsevalue": "", + "truevalue": "load_named_columns", + "label": "Only load the columns you have named into database", + "is_dynamic": false + }, + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": true, + "hidden": false, + "help": "Only creates this additional column when a name is entered. (This can not be the same name as any of the other columns in this table.)", + "name": "pkey_autoincr", + "label": "Add an auto increment primary key column with this name", + "datalist": [], + "is_dynamic": false + }, + { + "type": "repeat", + "model_class": "Repeat", + "default": 0, + "max": "__Infinity__", + "inputs": [ + { + "type": "boolean", + "model_class": "BooleanToolParameter", + "value": "false", + "refresh_on_change": false, + "argument": null, + "help": "", + "hidden": false, + "optional": false, + "name": "unique", + "falsevalue": "no", + "truevalue": "yes", + "label": "This is a unique index", + "is_dynamic": false + }, + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "Create an index on the column names: e.g. for default column names: c1 or c2,c4 ( use the names you gave for columns)", + "name": "index_columns", + "label": "Index on Columns", + "datalist": [], + "is_dynamic": false + } + ], + "min": 0, + "name": "indexes", + "title": "Table Index", + "help": null + } + ], + "model_class": "Section", + "name": "tbl_opts", + "title": "Table Options", + "help": null + } + ], + "min": 0, + "name": "tables", + "title": "Database Table", + "help": null + }, + { + "type": "boolean", + "model_class": "BooleanToolParameter", + "value": "false", + "refresh_on_change": false, + "argument": null, + "help": "SQLite to tabular tool can run additional queries on this database", + "hidden": false, + "optional": false, + "name": "save_db", + "falsevalue": "no", + "truevalue": "yes", + "label": "Save the sqlite database in your history", + "is_dynamic": false + }, + { + "type": "text", + "model_class": "TextToolParameter", + "value": "", + "refresh_on_change": false, + "area": true, + "argument": null, + "optional": true, + "hidden": false, + "help": "By default: tables are named: t1,t2,...,tn and columns in each table: c1,c2,...,cn", + "name": "sqlquery", + "label": "SQL Query to generate tabular output", + "datalist": [], + "is_dynamic": false + }, + { + "type": "conditional", + "model_class": "Conditional", + "name": "query_result", + "test_param": { + "type": "select", + "model_class": "SelectToolParameter", + "value": "yes", + "is_dynamic": false, + "argument": null, + "optional": false, + "hidden": false, + "help": "", + "name": "header", + "refresh_on_change": true, + "multiple": false, + "textable": true, + "label": "include query result column headers", + "options": [ + [ + "Yes", + "yes", + false + ], + [ + "No", + "no", + false + ] + ], + "display": null + }, + "cases": [ + { + "model_class": "ConditionalWhen", + "value": "yes", + "inputs": [ + { + "type": "select", + "model_class": "SelectToolParameter", + "value": "35", + "is_dynamic": false, + "argument": null, + "optional": true, + "hidden": false, + "help": "", + "name": "header_prefix", + "refresh_on_change": false, + "multiple": false, + "textable": true, + "label": "Prefix character for column_header line", + "options": [ + [ + "no comment character prefix", + "", + false + ], + [ + ">", + "62", + false + ], + [ + "@", + "64", + false + ], + [ + "+", + "43", + false + ], + [ + "<", + "60", + false + ], + [ + "*", + "42", + false + ], + [ + "-", + "45", + false + ], + [ + "=", + "61", + false + ], + [ + "|", + "124", + false + ], + [ + "?", + "63", + false + ], + [ + "$", + "36", + false + ], + [ + ".", + "46", + false + ], + [ + ":", + "58", + false + ], + [ + "&", + "38", + false + ], + [ + "%", + "37", + false + ], + [ + "^", + "94", + false + ], + [ + "#", + "35", + true + ], + [ + "!", + "33", + false + ] + ], + "display": null + } + ] + }, + { + "model_class": "ConditionalWhen", + "value": "no", + "inputs": [] + } + ] + } + ], + "outputs": [ + { + "label": "sqlite db of ${on_string}", + "model_class": "ToolOutput", + "name": "sqlitedb", + "format": "sqlite", + "edam_format": "format_3621", + "hidden": false, + "edam_data": "data_0006" + }, + { + "label": "query results on ${on_string}", + "model_class": "ToolOutput", + "name": "output", + "format": "tabular", + "edam_format": "format_3475", + "hidden": false, + "edam_data": "data_0006" + } + ], + "edam_operations": [], + "description": "using sqlite sql", + "panel_section_id": "proteomics", + "panel_section_name": "Proteomics", + "labels": [], + "tool_shed_repository": { + "name": "query_tabular", + "owner": "iuc", + "changeset_revision": "1ea4e668bf73", + "tool_shed": "toolshed.g2.bx.psu.edu" + }, + "name": "Query Tabular", + "form_style": "regular", + "edam_topics": [] +} \ No newline at end of file diff --git a/tests/data/training_tutorial.md b/tests/data/training_tutorial.md index 96553f63f..f0855ce2e 100644 --- a/tests/data/training_tutorial.md +++ b/tests/data/training_tutorial.md @@ -2,7 +2,7 @@ layout: tutorial_hands_on title: "A tutorial to test" -zenodo_link: "https://zenodo.org" +zenodo_link: "https://zenodo.org/record/1321885" questions: - "What is the purpose of the tutorial?" objectives: diff --git a/tests/data/training_tutorial_wo_zenodo.md b/tests/data/training_tutorial_wo_zenodo.md new file mode 100644 index 000000000..40c1b5df6 --- /dev/null +++ b/tests/data/training_tutorial_wo_zenodo.md @@ -0,0 +1,35 @@ +--- +layout: tutorial_hands_on + +title: "A tutorial to test" +zenodo_link: "" +questions: + - "What is the purpose of the tutorial?" +objectives: + - "A learning objective" + - "Analysis of differentially expressed genes" + - "Identification of functional enrichment among differentially expressed genes" +time_estimation: "1H" +key_points: + - "Take home message" +contributors: + - the_best_contributor +--- + +# Introduction +{:.no_toc} + +The introduction + +> ### Agenda +> +> In this tutorial, we will deal with: +> +> 1. TOC +> {:toc} +> +{: .agenda} + +# First section + +# Second section \ No newline at end of file diff --git a/tests/data/training_wf_param_values.json b/tests/data/training_wf_param_values.json new file mode 100644 index 000000000..80cff6d63 --- /dev/null +++ b/tests/data/training_wf_param_values.json @@ -0,0 +1,65 @@ +{ + "save_db": "false", + "add_to_database": { + "withdb": { + "output_name": "output", + "id": 0 + } + }, + "workdb": "workdb.sqlite", + "__rerun_remap_job_id__": null, + "__page__": null, + "tables": [ + { + "tbl_opts": { + "pkey_autoincr": "", + "column_names_from_first_line": "false", + "table_name": "", + "indexes": [], + "load_named_columns": "false", + "col_names": "" + }, + "__index__": 0, + "input_opts": { + "linefilters": [ + { + "__index__": 0, + "filter": { + "filter_type": "skip", + "skip_lines": "1", + "__current_case__": 0 + } + } + ] + }, + "table": { + "output_name": "output", + "id": 1 + } + }, + { + "tbl_opts": { + "pkey_autoincr": "", + "column_names_from_first_line": "false", + "table_name": "", + "indexes": [], + "load_named_columns": "false", + "col_names": "" + }, + "__index__": 1, + "input_opts": { + "linefilters": [] + }, + "table": { + "output_name": "output", + "id": 2 + } + } + ], + "query_result": { + "header_prefix": "38", + "header": "yes", + "__current_case__": 0 + }, + "sqlquery": "" +} \ No newline at end of file diff --git a/tests/test_cmd_training_fill_data_library.py b/tests/test_cmd_training_fill_data_library.py index f1031ec91..5f19ab4bc 100644 --- a/tests/test_cmd_training_fill_data_library.py +++ b/tests/test_cmd_training_fill_data_library.py @@ -2,26 +2,13 @@ import os import shutil +from .test_cmd_training_generate_from_wf import create_tutorial_dir from .test_utils import ( CliTestCase, TEST_DATA_DIR ) -def create_tutorial_dir(topic_n, tuto_n, metadata_n): - """Create the tutorial directory structure.""" - topic_dir = os.path.join("topics", topic_n) - tuto_dir = os.path.join(topic_dir, "tutorials", tuto_n) - metadata_path = os.path.join(topic_dir, "metadata.yaml") - if not os.path.isdir(topic_dir): - os.makedirs(topic_dir) - if not os.path.isdir(tuto_dir): - os.makedirs(tuto_dir) - if not os.path.exists(metadata_path): - metadata = os.path.join(TEST_DATA_DIR, metadata_n) - shutil.copy(metadata, metadata_path) - - class CmdTrainingFillDataLibraryTestCase(CliTestCase): """Container class defining test cases for the ``training_fill_data_library`` command.""" @@ -49,7 +36,7 @@ def test_training_fill_data_library_command_tutorial_topic(self): tuto_n = "test" datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") # not working - create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + create_tutorial_dir(topic_n, tuto_n) training_fill_data_library_command = [ "training_fill_data_library", "--topic_name", topic_n, @@ -59,7 +46,7 @@ def test_training_fill_data_library_command_tutorial_topic(self): shutil.rmtree("topics") self._check_exit_code(training_fill_data_library_command, exit_code=-1) # working - create_tutorial_dir(topic_n, tuto_n, "training_metadata_w_zenodo.yaml") + create_tutorial_dir(topic_n, tuto_n) training_fill_data_library_command = [ "training_fill_data_library", "--topic_name", topic_n, @@ -73,14 +60,14 @@ def test_training_fill_data_library_command_tutorial_zenodo(self): with self._isolate(): topic_n = "test" tuto_n = "test" - create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + create_tutorial_dir(topic_n, tuto_n) datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") # not working test training_fill_data_library_command = [ "training_fill_data_library", "--topic_name", topic_n, "--tutorial_name", tuto_n, - "--zenodo", "https://zenodo.org/record/1321885" + "--zenodo_link", "https://zenodo.org/record/1321885" ] self._check_exit_code(training_fill_data_library_command, exit_code=-1) # working @@ -88,7 +75,7 @@ def test_training_fill_data_library_command_tutorial_zenodo(self): "training_fill_data_library", "--topic_name", topic_n, "--tutorial_name", tuto_n, - "--zenodo", "https://zenodo.org/record/1321885", + "--zenodo_link", "https://zenodo.org/record/1321885", "--datatypes", datatype ] self._check_exit_code(training_fill_data_library_command, exit_code=0) diff --git a/tests/test_cmd_training_generate_from_wf.py b/tests/test_cmd_training_generate_from_wf.py index e3533a7ec..4b9619bd6 100644 --- a/tests/test_cmd_training_generate_from_wf.py +++ b/tests/test_cmd_training_generate_from_wf.py @@ -8,7 +8,7 @@ ) -def create_tutorial_dir(topic_n, tuto_n, metadata_n): +def create_tutorial_dir(topic_n, tuto_n): """Create the tutorial directory structure.""" topic_dir = os.path.join("topics", topic_n) tuto_dir = os.path.join(topic_dir, "tutorials", tuto_n) @@ -18,8 +18,11 @@ def create_tutorial_dir(topic_n, tuto_n, metadata_n): if not os.path.isdir(tuto_dir): os.makedirs(tuto_dir) if not os.path.exists(metadata_path): - metadata = os.path.join(TEST_DATA_DIR, metadata_n) + metadata = os.path.join(TEST_DATA_DIR, "training_metadata.yaml") shutil.copy(metadata, metadata_path) + shutil.copy( + os.path.join(TEST_DATA_DIR, "training_tutorial.md"), + os.path.join(tuto_dir, "tutorial.md")) class CmdTrainingGenerateFromWfTestCase(CliTestCase): @@ -49,14 +52,14 @@ def test_training_generate_from_wf_command_local_wf(self): tuto_n = "test" test_workflow = os.path.join(TEST_DATA_DIR, "test_workflow_1.ga") # working test - create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + create_tutorial_dir(topic_n, tuto_n) training_init_command = [ - "training_generate_tuto_from_wf", - "--topic_name", "test", - "--tutorial_name", "test", + "training_generate_from_wf", + "--topic_name", topic_n, + "--tutorial_name", tuto_n, "--workflow", test_workflow ] - self._check_exit_code(training_init_command, exit_code=-1) + self._check_exit_code(training_init_command, exit_code=0) shutil.rmtree("topics") def test_training_generate_from_wf_command_remote_wf(self): @@ -67,20 +70,20 @@ def test_training_generate_from_wf_command_remote_wf(self): # not working test training_init_command = [ "training_generate_from_wf", - "--topic_name", "test", - "--tutorial_name", "test", + "--topic_name", topic_n, + "--tutorial_name", tuto_n, "--workflow_id", "ID" ] self._check_exit_code(training_init_command, exit_code=-1) # not working test - create_tutorial_dir(topic_n, tuto_n, "training_metadata_wo_zenodo.yaml") + create_tutorial_dir(topic_n, tuto_n) training_init_command = [ "training_generate_from_wf", - "--topic_name", "test", - "--tutorial_name", "test", + "--topic_name", topic_n, + "--tutorial_name", tuto_n, "--workflow_id", "ID", "--galaxy_url", "https://usegalaxy.eu/", "--galaxy_api_key", "API" ] - self._check_exit_code(training_init_command, exit_code=0) + self._check_exit_code(training_init_command, exit_code=-1) shutil.rmtree("topics") diff --git a/tests/test_cmd_training_init.py b/tests/test_cmd_training_init.py index 2f04991d6..71bb17f27 100644 --- a/tests/test_cmd_training_init.py +++ b/tests/test_cmd_training_init.py @@ -3,7 +3,6 @@ from .test_utils import ( CliTestCase, - PROJECT_TEMPLATES_DIR, TEST_DATA_DIR ) @@ -18,28 +17,15 @@ def test_training_init_command_by_default(self): "training_init", "--topic_name", "test" ] - self._check_exit_code(training_init_command, exit_code=-1) - - def test_training_init_command_templates(self): - """Test training_init command with template path.""" - with self._isolate(): - training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") - training_init_command = [ - "training_init", - "--topic_name", "test", - "--templates", training_template - ] self._check_exit_code(training_init_command, exit_code=0) def test_training_init_command_topic(self): """Test training_init command to create new topic.""" with self._isolate(): - training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # working test training_init_command = [ "training_init", "--topic_name", "test", - "--templates", training_template, "--topic_title", "Topic title", "--topic_target", "use", "--topic_summary", "Summary" @@ -49,7 +35,6 @@ def test_training_init_command_topic(self): training_init_command = [ "training_init", "--topic_name", "test", - "--templates", training_template, "--topic_title", "Topic title", "--topic_target", "test", "--topic_summary", "Summary" @@ -59,25 +44,21 @@ def test_training_init_command_topic(self): def test_training_init_command_tutorial_no_topic(self): """Test training_init command with tutorial but no topic.""" with self._isolate(): - training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # working test training_init_command = [ "training_init", - "--tutorial_name", "test", - "--templates", training_template, + "--tutorial_name", "test" ] self._check_exit_code(training_init_command, exit_code=2) def test_training_init_command_tutorial(self): """Test training_init command to create new tutorial.""" with self._isolate(): - training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # working test training_init_command = [ "training_init", "--topic_name", "test", "--tutorial_name", "test", - "--templates", training_template, "--tutorial_title", "Title of the tutorial", "--hands_on", "--slides" @@ -87,15 +68,13 @@ def test_training_init_command_tutorial(self): def test_training_init_command_tutorial_zenodo(self): """Test training_init command to create new tutorial with zenodo.""" with self._isolate(): - training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") datatype = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") # not working test training_init_command = [ "training_init", "--topic_name", "test", "--tutorial_name", "test", - "--zenodo", "https://zenodo.org/record/1321885", - "--templates", training_template + "--zenodo_link", "https://zenodo.org/record/1321885" ] self._check_exit_code(training_init_command, exit_code=-1) # working @@ -103,38 +82,33 @@ def test_training_init_command_tutorial_zenodo(self): "training_init", "--topic_name", "test", "--tutorial_name", "test", - "--zenodo", "https://zenodo.org/record/1321885", - "--datatypes", datatype, - "--templates", training_template + "--zenodo_link", "https://zenodo.org/record/1321885", + "--datatypes", datatype ] self._check_exit_code(training_init_command, exit_code=0) def test_training_init_command_tutorial_local_wf(self): """Test training_init command to create new tutorial with local workflow.""" with self._isolate(): - training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") test_workflow = os.path.join(TEST_DATA_DIR, "test_workflow_1.ga") # working test training_init_command = [ "training_init", "--topic_name", "test", "--tutorial_name", "test", - "--workflow", test_workflow, - "--templates", training_template + "--workflow", test_workflow ] - self._check_exit_code(training_init_command, exit_code=-1) + self._check_exit_code(training_init_command, exit_code=0) def test_training_init_command_tutorial_remote_wf(self): """Test training_init command to create new tutorial with workflow on running instance.""" with self._isolate(): - training_template = os.path.join(PROJECT_TEMPLATES_DIR, "training") # not working test training_init_command = [ "training_init", "--topic_name", "test", "--tutorial_name", "test", - "--workflow_id", "ID", - "--templates", training_template + "--workflow_id", "ID" ] self._check_exit_code(training_init_command, exit_code=-1) # working test @@ -144,7 +118,6 @@ def test_training_init_command_tutorial_remote_wf(self): "--tutorial_name", "test", "--workflow_id", "ID", "--galaxy_url", "https://usegalaxy.eu/", - "--galaxy_api_key", "API", - "--templates", training_template + "--galaxy_api_key", "API" ] - self._check_exit_code(training_init_command, exit_code=-1) + self._check_exit_code(training_init_command, exit_code=0) diff --git a/tests/test_training.py b/tests/test_training.py index f2afedb41..e52763237 100644 --- a/tests/test_training.py +++ b/tests/test_training.py @@ -1,5 +1,4 @@ """Training training functions.""" - import json import os import shutil @@ -7,942 +6,234 @@ from nose.tools import assert_raises_regexp from planemo import cli -from planemo import training -from planemo.engine import ( - engine_context, - is_galaxy_engine, -) from planemo.runnable import for_path -from .test_utils import ( - TEST_DATA_DIR -) +from planemo.training import Training +from .test_utils import TEST_DATA_DIR -METADATA_FP = os.path.join(TEST_DATA_DIR, "training_metadata_w_zenodo.yaml") -DATATYPE_FP = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") -ZENODO_LINK = 'https://zenodo.org/record/1321885' +datatype_fp = os.path.join(TEST_DATA_DIR, "training_datatypes.yaml") +tuto_fp = os.path.join(TEST_DATA_DIR, "training_tutorial.md") +tuto_wo_zenodo_fp = os.path.join(TEST_DATA_DIR, "training_tutorial_wo_zenodo.md") +zenodo_link = 'https://zenodo.org/record/1321885' +# load a workflow generated from Galaxy WF_FP = os.path.join(TEST_DATA_DIR, "training_workflow.ga") +with open(WF_FP, "r") as wf_f: + wf = json.load(wf_f) +# load wf_param_values (output of tutorial.get_wf_param_values on wf['steps']['4']) +with open(os.path.join(TEST_DATA_DIR, "training_wf_param_values.json"), "r") as wf_param_values_f: + wf_param_values = json.load(wf_param_values_f) +# configuration RUNNABLE = for_path(WF_FP) CTX = cli.Context() CTX.planemo_directory = "/tmp/planemo-test-workspace" - - -def prepare_test(): - """Prepare kwds, topic_dir and tuto_dir.""" - # clean before - if os.path.exists("topics"): - shutil.rmtree("topics") - if os.path.exists("metadata"): - shutil.rmtree("metadata") - # get info - topic_name = 'my_new_topic' - topic_dir = os.path.join("topics", topic_name) - tuto_name = "new_tuto" - tuto_dir = os.path.join(topic_dir, "tutorials", tuto_name) - kwds = { - 'topic_name': topic_name, - 'topic_title': "New topic", - 'topic_target': "use", - 'topic_summary': "Topic summary", - 'tutorial_name': tuto_name, - 'tutorial_title': "Title of tuto", - 'hands_on': True, - 'slides': True, - 'workflow': None, - 'workflow_id': None, - 'zenodo_link': None, - 'datatypes': DATATYPE_FP, - 'templates': None, - # planemo configuation - 'conda_auto_init': True, - 'conda_auto_install': True, - 'conda_copy_dependencies': False, - 'conda_debug': False, - 'conda_dependency_resolution': False, - 'conda_ensure_channels': 'iuc,bioconda,conda-forge,defaults', - 'conda_exec': None, - 'conda_prefix': None, - 'conda_use_local': False, - 'brew_dependency_resolution': False, - 'daemon': False, - 'database_connection': None, - 'database_type': 'auto', - 'dependency_resolvers_config_file': None, - 'docker': False, - 'docker_cmd': 'docker', - 'docker_extra_volume': None, - 'docker_galaxy_image': 'quay.io/bgruening/galaxy', - 'docker_host': None, - 'docker_sudo': False, - 'docker_sudo_cmd': 'sudo', - 'engine': 'galaxy', - 'extra_tools': (), - 'file_path': None, - 'galaxy_api_key': None, - 'galaxy_branch': None, - 'galaxy_database_seed': None, - 'galaxy_email': 'planemo@galaxyproject.org', - 'galaxy_root': None, - 'galaxy_single_user': True, - 'galaxy_source': None, - 'galaxy_url': None, - 'host': '127.0.0.1', - 'ignore_dependency_problems': False, - 'install_galaxy': False, - 'job_config_file': None, - 'mulled_containers': False, - 'no_cleanup': False, - 'no_cache_galaxy': False, - 'no_dependency_resolution': True, - 'non_strict_cwl': False, - 'pid_file': None, - 'port': '9090', - 'postgres_database_host': None, - 'postgres_database_port': None, - 'postgres_database_user': 'postgres', - 'postgres_psql_path': 'psql', - 'profile': None, - 'shed_dependency_resolution': False, - 'shed_install': True, - 'shed_tool_conf': None, - 'shed_tool_path': None, - 'skip_venv': False, - 'test_data': None, - 'tool_data_table': None, - 'tool_dependency_dir': None - } - return (kwds, topic_dir, tuto_dir) - - -def test_load_yaml(): - """Test :func:`planemo.training.load_yaml`.""" - metadata = training.load_yaml(METADATA_FP) - # test if name there - assert metadata["name"] == "test" - # test if order of material is conserved - assert metadata["material"][1]["name"] == "test" - - -def test_save_to_yaml(): - """Test :func:`planemo.training.save_to_yaml`.""" - metadata = training.load_yaml(METADATA_FP) - new_metadata_fp = "metadata.yaml" - training.save_to_yaml(metadata, new_metadata_fp) - assert os.path.exists(new_metadata_fp) - assert 'material' in open(new_metadata_fp, 'r').read() - os.remove(new_metadata_fp) - - -def test_create_topic(): - """Test :func:`planemo.training.create_topic`.""" - kwds, topic_dir, tuto_dir = prepare_test() - topic_name = kwds['topic_name'] - topic_title = kwds['topic_title'] - training.create_topic(kwds, topic_dir) - # check if files has been created and updated with topic name - index_fp = os.path.join(topic_dir, "index.md") - assert os.path.exists(index_fp) - assert topic_name in open(index_fp, 'r').read() - readme_fp = os.path.join(topic_dir, "README.md") - assert os.path.exists(readme_fp) - assert topic_title in open(readme_fp, 'r').read() - # check metadata content - metadata = training.load_yaml(os.path.join(topic_dir, "metadata.yaml")) - assert metadata['name'] == topic_name - # check in metadata directory - assert os.path.exists(os.path.join("metadata", "%s.yaml" % topic_name)) - # check dockerfile - docker_folder = os.path.join(topic_dir, "docker") - dockerfile_fp = os.path.join(docker_folder, "Dockerfile") - assert os.path.exists(dockerfile_fp) - assert topic_name in open(dockerfile_fp, 'r').read() - assert topic_title in open(dockerfile_fp, 'r').read() - # check introduction slide - slides_folder = os.path.join(topic_dir, "slides") - intro_slide_fp = os.path.join(slides_folder, "introduction.html") - assert os.path.exists(intro_slide_fp) - assert topic_title in open(intro_slide_fp, 'r').read() - # clean - shutil.rmtree("topics") - shutil.rmtree("metadata") - - -def test_get_zenodo_record(): - """Test :func:`planemo.training.get_zenodo_record`.""" - z_record, req_res = training.get_zenodo_record(ZENODO_LINK) - file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" - assert z_record == "1321885" - assert 'files' in req_res - assert req_res['files'][0]['type'] in ['rdata', 'csv'] - assert file_link_prefix in req_res['files'][0]['links']['self'] - # check with wrong zenodo link - z_record, req_res = training.get_zenodo_record('https://zenodo.org/api/records/zenodooo') - assert z_record is None - assert 'files' in req_res - assert len(req_res['files']) == 0 - - -def test_get_zenodo_record_with_doi(): - """Test :func:`planemo.training.get_zenodo_record`: link with DOI.""" - z_link = 'https://doi.org/10.5281/zenodo.1321885' - z_record, req_res = training.get_zenodo_record(z_link) - file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" - assert z_record == "1321885" - assert 'files' in req_res - assert req_res['files'][0]['type'] in ['rdata', 'csv'] - assert file_link_prefix in req_res['files'][0]['links']['self'] - - -def test_get_galaxy_datatype(): - """Test :func:`planemo.training.get_galaxy_datatype`.""" - assert training.get_galaxy_datatype("csv", DATATYPE_FP) == "csv" - assert training.get_galaxy_datatype("test", DATATYPE_FP) == "strange_datatype" - assert "# Please add" in training.get_galaxy_datatype("unknown", DATATYPE_FP) - - -def test_get_files_from_zenodo(): - """Test :func:`planemo.training.get_files_from_zenodo`.""" - files, links, z_record = training.get_files_from_zenodo(ZENODO_LINK, DATATYPE_FP) - assert z_record == "1321885" - # test links - file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" - assert file_link_prefix in links[0] - # test files dict - assert file_link_prefix in files[0]['url'] - assert files[0]['src'] == 'url' - assert files[0]['info'] == ZENODO_LINK - assert "# Please add" in files[0]['ext'] - assert files[1]['ext'] == 'csv' - - -def test_init_data_lib(): - """Test :func:`planemo.training.init_data_lib`.""" - data_lib_filepath = 'data-library.yaml' - datalib = training.init_data_lib(data_lib_filepath) - assert datalib['destination']['name'] == 'GTN - Material' - - -def test_prepare_data_library(): - """Test :func:`planemo.training.prepare_data_library`.""" - kwds, topic_dir, tuto_dir = prepare_test() - os.makedirs(tuto_dir) - files, links, z_record = training.get_files_from_zenodo(ZENODO_LINK, DATATYPE_FP) - datalib_fp = os.path.join(tuto_dir, "data-library.yaml") - # test default prepare_data_library - training.prepare_data_library(files, kwds, z_record, tuto_dir) - assert os.path.exists(datalib_fp) - datalib = training.load_yaml(datalib_fp) - assert datalib['items'][0]['name'] == kwds['topic_title'] - assert datalib['items'][0]['items'][0]['name'] == kwds['tutorial_title'] - assert datalib['items'][0]['items'][0]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % z_record - assert datalib['items'][0]['items'][0]['items'][0]['description'] == "latest" - assert datalib['items'][0]['items'][0]['items'][0]['items'][0]['url'] == files[0]['url'] - # test adding a new collection for same tutorial - new_z_record = '124' - training.prepare_data_library(files, kwds, new_z_record, tuto_dir) - datalib = training.load_yaml(datalib_fp) - assert datalib['items'][0]['items'][0]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % new_z_record - assert datalib['items'][0]['items'][0]['items'][0]['description'] == "latest" - assert datalib['items'][0]['items'][0]['items'][1]['name'] == "DOI: 10.5281/zenodo.%s" % z_record - assert datalib['items'][0]['items'][0]['items'][1]['description'] == "" - # test adding a new tutorial - new_tuto_title = "New title" - kwds['tutorial_title'] = new_tuto_title - training.prepare_data_library(files, kwds, z_record, tuto_dir) - datalib = training.load_yaml(datalib_fp) - assert datalib['items'][0]['items'][1]['name'] == new_tuto_title - assert datalib['items'][0]['items'][1]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % z_record - # test adding a new topic - new_topic_title = "New title" - kwds['topic_title'] = new_topic_title - training.prepare_data_library(files, kwds, z_record, tuto_dir) - datalib = training.load_yaml(datalib_fp) - assert datalib['items'][1]['name'] == new_topic_title - assert datalib['items'][1]['items'][0]['name'] == new_tuto_title - assert datalib['items'][1]['items'][0]['items'][0]['name'] == "DOI: 10.5281/zenodo.%s" % z_record - # clean - shutil.rmtree("topics") -# - -def test_prepare_data_library_from_zenodo(): - """Test :func:`planemo.training.prepare_data_library_from_zenodo`.""" - kwds, topic_dir, tuto_dir = prepare_test() - os.makedirs(tuto_dir) - datalib_fp = os.path.join(tuto_dir, "data-library.yaml") - # test prepare_data_library_from_zenodo with no zenodo - links = training.prepare_data_library_from_zenodo(kwds, tuto_dir) - assert len(links) == 0 - assert not os.path.exists(datalib_fp) - # test prepare_data_library_from_zenodo with a zenodo link - kwds['zenodo_link'] = ZENODO_LINK - links = training.prepare_data_library_from_zenodo(kwds, tuto_dir) - file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" - assert file_link_prefix in links[0] - assert os.path.exists(datalib_fp) - # clean - shutil.rmtree("topics") - - -def test_get_tool_input(): - """Test :func:`planemo.training.get_tool_input`.""" - tool_desc = { - 'inputs': [ - {'name': "name1", 'content': 'c'}, - {'name': "name2", 'content': 'c'} - ] - } - tool_inp = training.get_tool_input(tool_desc) - assert "name1" in tool_inp - assert 'content' in tool_inp["name1"] - assert tool_inp["name1"]['content'] == 'c' - - -def check_tools(tools): - """Test the tool return from get_wf_tool_description.""" - assert 'FastQC' in tools - assert 'input_file' in tools['FastQC'] - - -def test_get_wf_tool_description(): - """Test :func:`planemo.training.get_wf_tool_description`.""" - kwds, topic_dir, tuto_dir = prepare_test() - assert is_galaxy_engine(**kwds) - with engine_context(CTX, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: - workflow_id = config.workflow_id(WF_FP) - wf = config.gi.workflows.export_workflow_dict(workflow_id) - wf['steps']['10'] = { - 'input_connections': [], - 'tool_id': 'no_input', - 'name': 'with_no_input' - } - wf['steps']['11'] = { - 'input_connections': [1], - 'tool_id': 'no_tool', - 'name': 'with_no_tool' - } - tools = training.get_wf_tool_description(wf, config.gi) - check_tools(tools) - assert 'with_no_input' not in tools - assert 'with_no_tool' in tools - - -def check_workflow(wf): - """Test the worflow return.""" - assert 'steps' in wf - assert '1' in wf['steps'] - assert 'name' in wf['steps']['1'] - - -def test_get_wf_tool_from_local_galaxy(): - """Test :func:`planemo.training.get_wf_tool_from_local_galaxy`.""" - kwds, topic_dir, tuto_dir = prepare_test() - wf, tools = training.get_wf_tool_from_local_galaxy(kwds, WF_FP, CTX) - check_tools(tools) - check_workflow(wf) - - -def test_get_wf_tools_from_running_galaxy(): - """Test :func:`planemo.training.get_wf_tools_from_running_galaxy`.""" - kwds, topic_dir, tuto_dir = prepare_test() - assert is_galaxy_engine(**kwds) - kwds['galaxy_url'] = 'http://%s:%s' % (kwds['host'], kwds['port']) - with engine_context(CTX, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: - workflow_id = config.workflow_id(WF_FP) - kwds['workflow_id'] = workflow_id - kwds['galaxy_api_key'] = config.user_api_key - wf = config.gi.workflows.export_workflow_dict(workflow_id) - wf, tools = training.get_wf_tools_from_running_galaxy(kwds) - check_tools(tools) - check_workflow(wf) - - -def test_get_input_tool_name(): - """Test :func:`planemo.training.get_input_tool_name`.""" - steps = {'1': {'name': 'Input dataset'}} - # test if step not found - tool_name = training.get_input_tool_name(2, steps) - assert tool_name == '' - # test if tool is input - assert training.get_input_tool_name(1, steps) == '(Input dataset)' - # test if tool is input - steps['1']['name'] = 'Input dataset collection' - assert training.get_input_tool_name(1, steps) == '(Input dataset collection)' - # test if other case - steps['1']['name'] = 'Tool name' - assert training.get_input_tool_name(1, steps) == '(output of **Tool name** {% icon tool %})' - - -def get_wf_a_tools(): - """Get workflow and tool of a workflow.""" - kwds, topic_dir, tuto_dir = prepare_test() - assert is_galaxy_engine(**kwds) - with engine_context(CTX, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: - workflow_id = config.workflow_id(WF_FP) - wf = config.gi.workflows.export_workflow_dict(workflow_id) - tools = training.get_wf_tool_description(wf, config.gi) - return (wf, tools) - - -def test_format_inputs(): - """Test :func:`planemo.training.format_inputs`.""" - wf, tools = get_wf_a_tools() - step = wf['steps']['3'] - step_inputs = step['input_connections'] - tool = tools[step['name']] - inputlist = training.format_inputs(step_inputs, tool['input_file'], wf['steps'], 1) - assert 'param-collection ' in inputlist - assert 'Input dataset collection' in inputlist - inputlist = training.format_inputs(step_inputs, tool['contaminants'], wf['steps'], 1) - assert 'param-file ' in inputlist - - -def test_get_wf_step_inputs(): - """Test :func:`planemo.training.get_wf_step_inputs`.""" - step_inp = { - 'tables_1|table': {'output_name': 'output', 'id': 2}, - 'add_to_database|withdb': {'output_name': 'output', 'id': 0}, - 'tables_0|table': {'output_name': 'output', 'id': 1}, - 'add_to_database|tab_0|tt': {'output_name': 'output', 'id': 0}, - 'tables_2|section|sect': {'output_name': 'output', 'id': 1}, - 'tables_3|tables_0|sect': {'output_name': 'output', 'id': 1} - } - step_inputs = training.get_wf_step_inputs(step_inp) - assert 'tables' in step_inputs - assert '0' in step_inputs['tables'] - assert 'table' in step_inputs['tables']['0'] - assert '2' in step_inputs['tables'] - assert 'section' in step_inputs['tables']['2'] - assert 'sect' in step_inputs['tables']['2']['section'] - assert 'output_name' in step_inputs['tables']['2']['section']['sect'] - assert 'add_to_database' in step_inputs - assert 'withdb' in step_inputs['add_to_database'] - assert 'tab' in step_inputs['add_to_database'] - assert '0' in step_inputs['add_to_database']['tab'] - assert 'tt' in step_inputs['add_to_database']['tab']['0'] - - -def test_json_load(): - """Test :func:`planemo.training.json_load`.""" - assert isinstance(training.json_load('{"name": "1"}'), dict) - assert isinstance(training.json_load("name"), str) - - -def test_get_lower_params(): - """Test :func:`planemo.training.get_lower_params`.""" - step_params = {'name': '1'} - assert 'name' in training.get_lower_params(step_params, 'n1') - assert training.get_lower_params(step_params, 'name') == '1' - assert 'name' not in training.get_lower_params('{"name": "1"}', 'name') - assert 'name' in training.get_lower_params('name:1', 'name') - - -def test_get_lower_inputs(): - """Test :func:`planemo.training.get_lower_inputs`.""" - step_inputs = {'name': '1'} - assert 'name' in training.get_lower_inputs(step_inputs, 'n1') - assert training.get_lower_inputs(step_inputs, 'name') == '1' - - -def test_format_section_param_desc(): - """Test :func:`planemo.training.format_section_param_desc`.""" - wf, tools = get_wf_a_tools() - step = wf['steps']['4'] - step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = training.get_lower_params(step, 'tool_state') - tp_desc = tools[step['name']]['add_to_database'] - section_paramlist = training.format_section_param_desc( - step_params, - step_inputs, - tp_desc, - 0, - wf['steps']) - assert 'In *"Add tables to an existing database"*' in section_paramlist - assert 'icon param-collection' in section_paramlist - assert 'Input dataset collection' in section_paramlist - - -def test_format_conditional_param_desc(): - """Test :func:`planemo.training.format_conditional_param_desc`.""" - wf, tools = get_wf_a_tools() - step = wf['steps']['4'] - step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = training.get_lower_params(step, 'tool_state') - tp_desc = tools[step['name']]['query_result'] - conditional_paramlist = training.format_conditional_param_desc( - step_params, - step_inputs, - tp_desc, - 0, - wf['steps']) - print(step_params) - print(tp_desc) - print(conditional_paramlist) - assert 'column headers' in conditional_paramlist - assert '`Yes`' in conditional_paramlist - assert 'column_header line' in conditional_paramlist - - -def test_format_repeat_param_desc(): - """Test :func:`planemo.training.format_repeat_param_desc`.""" - wf, tools = get_wf_a_tools() - step = wf['steps']['4'] - tp_desc = tools[step['name']]['tables'] - step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = training.get_lower_params(step, 'tool_state') - repeat_paramlist = training.format_repeat_param_desc( - step_params, - step_inputs, - tp_desc, - 0, - wf['steps']) - assert 'Click on *"Insert Database Table"*' in repeat_paramlist - assert 'In *"1: Database Table"*' in repeat_paramlist - assert 'In *"1: Database Table"*' in repeat_paramlist - assert 'Click on *"Insert Filter Tabular Input Lines"*' in repeat_paramlist - assert 'In *"1: Filter Tabular Input Lines"*:' in repeat_paramlist - assert 'In *"2: Database Table"*:' in repeat_paramlist - - -def test_get_param_value(): - """Test :func:`planemo.training.get_param_value`.""" - # test same value - tp_desc = {'type': 'boolean', 'value': 'same'} - assert training.get_param_value('same', tp_desc) is None - # test boolean - tp_desc = {'type': 'boolean', 'value': 'True'} - assert training.get_param_value(True, tp_desc) is None - assert training.get_param_value(False, tp_desc) == 'No' - # test select - tp_desc = {'type': 'select', 'options': [['opt1', 'val1'], ['opt2', 'val2']], 'value': ''} - assert training.get_param_value('val1', tp_desc) == 'opt1' - # test select with extra quotes - tp_desc = {'type': 'select', 'options': [['opt1', 'val1'], ['opt2', 'val2']], 'value': ''} - assert training.get_param_value('"val1"', tp_desc) == 'opt1' - # test data_column - tp_desc = {'type': 'data_column', 'value': ''} - assert training.get_param_value('1', tp_desc) == 'c1' - # test integer - tp_desc = {'type': 'integer', 'value': ''} - assert training.get_param_value('1', tp_desc) == '1' - - -def test_format_param_desc(): - """Test :func:`planemo.training.format_param_desc`.""" - wf, tools = get_wf_a_tools() - step = wf['steps']['4'] - step_inputs = training.get_wf_step_inputs(step['input_connections']) - step_params = training.get_lower_params(step, 'tool_state') - # test section (add_to_database) - n = 'add_to_database' - tp_desc = tools[step['name']][n] - step_param = training.get_lower_params(step_params, n) - paramlist = training.format_param_desc( - step_param, - step_inputs, - tp_desc, - 0, - wf['steps'], - force_default=False) - assert 'In *"Add tables to an existing database"*' in paramlist - # test repeat (tables) - n = 'tables' - tp_desc = tools[step['name']][n] - step_param = training.get_lower_params(step_params, n) - paramlist = training.format_param_desc( - step_param, - step_inputs, - tp_desc, - 0, - wf['steps'], - force_default=False) - assert 'In *"1: Filter Tabular Input Lines"*:' in paramlist - # test boolean (save_db) - n = 'save_db' - tp_desc = tools[step['name']][n] - step_param = 'true' - paramlist = training.format_param_desc( - step_param, - step_inputs, - tp_desc, - 0, - wf['steps'], - force_default=False) - assert '`Yes`' in paramlist - # test conditional (query_result) - n = 'query_result' - tp_desc = tools[step['name']][n] - step_param = training.get_lower_params(step_params, n) - paramlist = training.format_param_desc( - step_param, - step_inputs, - tp_desc, - 0, - wf['steps'], - force_default=False) - assert 'Prefix character' in paramlist - # no type - exp_exception = "No type for the paramater name" - with assert_raises_regexp(Exception, exp_exception): - training.format_param_desc( - step_params, - step_inputs, - {'name': 'name'}, - 0, - wf['steps'], - force_default=False) - - -def test_get_param_desc(): - """Test :func:`planemo.training.get_param_desc`.""" - wf, tools = get_wf_a_tools() - step_3 = wf['steps']['3'] - step_inputs = training.get_wf_step_inputs(step_3['input_connections']) - step_params = training.get_lower_params(step_3, 'tool_state') - # not in workflow and should be there - step_4 = wf['steps']['4'] - tp_desc = tools[step_4['name']] - exp_exception = "workdb not in workflow" - with assert_raises_regexp(Exception, exp_exception): - training.get_param_desc( - step_params, - step_inputs, - tp_desc, - 0, - wf['steps'], - should_be_there=True) - # not in workflow - step_4 = wf['steps']['4'] - tp_desc = tools[step_4['name']] - paramlist = training.get_param_desc( - step_params, - step_inputs, - tp_desc, - 0, - wf['steps']) - assert paramlist == '' - # correct one - tp_desc = tools[step_3['name']] - paramlist = training.get_param_desc( - step_params, - step_inputs, - tp_desc, - 0, - wf['steps']) - assert 'param-collection' in paramlist - assert 'param-file' in paramlist - - -def test_get_handson_box(): - """Test :func:`planemo.training.get_handson_box`.""" - wf, tools = get_wf_a_tools() - # test normal step - hand_boxes = training.get_handson_box(wf['steps']['3'], wf['steps'], tools) - assert '### {% icon hands_on %}' in hand_boxes - assert '{% icon tool %} with the following parameters:' in hand_boxes - assert ': .hands_on' in hand_boxes - # test input step - hand_boxes = training.get_handson_box(wf['steps']['1'], wf['steps'], tools) - assert hand_boxes == '' - - -def test_init_tuto_metadata(): - """Test :func:`planemo.training.init_tuto_metadata`.""" - kwds, topic_dir, tuto_dir = prepare_test() - metadata = training.init_tuto_metadata(kwds) - assert metadata['title'] == kwds["tutorial_title"] - assert "contributor1" in metadata['contributors'] -# - -def test_get_tuto_body(): - """Test :func:`planemo.training.get_tuto_body`.""" - z_file_links = ['URL1', 'URL2'] - # with body - body = 'the body' - init_body = training.get_tuto_body(z_file_links, body) - assert "General introduction about the topic" in init_body - assert body in init_body - assert "> URL1" in init_body - # without body - init_body = training.get_tuto_body(z_file_links) - assert "Sub-step with **My Tool**" in init_body - assert "> 1. **My Tool** {% icon tool %} with the following parameters:" in init_body - assert '> - {% icon param-file %} *"Input file"*: File' in init_body - assert '> - *"Parameter"*: `a value`' in init_body - - -def test_write_hands_on_tutorial(): - """Test :func:`planemo.training.write_hands_on_tutorial`.""" - kwds, topic_dir, tuto_dir = prepare_test() - os.makedirs(tuto_dir) - metadata = training.init_tuto_metadata(kwds) - body = '' - training.write_hands_on_tutorial(metadata, body, tuto_dir) - - tuto_fp = os.path.join(tuto_dir, "tutorial.md") - assert os.path.exists(tuto_fp) - with open(tuto_fp, "r") as tuto_f: - tuto_content = tuto_f.read() - assert "layout: tutorial_hands_on" in tuto_content - assert kwds["tutorial_title"] in tuto_content - - -def test_create_hands_on_tutorial_from_workflow(): - """Test :func:`planemo.training.create_hands_on_tutorial_from_workflow`.""" - kwds, topic_dir, tuto_dir = prepare_test() - os.makedirs(tuto_dir) - assert is_galaxy_engine(**kwds) - kwds['galaxy_url'] = 'http://%s:%s' % (kwds['host'], kwds['port']) - with engine_context(CTX, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: - workflow_id = config.workflow_id(WF_FP) - kwds['workflow_id'] = workflow_id - kwds['galaxy_api_key'] = config.user_api_key - training.create_hands_on_tutorial_from_workflow(kwds, '', tuto_dir, CTX) - # tests - tuto_path = os.path.join(tuto_dir, "tutorial.md") - assert os.path.exists(tuto_path) - with open(tuto_path, 'r') as tuto: - tuto_content = tuto.read() - assert kwds["tutorial_title"] in tuto_content - assert '> ### Agenda' in tuto_content - assert '## Get data' in tuto_content - assert '{% icon tool %} with the following parameters:' in tuto_content - assert 'no_toc' in tuto_content - assert '# Conclusion' in tuto_content - # clean after - shutil.rmtree(topic_dir) - - -def test_add_workflow_file(): - """Test :func:`planemo.training.add_workflow_file`.""" - kwds, topic_dir, tuto_dir = prepare_test() - wf_dir = os.path.join(tuto_dir, "workflows") - os.makedirs(wf_dir) - wf_path = os.path.join(wf_dir, "init_workflow.ga") - # test with workflow on a running instance - assert is_galaxy_engine(**kwds) - kwds['galaxy_url'] = 'http://%s:%s' % (kwds['host'], kwds['port']) - with engine_context(CTX, **kwds) as galaxy_engine: - with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: - workflow_id = config.workflow_id(WF_FP) - kwds['workflow_id'] = workflow_id - kwds['galaxy_api_key'] = config.user_api_key - training.add_workflow_file(kwds, tuto_dir) - assert os.path.exists(wf_path) - os.remove(wf_path) - # test with local workflow - kwds["workflow"] = WF_FP - training.add_workflow_file(kwds, tuto_dir) - assert os.path.exists(wf_path) - # clean after - shutil.rmtree(topic_dir) - - -def test_create_tutorial(): - """Test :func:`planemo.training.create_tutorial`.""" - kwds, topic_dir, tuto_dir = prepare_test() - tuto_fp = os.path.join(tuto_dir, 'tutorial.md') - slide_fp = os.path.join(tuto_dir, 'slides.html') - data_library_fp = os.path.join(tuto_dir, 'data-library.yaml') - tour_folder = os.path.join(tuto_dir, "tours") - workflow_folder = os.path.join(tuto_dir, "workflows") - # wo zenodo and wo workflow - kwds["workflow"] = None - kwds["workflow_id"] = None - kwds["zenodo_link"] = None - kwds["slides"] = False - training.create_topic(kwds, topic_dir) - training.create_tutorial(kwds, tuto_dir, CTX) - assert os.path.exists(tuto_fp) - assert os.path.exists(tour_folder) - assert os.path.exists(workflow_folder) - assert not os.path.exists(data_library_fp) - with open(tuto_fp, 'r') as tuto_f: - tuto_content = tuto_f.read() - assert kwds["tutorial_title"] in tuto_content - assert "zenodo_link: ''" in tuto_content - assert '**My Tool** {% icon tool %}' in tuto_content - shutil.rmtree("topics") - shutil.rmtree("metadata") - # w zenodo and wo workflow - kwds["workflow"] = None - kwds["workflow_id"] = None - kwds["zenodo_link"] = ZENODO_LINK - kwds["slides"] = False - training.create_topic(kwds, topic_dir) - training.create_tutorial(kwds, tuto_dir, CTX) - with open(tuto_fp, 'r') as tuto_f: - tuto_content = tuto_f.read() - assert kwds["tutorial_title"] in tuto_content - assert 'zenodo_link: %s' % ZENODO_LINK in tuto_content - assert '**My Tool** {% icon tool %}' in tuto_content - assert os.path.exists(data_library_fp) - assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() - shutil.rmtree("topics") - shutil.rmtree("metadata") - # w zenodo and w workflow - kwds["workflow"] = WF_FP - kwds["workflow_id"] = None - kwds["zenodo_link"] = ZENODO_LINK - kwds["slides"] = False - training.create_topic(kwds, topic_dir) - training.create_tutorial(kwds, tuto_dir, CTX) - with open(tuto_fp, 'r') as tuto_f: - tuto_content = tuto_f.read() - assert kwds["tutorial_title"] in tuto_content - assert 'zenodo_link: %s' % ZENODO_LINK in tuto_content - assert '**FastQC** {% icon tool %} with the following parameters:' in tuto_content - assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() - assert os.path.exists(os.path.join(tuto_dir, 'workflows', 'init_workflow.ga')) - shutil.rmtree("topics") - shutil.rmtree("metadata") - # w slides - kwds["hands_on"] = False - kwds["workflow"] = None - kwds["workflow_id"] = None - kwds["zenodo_link"] = None - kwds["slides"] = True - training.create_topic(kwds, topic_dir) - training.create_tutorial(kwds, tuto_dir, CTX) - assert not os.path.exists(tuto_fp) - assert os.path.exists(slide_fp) - with open(slide_fp, 'r') as slide_f: - slide_content = slide_f.read() - assert kwds["tutorial_title"] in slide_content - assert 'layout: tutorial_slides' in slide_content - shutil.rmtree("topics") - shutil.rmtree("metadata") - - -def test_init(): - """Test :func:`planemo.training.init`.""" - kwds, topic_dir, tuto_dir = prepare_test() - metadata_fp = os.path.join(topic_dir, 'metadata.yaml') - tuto_fp = os.path.join(tuto_dir, "tutorial.md") - tuto_name = kwds['tutorial_name'] - # new topic - kwds['tutorial_name'] = None - kwds['slides'] = False - kwds['workflow'] = None - kwds['zenodo_link'] = None - kwds['workflow_id'] = None - training.init(CTX, kwds) - assert os.path.exists(metadata_fp) - assert not os.path.exists(tuto_fp) +KWDS = { + 'topic_name': 'my_new_topic', + 'topic_title': "New topic", + 'topic_target': "use", + 'topic_summary': "Topic summary", + 'tutorial_name': "new_tuto", + 'tutorial_title': "Title of tuto", + 'hands_on': True, + 'slides': True, + 'workflow': None, + 'workflow_id': None, + 'zenodo_link': None, + 'datatypes': os.path.join(TEST_DATA_DIR, "training_datatypes.yaml"), + 'templates': None, + # planemo configuation + 'conda_auto_init': True, + 'conda_auto_install': True, + 'conda_copy_dependencies': False, + 'conda_debug': False, + 'conda_dependency_resolution': False, + 'conda_ensure_channels': 'iuc,bioconda,conda-forge,defaults', + 'conda_exec': None, + 'conda_prefix': None, + 'conda_use_local': False, + 'brew_dependency_resolution': False, + 'daemon': False, + 'database_connection': None, + 'database_type': 'auto', + 'dependency_resolvers_config_file': None, + 'docker': False, + 'docker_cmd': 'docker', + 'docker_extra_volume': None, + 'docker_galaxy_image': 'quay.io/bgruening/galaxy', + 'docker_host': None, + 'docker_sudo': False, + 'docker_sudo_cmd': 'sudo', + 'engine': 'galaxy', + 'extra_tools': (), + 'file_path': None, + 'galaxy_api_key': None, + 'galaxy_branch': None, + 'galaxy_database_seed': None, + 'galaxy_email': 'planemo@galaxyproject.org', + 'galaxy_root': None, + 'galaxy_single_user': True, + 'galaxy_source': None, + 'galaxy_url': None, + 'host': '127.0.0.1', + 'ignore_dependency_problems': False, + 'install_galaxy': False, + 'job_config_file': None, + 'mulled_containers': False, + 'no_cleanup': False, + 'no_cache_galaxy': False, + 'no_dependency_resolution': True, + 'non_strict_cwl': False, + 'pid_file': None, + 'port': '9090', + 'postgres_database_host': None, + 'postgres_database_port': None, + 'postgres_database_user': 'postgres', + 'postgres_psql_path': 'psql', + 'profile': None, + 'shed_dependency_resolution': False, + 'shed_install': True, + 'shed_tool_conf': None, + 'shed_tool_path': None, + 'skip_venv': False, + 'test_data': None, + 'tool_data_table': None, + 'tool_dependency_dir': None +} + + +def test_training_init(): + """Test :func:`planemo.training.Training.init`.""" + train = Training(KWDS) + assert train.topics_dir == "topics" + assert train.topic is not None + assert train.tuto is None + + +def test_training_init_training(): + """Test :func:`planemo.training.Training.init_training`.""" + train = Training(KWDS) + # new topic, nothing else + train.kwds['tutorial_name'] = None + train.kwds['slides'] = None + train.kwds['workflow'] = None + train.kwds['workflow_id'] = None + train.kwds['zenodo_link'] = None + train.init_training(CTX) + assert os.path.exists(train.topic.dir) + assert not os.listdir(os.path.join(train.topic.dir, 'tutorials')) # no new topic, no tutorial name but hands-on - kwds['slides'] = True + train.kwds['slides'] = True exp_exception = "A tutorial name is needed to create the skeleton of a tutorial slide deck" with assert_raises_regexp(Exception, exp_exception): - training.init(CTX, kwds) + train.init_training(CTX) # no new topic, no tutorial name but workflow - kwds['workflow'] = WF_FP - kwds['slides'] = False + train.kwds['workflow'] = WF_FP + train.kwds['slides'] = False exp_exception = "A tutorial name is needed to create the skeleton of the tutorial from a workflow" with assert_raises_regexp(Exception, exp_exception): - training.init(CTX, kwds) + train.init_training(CTX) # no new topic, no tutorial name but zenodo - kwds['workflow'] = None - kwds['zenodo_link'] = ZENODO_LINK + train.kwds['workflow'] = None + train.kwds['zenodo_link'] = zenodo_link exp_exception = "A tutorial name is needed to add Zenodo information" with assert_raises_regexp(Exception, exp_exception): - training.init(CTX, kwds) + train.init_training(CTX) # no new topic, new tutorial - kwds['tutorial_name'] = tuto_name - kwds['workflow'] = None - kwds['zenodo_link'] = None - training.init(CTX, kwds) - assert os.path.exists(tuto_fp) - assert kwds['tutorial_title'] in open(tuto_fp, 'r').read() + train.kwds['tutorial_name'] = "new_tuto" + train.kwds['workflow'] = None + train.kwds['zenodo_link'] = None + train.init_training(CTX) + assert os.path.exists(train.tuto.dir) + assert os.path.exists(train.tuto.tuto_fp) + assert train.kwds['tutorial_title'] in open(train.tuto.tuto_fp, 'r').read() # clean after - shutil.rmtree("topics") + shutil.rmtree(train.topics_dir) shutil.rmtree("metadata") -def test_get_tuto_info(): - """Test :func:`planemo.training.get_tuto_info`.""" - metadata, body = training.get_tuto_info(TEST_DATA_DIR) - assert "A tutorial to test" in metadata["title"] - assert "https://zenodo.org" in metadata["zenodo_link"] - assert "What is the purpose of the tutorial?" in metadata["questions"][0] - assert "A learning objective" in metadata["objectives"][0] - assert "Take home message" in metadata["key_points"][0] - assert "the_best_contributor" in metadata["contributors"][0] - assert "# First section" in body +def create_existing_tutorial(exit_tuto_name, tuto_fp, topic): + exist_tuto_dir = os.path.join(topic.dir, 'tutorials', exit_tuto_name) + os.makedirs(exist_tuto_dir) + shutil.copyfile(tuto_fp, os.path.join(exist_tuto_dir, 'tutorial.md')) -def test_check_topic_tuto_exist(): - """Test :func:`planemo.training.check_topic_tuto_exist`.""" - kwds, topic_dir, tuto_dir = prepare_test() +def test_training_check_topic_init_tuto(): + """Test :func:`planemo.training.Training.check_topic_init_tuto`.""" + train = Training(KWDS) # no topic exp_exception = "The topic my_new_topic does not exists. It should be created" with assert_raises_regexp(Exception, exp_exception): - training.check_topic_tuto_exist(kwds) - # no tutorial - training.create_topic(kwds, topic_dir) - exp_exception = "The tutorial new_tuto does not exists. It should be created" - with assert_raises_regexp(Exception, exp_exception): - training.check_topic_tuto_exist(kwds) - # both exist - training.create_tutorial(kwds, tuto_dir, CTX) - n_topic_dir, n_tuto_dir = training.check_topic_tuto_exist(kwds) - assert n_topic_dir == topic_dir - assert n_tuto_dir == tuto_dir + train.check_topic_init_tuto() + # add topic + train.kwds['tutorial_name'] = None + train.kwds['slides'] = None + train.kwds['workflow'] = None + train.kwds['workflow_id'] = None + train.kwds['zenodo_link'] = None + train.init_training(CTX) + train.kwds['tutorial_name'] = 'existing_tutorial' + create_existing_tutorial('existing_tutorial', tuto_fp, train.topic) + train.check_topic_init_tuto() + assert train.tuto.name == train.kwds['tutorial_name'] + assert train.tuto.datatype_fp # clean after - shutil.rmtree("topics") + shutil.rmtree(train.topics_dir) shutil.rmtree("metadata") def test_fill_data_library(): """Test :func:`planemo.training.fill_data_library`.""" - kwds, topic_dir, tuto_dir = prepare_test() - training.init(CTX, kwds) - data_library_fp = os.path.join(tuto_dir, 'data-library.yaml') - tuto_fp = os.path.join(tuto_dir, 'tutorial.md') + train = Training(KWDS) + train.kwds['tutorial_name'] = None + train.kwds['slides'] = False + train.kwds['hands_on'] = False + train.init_training(CTX) + train.kwds['tutorial_name'] = 'existing_tutorial' + create_existing_tutorial('existing_tutorial', tuto_wo_zenodo_fp, train.topic) # no Zenodo link - kwds['zenodo_link'] = None - kwds['workflow'] = None + train.kwds['zenodo_link'] = None exp_exception = "A Zenodo link should be provided either in the metadata file or as argument of the command" with assert_raises_regexp(Exception, exp_exception): - training.fill_data_library(CTX, kwds) + train.fill_data_library(CTX) # with a given Zenodo link and no Zenodo in metadata - kwds['zenodo_link'] = ZENODO_LINK - training.fill_data_library(CTX, kwds) - assert 'DOI: 10.5281/zenodo.1321885' in open(data_library_fp, 'r').read() - assert 'zenodo_link: %s' % ZENODO_LINK in open(tuto_fp, 'r').read() + train.kwds['zenodo_link'] = zenodo_link + train.fill_data_library(CTX) + assert 'DOI: 10.5281/zenodo.1321885' in open(train.tuto.data_lib_fp, 'r').read() + assert 'zenodo_link: %s' % zenodo_link in open(train.tuto.tuto_fp, 'r').read() # with a given Zenodo link and Zenodo in metadata new_z_link = 'https://zenodo.org/record/1324204' - kwds['zenodo_link'] = new_z_link - training.fill_data_library(CTX, kwds) - assert 'DOI: 10.5281/zenodo.1324204' in open(data_library_fp, 'r').read() - assert 'zenodo_link: %s' % new_z_link in open(tuto_fp, 'r').read() + train.kwds['zenodo_link'] = new_z_link + train.tuto = None + train.fill_data_library(CTX) + assert 'DOI: 10.5281/zenodo.1324204' in open(train.tuto.data_lib_fp, 'r').read() + assert 'zenodo_link: %s' % new_z_link in open(train.tuto.tuto_fp, 'r').read() # with no given Zenodo link - kwds['zenodo_link'] = None - training.fill_data_library(CTX, kwds) - assert 'DOI: 10.5281/zenodo.1324204' in open(data_library_fp, 'r').read() - assert 'zenodo_link: %s' % new_z_link in open(tuto_fp, 'r').read() + train.kwds['zenodo_link'] = None + train.fill_data_library(CTX) + assert 'DOI: 10.5281/zenodo.1324204' in open(train.tuto.data_lib_fp, 'r').read() + assert 'zenodo_link: %s' % new_z_link in open(train.tuto.tuto_fp, 'r').read() # clean after - shutil.rmtree("topics") + shutil.rmtree(train.topics_dir) shutil.rmtree("metadata") def test_generate_tuto_from_wf(): """Test :func:`planemo.training.generate_tuto_from_wf`.""" - kwds, topic_dir, tuto_dir = prepare_test() - training.init(CTX, kwds) - tuto_fp = os.path.join(tuto_dir, 'tutorial.md') + train = Training(KWDS) + train.kwds['tutorial_name'] = None + train.kwds['slides'] = False + train.init_training(CTX) + train.kwds['tutorial_name'] = 'existing_tutorial' + create_existing_tutorial('existing_tutorial', tuto_fp, train.topic) # no workflow - kwds['workflow'] = None + train.kwds['workflow'] = None exp_exception = "A path to a local workflow or the id of a workflow on a running Galaxy instance should be provided" with assert_raises_regexp(Exception, exp_exception): - training.generate_tuto_from_wf(CTX, kwds) + train.generate_tuto_from_wf(CTX) # with workflow - kwds['workflow'] = WF_FP - training.generate_tuto_from_wf(CTX, kwds) - assert '**FastQC** {% icon tool %} with the following parameters:' in open(tuto_fp, 'r').read() - assert os.path.exists(os.path.join(tuto_dir, 'workflows', 'init_workflow.ga')) + train.kwds['workflow'] = WF_FP + train.generate_tuto_from_wf(CTX) + assert '**FastQC** {% icon tool %} with the following parameters:' in open(train.tuto.tuto_fp, 'r').read() + assert os.path.exists(train.tuto.wf_fp) # clean after - shutil.rmtree("topics") + shutil.rmtree(train.topics_dir) shutil.rmtree("metadata") diff --git a/tests/test_training_tool_input.py b/tests/test_training_tool_input.py new file mode 100644 index 000000000..355bd4691 --- /dev/null +++ b/tests/test_training_tool_input.py @@ -0,0 +1,292 @@ +"""Training:tool_input functions.""" +import json +import os + +from nose.tools import assert_raises_regexp + +from planemo.training.tool_input import ( + get_empty_input, + get_empty_param, + get_input_tool_name, + ToolInput +) +from .test_training import ( + wf, + wf_param_values +) +from .test_utils import ( + TEST_DATA_DIR +) + + +wf_steps = wf['steps'] +# load the output from +# gi.tools.show_tool('toolshed.g2.bx.psu.edu/repos/iuc/query_tabular/query_tabular/2.0.0', io_details=True) +with open(os.path.join(TEST_DATA_DIR, "training_query_tabular.json"), "r") as tool_desc_f: + tool_desc = json.load(tool_desc_f) +tool_inp_desc = tool_desc["inputs"] + + +def test_get_input_tool_name(): + """Test :func:`planemo.training.tool_input.get_input_tool_name`.""" + assert "Input dataset" in get_input_tool_name('1', wf_steps) + assert "output of" in get_input_tool_name('4', wf_steps) + assert get_input_tool_name('10', wf_steps) == '' + + +def test_get_empty_input(): + """Test :func:`planemo.training.tool_input.get_empty_input`.""" + assert '{% icon param-file %} *"Input file"*: File' in get_empty_input() + + +def test_get_empty_param(): + """Test :func:`planemo.training.tool_input.get_empty_param`.""" + assert '*"Parameter"*: `a value`' in get_empty_param() + + +def test_ToolInput_init(): + """Test :func:`planemo.training.tool_input.ToolInput.init`.""" + # test type exception + exp_exception = "No type for the parameter t" + with assert_raises_regexp(Exception, exp_exception): + ToolInput( + tool_inp_desc={'name': 't'}, + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + # test with param not in workflow and exception + exp_exception = "t not in workflow" + with assert_raises_regexp(Exception, exp_exception): + ToolInput( + tool_inp_desc={'name': 't', 'type': ''}, + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + # test with param not in workflow but no exception + tool_input = ToolInput( + tool_inp_desc={'name': 't', 'type': ''}, + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + assert "save_db" in tool_input.wf_param_values + # test with param in workflow + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[0], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + assert "save_db" not in tool_input.wf_param_values + assert tool_input.wf_param_values == "workdb.sqlite" + + +def test_ToolInput_get_formatted_inputs(): + """Test :func:`planemo.training.tool_input.ToolInput.get_formatted_inputs`.""" + # test no input + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[1]["inputs"][0], + wf_param_values={}, + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + inputlist = tool_input.get_formatted_inputs() + assert inputlist == '' + # test collection + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[1]["inputs"][0], + wf_param_values=wf_param_values["add_to_database"], + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + inputlist = tool_input.get_formatted_inputs() + assert 'param-collection' in inputlist + assert '(Input dataset collection)' in inputlist + # test single input + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[2]["inputs"][0], + wf_param_values=wf_param_values["tables"][0], + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + inputlist = tool_input.get_formatted_inputs() + assert 'param-file' in inputlist + assert '(Input dataset)' in inputlist + + +def test_ToolInput_get_lower_param_desc(): + """Test :func:`planemo.training.tool_input.ToolInput.get_lower_param_desc`.""" + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[1], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + sub_param_desc = tool_input.get_lower_param_desc() + assert '> - {% icon param-collection %}' in sub_param_desc + + +def test_ToolInput_get_formatted_section_desc(): + """Test :func:`planemo.training.tool_input.ToolInput.get_formatted_section_desc`.""" + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[1], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + section_paramlist = tool_input.get_formatted_section_desc() + assert '> - In *"' in section_paramlist + assert '> - {%' in section_paramlist + + +def test_ToolInput_get_formatted_conditional_desc(): + """Test :func:`planemo.training.tool_input.ToolInput.get_formatted_conditional_desc`.""" + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[5], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + conditional_paramlist = tool_input.get_formatted_conditional_desc() + assert '> - *"' in conditional_paramlist + assert '"*: `Yes`' in conditional_paramlist + assert '> - *"' in conditional_paramlist + + +def test_ToolInput_get_formatted_repeat_desc(): + """Test :func:`planemo.training.tool_input.ToolInput.get_formatted_repeat_desc`.""" + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[2], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + repeat_desc = tool_input.get_formatted_repeat_desc() + assert '> - In *"' in repeat_desc + assert '> - Click on' in repeat_desc + assert '> - In *"1:' in repeat_desc + assert '> -' in repeat_desc + assert '> - In *"2:' in repeat_desc + + +def test_ToolInput_get_formatted_other_param_desc(): + """Test :func:`planemo.training.tool_input.ToolInput.get_formatted_other_param_desc`.""" + # test default value of the tool + tool_input = ToolInput( + tool_inp_desc={'value': 10, 'name': 't', 'type': ''}, + wf_param_values={'t': 10}, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + assert tool_input.get_formatted_other_param_desc() == '' + # test boolean parameter + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[3], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + assert tool_input.get_formatted_other_param_desc() == '' + tool_input.wf_param_values = 'true' + assert '*: `Yes`' in tool_input.get_formatted_other_param_desc() + # test select parameter + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[5]['cases'][0]['inputs'][0], + wf_param_values=wf_param_values['query_result'], + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + assert '*: `&`' in tool_input.get_formatted_other_param_desc() + # test other parameter + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[4], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=True) + assert '*: ``' in tool_input.get_formatted_other_param_desc() + + +def test_ToolInput_get_formatted_desc(): + """Test :func:`planemo.training.tool_input.ToolInput.get_formatted_desc`.""" + # test no param values + tool_input = ToolInput( + tool_inp_desc={'value': 10, 'name': 't', 'type': ''}, + wf_param_values={}, + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + assert tool_input.get_formatted_desc() == '' + # test data + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[2]["inputs"][0], + wf_param_values=wf_param_values["tables"][0], + wf_steps=wf_steps, + level=1, + should_be_there=False, + force_default=False) + inputlist = tool_input.get_formatted_inputs() + formatted_desc = tool_input.get_formatted_desc() + assert inputlist == formatted_desc + # test section + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[1], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + section_paramlist = tool_input.get_formatted_section_desc() + formatted_desc = tool_input.get_formatted_desc() + assert section_paramlist == formatted_desc + # test conditional + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[5], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + conditional_paramlist = tool_input.get_formatted_conditional_desc() + formatted_desc = tool_input.get_formatted_desc() + assert conditional_paramlist == formatted_desc + # test repeat + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[2], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + repeat_desc = tool_input.get_formatted_repeat_desc() + formatted_desc = tool_input.get_formatted_desc() + assert repeat_desc == formatted_desc + # test other + tool_input = ToolInput( + tool_inp_desc=tool_inp_desc[3], + wf_param_values=wf_param_values, + wf_steps=wf_steps, + level=1, + should_be_there=True, + force_default=False) + param_desc = tool_input.get_formatted_other_param_desc() + formatted_desc = tool_input.get_formatted_desc() + assert param_desc == formatted_desc diff --git a/tests/test_training_topic.py b/tests/test_training_topic.py new file mode 100644 index 000000000..72ad61b9c --- /dev/null +++ b/tests/test_training_topic.py @@ -0,0 +1,156 @@ +"""Training:topic functions.""" +import os +import shutil + +from planemo.training.topic import Topic +from planemo.training.utils import load_yaml +from .test_utils import TEST_DATA_DIR + + +def test_topic_init(): + """Test :func:`planemo.training.topic.Topic.init`.""" + # test requirement with default parameter + topic = Topic() + assert topic.name == "new_topic" + assert topic.type == "use" + assert topic.title == "The new topic" + assert topic.summary == "Summary" + assert topic.docker_image == "" + assert "maintainers" in topic.maintainers + assert topic.parent_dir == "topics" + assert topic.dir == "topics/new_topic" + assert topic.requirements[0].link == "/introduction/" + assert topic.references[0].link == "link" + # test requirement with non default + topic = Topic(name="topic2", target="admin", title="The 2nd topic", summary="", parent_dir="dir") + assert topic.name == "topic2" + assert topic.type == "admin" + assert topic.title == "The 2nd topic" + assert topic.summary == "" + assert topic.parent_dir == "dir" + assert topic.dir == "dir/topic2" + assert len(topic.requirements) == 0 + assert len(topic.references) == 0 + + +def test_topic_init_from_kwds(): + """Test :func:`planemo.training.topic.Topic.init_from_kwds`.""" + topic = Topic() + topic.init_from_kwds({ + 'topic_name': "topic", + 'topic_title': "New topic", + 'topic_target': "admin", + 'topic_summary': "Topic summary" + }) + assert topic.name == "topic" + assert topic.type == "admin" + assert topic.title == "New topic" + assert topic.summary == "Topic summary" + assert topic.dir == "topics/topic" + assert len(topic.requirements) == 0 + assert len(topic.references) == 0 + + +def test_topic_init_from_metadata(): + """Test :func:`planemo.training.topic.Topic.init_from_metadata`.""" + topic = Topic() + os.makedirs(topic.dir) + shutil.copy(os.path.join(TEST_DATA_DIR, 'training_metadata.yaml'), topic.metadata_fp) + topic.init_from_metadata() + assert topic.name == 'test' + assert topic.title == 'Test' + assert topic.summary == 'Summary' + assert topic.requirements[0].title == 'Galaxy introduction' + assert 'maintainer1' in topic.maintainers + shutil.rmtree(topic.parent_dir) + + +def test_topic_get_requirements(): + """Test :func:`planemo.training.topic.Topic.get_requirements`.""" + topic = Topic() + reqs = topic.get_requirements() + assert len(reqs) == 1 + assert 'title' in reqs[0] + + +def test_topic_get_references(): + """Test :func:`planemo.training.topic.Topic.get_references`.""" + topic = Topic() + refs = topic.get_references() + assert len(refs) == 1 + assert 'authors' in refs[0] + + +def test_topic_export_metadata_to_ordered_dict(): + """Test :func:`planemo.training.topic.Topic.export_metadata_to_ordered_dict`.""" + topic = Topic() + metadata = topic.export_metadata_to_ordered_dict() + assert 'name' in metadata + assert metadata['name'] == "new_topic" + assert 'type' in metadata + assert 'title' in metadata + assert 'summary' in metadata + assert 'requirements' in metadata + assert 'docker_image' in metadata + assert 'maintainers' in metadata + assert 'references' in metadata + + +def test_topic_set_paths(): + """Test :func:`planemo.training.topic.Topic.set_paths`.""" + new_name = 'the_new_name' + topic = Topic() + topic.name = new_name + topic.set_paths() + assert new_name in topic.dir + assert new_name in topic.img_folder + assert new_name in topic.tuto_folder + assert new_name in topic.index_fp + assert new_name in topic.readme_fp + assert new_name in topic.metadata_fp + assert new_name in topic.docker_folder + assert new_name in topic.dockerfile_fp + assert new_name in topic.slides_folder + + +def test_topic_exists(): + """Test :func:`planemo.training.topic.Topic.exists`.""" + topic = Topic() + assert not topic.exists() + os.makedirs(topic.dir) + assert topic.exists() + shutil.rmtree(topic.parent_dir) + + +def test_topic_create_topic_structure(): + """Test :func:`planemo.training.topic.Topic.create_topic_structure`.""" + topic = Topic() + topic.create_topic_structure() + topic_name = "new_topic" + topic_title = "The new topic" + # check the folder and its structure + assert topic.exists() + assert os.path.exists(topic.img_folder) + assert os.path.exists(topic.tuto_folder) + # create the index.md and the topic name + assert os.path.exists(topic.index_fp) + assert topic_name in open(topic.index_fp, 'r').read() + # create the README.md and the topic name + assert os.path.exists(topic.readme_fp) + assert topic_title in open(topic.readme_fp, 'r').read() + # check metadata content + assert os.path.exists(topic.metadata_fp) + metadata = load_yaml(topic.metadata_fp) + assert metadata['name'] == topic_name + # check dockerfile + assert os.path.exists(topic.dockerfile_fp) + assert topic_name in open(topic.dockerfile_fp, 'r').read() + assert topic_title in open(topic.dockerfile_fp, 'r').read() + # check introduction slide + assert os.path.exists(topic.intro_slide_fp) + assert topic_title in open(topic.intro_slide_fp, 'r').read() + # check in metadata directory + assert os.path.exists(os.path.join("metadata", "%s.yaml" % topic_name)) + # clean + shutil.rmtree(topic.parent_dir) + shutil.rmtree("metadata") diff --git a/tests/test_training_tutorial.py b/tests/test_training_tutorial.py new file mode 100644 index 000000000..15c51598b --- /dev/null +++ b/tests/test_training_tutorial.py @@ -0,0 +1,440 @@ +"""Training:tutorial functions.""" +import os +import shutil + +from nose.tools import assert_raises_regexp + + +from planemo.engine import ( + engine_context, + is_galaxy_engine, +) +from planemo.training import ( + Training +) +from planemo.training.topic import Topic +from planemo.training.tutorial import ( + format_wf_steps, + get_galaxy_datatype, + get_hands_on_boxes_from_local_galaxy, + get_hands_on_boxes_from_running_galaxy, + get_wf_inputs, + get_wf_param_values, + get_zenodo_record, + Tutorial +) +from planemo.training.utils import save_to_yaml +from .test_training import ( + create_existing_tutorial, + CTX, + datatype_fp, + KWDS, + RUNNABLE, + tuto_fp, + wf, + WF_FP, + wf_param_values, + zenodo_link +) + +topic = Topic() +training = Training(KWDS) + + +def test_get_galaxy_datatype(): + """Test :func:`planemo.training.tutorial.get_galaxy_datatype`.""" + assert get_galaxy_datatype("csv", datatype_fp) == "csv" + assert get_galaxy_datatype("test", datatype_fp) == "strange_datatype" + assert "# Please add" in get_galaxy_datatype("unknown", datatype_fp) + + +def test_get_zenodo_record(): + """Test :func:`planemo.training.tutorial.get_zenodo_record`.""" + z_record, req_res = get_zenodo_record(zenodo_link) + file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" + assert z_record == "1321885" + assert 'files' in req_res + assert req_res['files'][0]['type'] in ['rdata', 'csv'] + assert file_link_prefix in req_res['files'][0]['links']['self'] + # check with wrong zenodo link + z_record, req_res = get_zenodo_record('https://zenodo.org/api/records/zenodooo') + assert z_record is None + assert 'files' in req_res + assert len(req_res['files']) == 0 + # using DOI + z_link = 'https://doi.org/10.5281/zenodo.1321885' + z_record, req_res = get_zenodo_record(z_link) + file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" + assert z_record == "1321885" + assert 'files' in req_res + assert req_res['files'][0]['type'] in ['rdata', 'csv'] + assert file_link_prefix in req_res['files'][0]['links']['self'] + + +def test_get_wf_inputs(): + """Test :func:`planemo.training.tutorial.get_wf_inputs`.""" + step_inp = { + 'tables_1|table': {'output_name': 'output', 'id': 2}, + 'add_to_database|withdb': {'output_name': 'output', 'id': 0}, + 'tables_0|table': {'output_name': 'output', 'id': 1}, + 'add_to_database|tab_0|tt': {'output_name': 'output', 'id': 0}, + 'tables_2|section|sect': {'output_name': 'output', 'id': 1}, + 'tables_3|tables_0|sect': {'output_name': 'output', 'id': 1} + } + step_inputs = get_wf_inputs(step_inp) + assert 'tables' in step_inputs + assert '0' in step_inputs['tables'] + assert 'table' in step_inputs['tables']['0'] + assert '2' in step_inputs['tables'] + assert 'section' in step_inputs['tables']['2'] + assert 'sect' in step_inputs['tables']['2']['section'] + assert 'output_name' in step_inputs['tables']['2']['section']['sect'] + assert 'add_to_database' in step_inputs + assert 'withdb' in step_inputs['add_to_database'] + assert 'tab' in step_inputs['add_to_database'] + assert '0' in step_inputs['add_to_database']['tab'] + assert 'tt' in step_inputs['add_to_database']['tab']['0'] + + +def test_get_wf_param_values(): + """Test :func:`planemo.training.tutorial.get_wf_param_values`.""" + wf_step = wf['steps']['4'] + wf_param_value_tests = get_wf_param_values(wf_step['tool_state'], get_wf_inputs(wf_step['input_connections'])) + assert wf_param_values == wf_param_value_tests + + +def test_format_wf_steps(): + """Test :func:`planemo.training.tutorial.format_wf_steps`.""" + assert is_galaxy_engine(**KWDS) + with engine_context(CTX, **KWDS) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + workflow_id = config.workflow_id(WF_FP) + wf = config.gi.workflows.export_workflow_dict(workflow_id) + body = format_wf_steps(wf, config.gi) + assert '## Sub-step with **FastQC**' in body + assert '## Sub-step with **Query Tabular**' in body + assert '## Sub-step with **Select first**' in body + + +def test_get_hands_on_boxes_from_local_galaxy(): + """Test :func:`planemo.training.tutorial.get_hands_on_boxes_from_local_galaxy`.""" + tuto_body = get_hands_on_boxes_from_local_galaxy(KWDS, WF_FP, CTX) + assert '## Sub-step with **FastQC**' in tuto_body + assert '## Sub-step with **Query Tabular**' in tuto_body + assert '## Sub-step with **Select first**' in tuto_body + + +def test_get_hands_on_boxes_from_running_galaxy(): + """Test :func:`planemo.training.tutorial.get_hands_on_boxes_from_running_galaxy`.""" + assert is_galaxy_engine(**KWDS) + galaxy_url = 'http://%s:%s' % (KWDS['host'], KWDS['port']) + with engine_context(CTX, **KWDS) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + wf_id = config.workflow_id(WF_FP) + tuto_body = get_hands_on_boxes_from_running_galaxy(wf_id, galaxy_url, config.user_api_key) + assert '## Sub-step with **FastQC**' in tuto_body + assert '## Sub-step with **Query Tabular**' in tuto_body + assert '## Sub-step with **Select first**' in tuto_body + + +def test_tutorial_init(): + """Test :func:`planemo.training.tutorial.tutorial.init`.""" + # with default parameter + tuto = Tutorial( + training=training, + topic=topic) + assert tuto.name == "new_tuto" + assert tuto.title == "The new tutorial" + assert tuto.zenodo_link == "" + assert tuto.hands_on + assert not tuto.slides + assert tuto.init_wf_id is None + assert tuto.init_wf_fp is None + assert tuto.datatype_fp == '' + assert "new_tuto" in tuto.dir + assert '## Sub-step with **My Tool**' in tuto.body + assert tuto.data_lib + # with non default parameter + tuto = Tutorial( + training=training, + topic=topic, + name="my_tuto", + title="The tutorial", + zenodo_link="URL") + assert tuto.name == "my_tuto" + assert tuto.title == "The tutorial" + assert tuto.zenodo_link == "URL" + assert "my_tuto" in tuto.dir + + +def test_tutorial_init_from_kwds(): + """Test :func:`planemo.training.tutorial.tutorial.init_from_kwds`.""" + kwds = { + 'tutorial_name': "my_tuto", + 'tutorial_title': "Title of tuto", + 'hands_on': True, + 'slides': True, + 'workflow': WF_FP, + 'workflow_id': 'id', + 'zenodo_link': None, + 'datatypes': datatype_fp + } + tuto = Tutorial( + training=training, + topic=topic) + tuto.init_from_kwds(kwds) + assert tuto.name == "my_tuto" + assert tuto.title == "Title of tuto" + assert tuto.zenodo_link == '' + assert "Which biological questions are addressed by the tutorial?" in tuto.questions + assert tuto.hands_on + assert tuto.slides + assert tuto.init_wf_id == 'id' + assert tuto.init_wf_fp == WF_FP + assert tuto.datatype_fp == datatype_fp + assert "my_tuto" in tuto.dir + + +def test_tutorial_init_from_existing_tutorial(): + """Test :func:`planemo.training.tutorial.tutorial.init_from_existing_tutorial`.""" + tuto = Tutorial( + training=training, + topic=topic) + # non existing tutorial + exp_exception = "The tutorial existing_tutorial does not exists. It should be created" + with assert_raises_regexp(Exception, exp_exception): + tuto.init_from_existing_tutorial('existing_tutorial') + # existing tutorial + create_existing_tutorial('existing_tutorial', tuto_fp, tuto.topic) + tuto.init_from_existing_tutorial('existing_tutorial') + assert tuto.title == 'A tutorial to test' + assert "A learning objective" in tuto.objectives + assert tuto.time_estimation == "1H" + assert 'the_best_contributor' in tuto.contributors + assert '# First section' in tuto.body + shutil.rmtree("topics") + + +def test_tutorial_init_data_lib(): + """Test :func:`planemo.training.tutorial.tutorial.init_data_lib`.""" + tuto = Tutorial( + training=training, + topic=topic) + tuto.init_data_lib() + assert tuto.data_lib['destination']['type'] == 'library' + assert tuto.data_lib['items'][0]['name'] == topic.title + assert tuto.data_lib['items'][0]['items'][0]['name'] == tuto.title + # from existing data library file + os.makedirs(tuto.dir) + tuto.data_lib = {} + tuto.init_data_lib() + assert tuto.data_lib['items'][0]['name'] == topic.title + assert tuto.data_lib['items'][0]['items'][0]['name'] == tuto.title + # other tutorial already there and add the new one + tuto.data_lib['items'][0]['items'][0]['name'] = 'Different tutorial' + save_to_yaml(tuto.data_lib, tuto.data_lib_fp) + tuto.init_data_lib() + assert tuto.data_lib['items'][0]['items'][0]['name'] == 'Different tutorial' + assert tuto.data_lib['items'][0]['items'][1]['name'] == tuto.title + shutil.rmtree("topics") + + +def test_tutorial_get_tuto_metata(): + """Test :func:`planemo.training.tutorial.tutorial.get_tuto_metata`.""" + tuto = Tutorial( + training=training, + topic=topic) + tuto.questions = ['q1', 'q2'] + metadata = tuto.get_tuto_metata() + assert 'title: The new tutorial' in metadata + assert '- q1' in metadata + + +def test_tutorial_set_dir_name(): + """Test :func:`planemo.training.tutorial.tutorial.set_dir_name`.""" + tuto = Tutorial( + training=training, + topic=topic) + tuto.name = "the_tuto" + tuto.set_dir_name() + assert tuto.name in tuto.dir + assert tuto.name in tuto.tuto_fp + assert tuto.name in tuto.slide_fp + assert tuto.name in tuto.data_lib_fp + assert tuto.name in tuto.wf_dir + assert tuto.name in tuto.wf_fp + + +def test_tutorial_exists(): + """Test :func:`planemo.training.tutorial.tutorial.exists`.""" + # default + tuto = Tutorial( + training=training, + topic=topic) + assert not tuto.exists() + # after dir creation + os.makedirs(tuto.dir) + assert tuto.exists() + shutil.rmtree("topics") + + +def test_tutorial_has_workflow(): + """Test :func:`planemo.training.tutorial.tutorial.has_workflow`.""" + # default + tuto = Tutorial( + training=training, + topic=topic) + assert not tuto.has_workflow() + # with wf filepath + tuto.init_wf_fp = WF_FP + assert tuto.has_workflow() + # with no wf filepah nor wf id + tuto.init_wf_fp = None + tuto.init_wf_id = '' + assert not tuto.has_workflow() + # with wf id + tuto.init_wf_id = 'ID' + assert tuto.has_workflow() + + +def test_tutorial_export_workflow_file(): + """Test :func:`planemo.training.tutorial.tutorial.export_workflow_file`.""" + tuto = Tutorial( + training=training, + topic=topic) + os.makedirs(tuto.wf_dir) + # with worflow fp + tuto.init_wf_fp = WF_FP + tuto.export_workflow_file() + assert os.path.exists(tuto.wf_fp) + # with workflow id + tuto.init_wf_fp = None + os.remove(tuto.wf_fp) + assert is_galaxy_engine(**KWDS) + galaxy_url = 'http://%s:%s' % (KWDS['host'], KWDS['port']) + with engine_context(CTX, **KWDS) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + tuto.init_wf_id = config.workflow_id(WF_FP) + tuto.training.galaxy_url = galaxy_url + tuto.training.galaxy_api_key = config.user_api_key + tuto.export_workflow_file() + assert os.path.exists(tuto.wf_fp) + shutil.rmtree("topics") + + +def test_tutorial_get_files_from_zenodo(): + """Test :func:`planemo.training.tutorial.tutorial.get_files_from_zenodo`.""" + tuto = Tutorial( + training=training, + topic=topic, + zenodo_link=zenodo_link) + tuto.datatype_fp = datatype_fp + files, z_record = tuto.get_files_from_zenodo() + assert z_record == "1321885" + # test links + file_link_prefix = "https://zenodo.org/api/files/51a1b5db-ff05-4cda-83d4-3b46682f921f" + assert file_link_prefix in tuto.zenodo_file_links[0] + # test files dict + assert file_link_prefix in files[0]['url'] + assert files[0]['src'] == 'url' + assert files[0]['info'] == zenodo_link + assert "# Please add" in files[0]['ext'] + assert files[1]['ext'] == 'csv' + + +def test_tutorial_prepare_data_library_from_zenodo(): + """Test :func:`planemo.training.tutorial.tutorial.prepare_data_library_from_zenodo`.""" + # without zenodo link + tuto = Tutorial( + training=training, + topic=topic) + tuto.datatype_fp = datatype_fp + os.makedirs(tuto.wf_dir) + tuto.prepare_data_library_from_zenodo() + assert os.path.exists(tuto.data_lib_fp) + assert 'DOI' not in open(tuto.data_lib_fp, 'r').read() + # with zenodo link + tuto.zenodo_link = zenodo_link + tuto.prepare_data_library_from_zenodo() + assert "DOI: 10.5281/zenodo" in open(tuto.data_lib_fp, 'r').read() + shutil.rmtree("topics") + + +def test_tutorial_write_hands_on_tutorial(): + """Test :func:`planemo.training.tutorial.tutorial.write_hands_on_tutorial`.""" + tuto = Tutorial( + training=training, + topic=topic) + os.makedirs(tuto.wf_dir) + tuto.zenodo_file_links = ["URL1", "URL2"] + tuto.write_hands_on_tutorial() + assert os.path.exists(tuto.tuto_fp) + with open(tuto.tuto_fp, 'r') as tuto_f: + tuto_c = tuto_f.read() + assert 'layout: tutorial_hands_on' in tuto_c + assert '# Introduction' in tuto_c + assert 'URL1' in tuto_c + assert '# Conclusion' in tuto_c + shutil.rmtree("topics") + + +def test_tutorial_create_hands_on_tutorial(): + """Test :func:`planemo.training.tutorial.tutorial.create_hands_on_tutorial`.""" + tuto = Tutorial( + training=training, + topic=topic) + os.makedirs(tuto.wf_dir) + # with init_wf_id and no Galaxy URL + tuto.init_wf_id = 'ID' + tuto.training.galaxy_url = None + exp_exception = "No Galaxy URL given" + with assert_raises_regexp(Exception, exp_exception): + tuto.create_hands_on_tutorial(CTX) + # with init_wf_id and no Galaxy API key + tuto.init_wf_id = 'ID' + tuto.training.galaxy_url = 'http://%s:%s' % (KWDS['host'], KWDS['port']) + tuto.training.galaxy_api_key = None + exp_exception = "No API key to access the given Galaxy instance" + with assert_raises_regexp(Exception, exp_exception): + tuto.create_hands_on_tutorial(CTX) + # with init_wf_id + assert is_galaxy_engine(**KWDS) + with engine_context(CTX, **KWDS) as galaxy_engine: + with galaxy_engine.ensure_runnables_served([RUNNABLE]) as config: + tuto.init_wf_id = config.workflow_id(WF_FP) + tuto.training.galaxy_api_key = config.user_api_key + tuto.create_hands_on_tutorial(CTX) + assert os.path.exists(tuto.tuto_fp) + os.remove(tuto.tuto_fp) + # with init_wf_fp + tuto.init_wf_id = None + tuto.init_wf_fp = WF_FP + tuto.create_hands_on_tutorial(CTX) + assert os.path.exists(tuto.tuto_fp) + shutil.rmtree("topics") + + +def test_tutorial_create_tutorial(): + """Test :func:`planemo.training.tutorial.tutorial.create_tutorial`.""" + tuto = Tutorial( + training=training, + topic=topic) + tuto.init_from_kwds({ + 'tutorial_name': "my_tuto", + 'tutorial_title': "Title of tuto", + 'hands_on': True, + 'slides': True, + 'workflow': WF_FP, + 'workflow_id': None, + 'zenodo_link': zenodo_link, + 'datatypes': datatype_fp + }) + tuto.create_tutorial(CTX) + assert os.path.exists(tuto.dir) + assert os.path.exists(tuto.tour_dir) + assert os.path.exists(tuto.wf_dir) + assert os.path.exists(tuto.data_lib_fp) + assert os.path.exists(tuto.tuto_fp) + assert os.path.exists(tuto.slide_fp) + assert 'layout: tutorial_slides' in open(tuto.slide_fp, 'r').read() + shutil.rmtree("topics") diff --git a/tests/test_training_utils.py b/tests/test_training_utils.py new file mode 100644 index 000000000..d60df8a09 --- /dev/null +++ b/tests/test_training_utils.py @@ -0,0 +1,109 @@ +"""Training:utils functions.""" +import os + +from planemo.training.utils import ( + load_yaml, + Reference, + Requirement, + save_to_yaml +) +from .test_utils import TEST_DATA_DIR + +metadata_fp = os.path.join(TEST_DATA_DIR, "training_metadata.yaml") + + +def test_load_yaml(): + """Test :func:`planemo.training.utils.load_yaml`.""" + metadata = load_yaml(metadata_fp) + # test if name there + assert metadata["name"] == "test" + # test if order of material is conserved + assert metadata['maintainers'][0] == 'maintainer1' + + +def test_save_to_yaml(): + """Test :func:`planemo.training.utils.save_to_yaml`.""" + metadata = load_yaml(metadata_fp) + new_metadata_fp = "metadata.yaml" + save_to_yaml(metadata, new_metadata_fp) + assert os.path.exists(new_metadata_fp) + os.remove(new_metadata_fp) + + +def test_requirement_init(): + """Test :func:`planemo.training.utils.Requirement.init`.""" + # test requirement with default parameter + req = Requirement() + assert req.title == "" + assert req.type == "internal" + assert req.link == "/introduction/" + # test requirement with non default + req = Requirement(title="Introduction", req_type="external", link="URL") + assert req.title == "Introduction" + assert req.type == "external" + assert req.link == "URL" + + +def test_requirement_init_from_dict(): + """Test :func:`planemo.training.utils.Requirement.init_from_dict`.""" + req = Requirement() + req.init_from_dict({ + 'title': 'The Requirement', + 'type': 'external', + 'link': "http://URL" + }) + assert req.title == 'The Requirement' + assert req.type == 'external' + assert req.link == "http://URL" + + +def test_requirement_export_to_ordered_dict(): + """Test :func:`planemo.training.utils.Requirement.export_to_ordered_dict`.""" + req = Requirement() + exp_req = req.export_to_ordered_dict() + assert 'title' in exp_req + assert exp_req['title'] == "" + assert 'type' in exp_req + assert 'link' in exp_req + + +def test_reference_init(): + """Test :func:`planemo.training.utils.Reference.init`.""" + # test requirement with default parameter + ref = Reference() + assert ref.authors == "authors et al" + assert ref.title == "the title" + assert ref.link == "link" + assert ref.summary == "Why this reference is useful" + # test requirement with non default + ref = Reference(authors="the authors", title="a title", link="URL", summary="The summary") + assert ref.authors == "the authors" + assert ref.title == "a title" + assert ref.link == "URL" + assert ref.summary == "The summary" + + +def test_reference_init_from_dict(): + """Test :func:`planemo.training.utils.Reference.init_from_dict`.""" + ref = Reference() + ref.init_from_dict({ + 'authors': 'my author et al', + 'title': 'The Reference to read', + 'link': "http://URL", + 'summary': 'why we should read it' + }) + assert ref.authors == 'my author et al' + assert ref.title == 'The Reference to read' + assert ref.link == "http://URL" + assert ref.summary == "why we should read it" + + +def test_reference_export_to_ordered_dict(): + """Test :func:`planemo.training.utils.Reference.export_to_ordered_dict`.""" + ref = Reference() + exp_ref = ref.export_to_ordered_dict() + assert 'authors' in exp_ref + assert 'title' in exp_ref + assert exp_ref['title'] == "the title" + assert 'link' in exp_ref + assert 'summary' in exp_ref From de98fbbd4d8f2183b71ba7e542397069933994ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Fri, 7 Sep 2018 17:39:23 +0200 Subject: [PATCH 25/26] Update the documentation --- docs/commands.rst | 5 +- docs/commands/training_fill_data_library.rst | 27 +++ docs/commands/training_generate_from_wf.rst | 171 +++++++++++++++++ docs/commands/training_init.rst | 182 +++++++++++++++++++ docs/planemo.commands.rst | 24 +++ docs/planemo.rst | 1 + docs/planemo.training.rst | 46 +++++ 7 files changed, 454 insertions(+), 2 deletions(-) create mode 100644 docs/commands/training_fill_data_library.rst create mode 100644 docs/commands/training_generate_from_wf.rst create mode 100644 docs/commands/training_init.rst create mode 100644 docs/planemo.training.rst diff --git a/docs/commands.rst b/docs/commands.rst index 6fea08d5e..f62c172c0 100644 --- a/docs/commands.rst +++ b/docs/commands.rst @@ -23,7 +23,6 @@ documentation describes these commands. .. include:: commands/conda_search.rst .. include:: commands/config_init.rst .. include:: commands/container_register.rst -.. include:: commands/cwl_script.rst .. include:: commands/database_create.rst .. include:: commands/database_delete.rst .. include:: commands/database_list.rst @@ -41,7 +40,6 @@ documentation describes these commands. .. include:: commands/profile_list.rst .. include:: commands/project_init.rst .. include:: commands/pull_request.rst -.. include:: commands/run.rst .. include:: commands/serve.rst .. include:: commands/share_test.rst .. include:: commands/shed_build.rst @@ -58,6 +56,9 @@ documentation describes these commands. .. include:: commands/test_reports.rst .. include:: commands/tool_factory.rst .. include:: commands/tool_init.rst +.. include:: commands/training_fill_data_library.rst +.. include:: commands/training_generate_from_wf.rst +.. include:: commands/training_init.rst .. include:: commands/travis_init.rst .. include:: commands/virtualenv.rst .. include:: commands/workflow_convert.rst \ No newline at end of file diff --git a/docs/commands/training_fill_data_library.rst b/docs/commands/training_fill_data_library.rst new file mode 100644 index 000000000..f9d141429 --- /dev/null +++ b/docs/commands/training_fill_data_library.rst @@ -0,0 +1,27 @@ + +``training_fill_data_library`` command +====================================== + +This section is auto-generated from the help text for the planemo command +``training_fill_data_library``. This help message can be generated with ``planemo training_fill_data_library +--help``. + +**Usage**:: + + planemo training_fill_data_library [OPTIONS] TOOL_PATH + +**Help** + +Build training template from workflow. +**Options**:: + + + --topic_name TEXT Name (directory name) of the topic to create or in which + a tutorial should be created or updates [required] + --tutorial_name TEXT Name (directory name) of the tutorial to modify + [required] + --zenodo_link TEXT Zenodo URL with the input data + --datatypes PATH YAML file with the correspondance between Zenodo + extension and Galaxy datatypes + --help Show this message and exit. + diff --git a/docs/commands/training_generate_from_wf.rst b/docs/commands/training_generate_from_wf.rst new file mode 100644 index 000000000..d9659978a --- /dev/null +++ b/docs/commands/training_generate_from_wf.rst @@ -0,0 +1,171 @@ + +``training_generate_from_wf`` command +====================================== + +This section is auto-generated from the help text for the planemo command +``training_generate_from_wf``. This help message can be generated with ``planemo training_generate_from_wf +--help``. + +**Usage**:: + + planemo training_generate_from_wf [OPTIONS] TOOL_PATH + +**Help** + +Create tutorial skeleton from workflow. +**Options**:: + + + --topic_name TEXT Name (directory name) of the topic to create + or in which a tutorial should be created or + updates [required] + --tutorial_name TEXT Name (directory name) of the tutorial to + modify [required] + --workflow PATH Workflow of the tutorial (locally) + --galaxy_url TEXT URL of a Galaxy instance with the workflow + --galaxy_api_key TEXT API key on the Galaxy instance with the + workflow + --workflow_id TEXT ID of the workflow on the Galaxy instance + --galaxy_root DIRECTORY Root of development galaxy directory to + execute command with. + --galaxy_database_seed PATH Preseeded Galaxy sqlite database to target. + --extra_tools PATH Extra tool sources to include in Galaxy's tool + panel (file or directory). These will not be + linted/tested/etc... but they will be + available to workflows and for interactive + use. + --install_galaxy Download and configure a disposable copy of + Galaxy from github. + --galaxy_branch TEXT Branch of Galaxy to target (defaults to + master) if a Galaxy root isn't specified. + --galaxy_source TEXT Git source of Galaxy to target (defaults to + the official galaxyproject github source if a + Galaxy root isn't specified. + --skip_venv Do not create or source a virtualenv + environment for Galaxy, this should be used or + instance to preserve an externally configured + virtual environment or conda environment. + --no_cache_galaxy Skip caching of Galaxy source and dependencies + obtained with --install_galaxy. Not caching + this results in faster downloads (no git) - so + is better on throw away instances such with + TravisCI. + --no_cleanup Do not cleanup temp files created for and by + Galaxy. + --galaxy_email TEXT E-mail address to use when launching single- + user Galaxy server. + --docker / --no_docker Run Galaxy tools in Docker if enabled. + --docker_cmd TEXT Command used to launch docker (defaults to + docker). + --docker_sudo / --no_docker_sudo + Flag to use sudo when running docker. + --docker_host TEXT Docker host to target when executing docker + commands (defaults to localhost). + --docker_sudo_cmd TEXT sudo command to use when --docker_sudo is + enabled (defaults to sudo). + --mulled_containers, --biocontainers + Test tools against mulled containers (forces + --docker). + --job_config_file PATH Job configuration file for Galaxy to target. + --tool_dependency_dir DIRECTORY + Tool dependency dir for Galaxy to target. + --port INTEGER Port to serve Galaxy on (default is 9090). + --host TEXT Host to bind Galaxy to. Default is 127.0.0.1 + that is restricted to localhost connections + for security reasons set to 0.0.0.0 to bind + Galaxy to all ports including potentially + publicly accessible ones. + --engine [galaxy|docker_galaxy|external_galaxy] + Select an engine to serve aritfacts such as + tools and workflows. Defaults to a local + Galaxy, but running Galaxy within a Docker + container. + --non_strict_cwl Disable strict validation of CWL. + --docker_galaxy_image TEXT Docker image identifier for docker-galaxy- + flavor used if engine type is specified as + ``docker-galaxy``. Defaults to + quay.io/bgruening/galaxy. + --docker_extra_volume PATH Extra path to mount if --engine docker. + --test_data DIRECTORY test-data directory to for specified tool(s). + --tool_data_table PATH tool_data_table_conf.xml file to for specified + tool(s). + --dependency_resolvers_config_file PATH + Dependency resolver configuration for Galaxy + to target. + --brew_dependency_resolution Configure Galaxy to use plain brew dependency + resolution. + --shed_dependency_resolution Configure Galaxy to use brewed Tool Shed + dependency resolution. + --no_dependency_resolution Configure Galaxy with no dependency resolvers. + --conda_prefix DIRECTORY Conda prefix to use for conda dependency + commands. + --conda_exec PATH Location of conda executable. + --conda_debug Enable more verbose conda logging. + --conda_channels, --conda_ensure_channels TEXT + Ensure conda is configured with specified + comma separated list of channels. + --conda_use_local Use locally built packages while building + Conda environments. + --conda_dependency_resolution Configure Galaxy to use only conda for + dependency resolution. + --conda_copy_dependencies Conda dependency resolution for Galaxy will + copy dependencies instead of attempting to + link them. + --conda_auto_install / --no_conda_auto_install + Conda dependency resolution for Galaxy will + attempt to install requested but missing + packages. + --conda_auto_init / --no_conda_auto_init + Conda dependency resolution for Galaxy will + auto install conda itself using miniconda if + not availabe on conda_prefix. + --profile TEXT Name of profile (created with the + profile_create command) to use with this + command. + --postgres Use postgres database type. + --database_type [postgres|postgres_docker|sqlite|auto] + Type of database to use for profile - 'auto', + 'sqlite', 'postgres', and 'postgres_docker' + are available options. Use postgres to use an + existing postgres server you user can access + without a password via the psql command. Use + postgres_docker to have Planemo manage a + docker container running postgres. Data with + postgres_docker is not yet persisted past when + you restart the docker container launched by + Planemo so be careful with this option. + --postgres_psql_path TEXT Name or or path to postgres client binary + (psql). + --postgres_database_user TEXT Postgres username for managed development + databases. + --postgres_database_host TEXT Postgres host name for managed development + databases. + --postgres_database_port TEXT Postgres port for managed development + databases. + --file_path DIRECTORY Location for files created by Galaxy (e.g. + database/files). + --database_connection TEXT Database connection string to use for Galaxy. + --shed_tool_conf TEXT Location of shed tools conf file for Galaxy. + --shed_tool_path TEXT Location of shed tools directory for Galaxy. + --galaxy_single_user / --no_galaxy_single_user + By default Planemo will configure Galaxy to + run in single-user mode where there is just + one user and this user is automatically logged + it. Use --no_galaxy_single_user to prevent + Galaxy from running this way. + --daemon Serve Galaxy process as a daemon. + --pid_file PATH Location of pid file is executed with + --daemon. + --ignore_dependency_problems When installing shed repositories for + workflows, ignore dependency issues. These + likely indicate a problem but in some cases + may not prevent a workflow from successfully + executing. + --shed_install / --no_shed_install + By default Planemo will attempt to install + repositories needed for workflow testing. This + may not be appropriate for production servers + and so this can disabled by calling planemo + with --no_shed_install. + --help Show this message and exit. + diff --git a/docs/commands/training_init.rst b/docs/commands/training_init.rst new file mode 100644 index 000000000..b26df02db --- /dev/null +++ b/docs/commands/training_init.rst @@ -0,0 +1,182 @@ + +``training_init`` command +====================================== + +This section is auto-generated from the help text for the planemo command +``training_init``. This help message can be generated with ``planemo training_init +--help``. + +**Usage**:: + + planemo training_init [OPTIONS] TOOL_PATH + +**Help** + +Build training template from workflow. +**Options**:: + + + --topic_name TEXT Name (directory name) of the topic to create + or in which a tutorial should be created or + updates [required] + --topic_title TEXT Title of the topic to create + --topic_summary TEXT Summary of the topic + --topic_target [use|admin-dev|instructors] + Target audience for the topic + --templates PATH Directory with the training templates + --tutorial_name TEXT Name (directory name) of the tutorial to + create or to modify + --tutorial_title TEXT Title of the tutorial + --hands_on Add hands-on for the new tutorial + --slides Add slides for the new tutorial + --workflow PATH Workflow of the tutorial (locally) + --galaxy_url TEXT URL of a Galaxy instance with the workflow + --galaxy_api_key TEXT API key on the Galaxy instance with the + workflow + --workflow_id TEXT ID of the workflow on the Galaxy instance + --zenodo_link TEXT Zenodo URL with the input data + --datatypes PATH YAML file with the correspondance between + Zenodo extension and Galaxy datatypes + --galaxy_root DIRECTORY Root of development galaxy directory to + execute command with. + --galaxy_database_seed PATH Preseeded Galaxy sqlite database to target. + --extra_tools PATH Extra tool sources to include in Galaxy's tool + panel (file or directory). These will not be + linted/tested/etc... but they will be + available to workflows and for interactive + use. + --install_galaxy Download and configure a disposable copy of + Galaxy from github. + --galaxy_branch TEXT Branch of Galaxy to target (defaults to + master) if a Galaxy root isn't specified. + --galaxy_source TEXT Git source of Galaxy to target (defaults to + the official galaxyproject github source if a + Galaxy root isn't specified. + --skip_venv Do not create or source a virtualenv + environment for Galaxy, this should be used or + instance to preserve an externally configured + virtual environment or conda environment. + --no_cache_galaxy Skip caching of Galaxy source and dependencies + obtained with --install_galaxy. Not caching + this results in faster downloads (no git) - so + is better on throw away instances such with + TravisCI. + --no_cleanup Do not cleanup temp files created for and by + Galaxy. + --galaxy_email TEXT E-mail address to use when launching single- + user Galaxy server. + --docker / --no_docker Run Galaxy tools in Docker if enabled. + --docker_cmd TEXT Command used to launch docker (defaults to + docker). + --docker_sudo / --no_docker_sudo + Flag to use sudo when running docker. + --docker_host TEXT Docker host to target when executing docker + commands (defaults to localhost). + --docker_sudo_cmd TEXT sudo command to use when --docker_sudo is + enabled (defaults to sudo). + --mulled_containers, --biocontainers + Test tools against mulled containers (forces + --docker). + --job_config_file PATH Job configuration file for Galaxy to target. + --tool_dependency_dir DIRECTORY + Tool dependency dir for Galaxy to target. + --port INTEGER Port to serve Galaxy on (default is 9090). + --host TEXT Host to bind Galaxy to. Default is 127.0.0.1 + that is restricted to localhost connections + for security reasons set to 0.0.0.0 to bind + Galaxy to all ports including potentially + publicly accessible ones. + --engine [galaxy|docker_galaxy|external_galaxy] + Select an engine to serve aritfacts such as + tools and workflows. Defaults to a local + Galaxy, but running Galaxy within a Docker + container. + --non_strict_cwl Disable strict validation of CWL. + --docker_galaxy_image TEXT Docker image identifier for docker-galaxy- + flavor used if engine type is specified as + ``docker-galaxy``. Defaults to + quay.io/bgruening/galaxy. + --docker_extra_volume PATH Extra path to mount if --engine docker. + --test_data DIRECTORY test-data directory to for specified tool(s). + --tool_data_table PATH tool_data_table_conf.xml file to for specified + tool(s). + --dependency_resolvers_config_file PATH + Dependency resolver configuration for Galaxy + to target. + --brew_dependency_resolution Configure Galaxy to use plain brew dependency + resolution. + --shed_dependency_resolution Configure Galaxy to use brewed Tool Shed + dependency resolution. + --no_dependency_resolution Configure Galaxy with no dependency resolvers. + --conda_prefix DIRECTORY Conda prefix to use for conda dependency + commands. + --conda_exec PATH Location of conda executable. + --conda_debug Enable more verbose conda logging. + --conda_channels, --conda_ensure_channels TEXT + Ensure conda is configured with specified + comma separated list of channels. + --conda_use_local Use locally built packages while building + Conda environments. + --conda_dependency_resolution Configure Galaxy to use only conda for + dependency resolution. + --conda_copy_dependencies Conda dependency resolution for Galaxy will + copy dependencies instead of attempting to + link them. + --conda_auto_install / --no_conda_auto_install + Conda dependency resolution for Galaxy will + attempt to install requested but missing + packages. + --conda_auto_init / --no_conda_auto_init + Conda dependency resolution for Galaxy will + auto install conda itself using miniconda if + not availabe on conda_prefix. + --profile TEXT Name of profile (created with the + profile_create command) to use with this + command. + --postgres Use postgres database type. + --database_type [postgres|postgres_docker|sqlite|auto] + Type of database to use for profile - 'auto', + 'sqlite', 'postgres', and 'postgres_docker' + are available options. Use postgres to use an + existing postgres server you user can access + without a password via the psql command. Use + postgres_docker to have Planemo manage a + docker container running postgres. Data with + postgres_docker is not yet persisted past when + you restart the docker container launched by + Planemo so be careful with this option. + --postgres_psql_path TEXT Name or or path to postgres client binary + (psql). + --postgres_database_user TEXT Postgres username for managed development + databases. + --postgres_database_host TEXT Postgres host name for managed development + databases. + --postgres_database_port TEXT Postgres port for managed development + databases. + --file_path DIRECTORY Location for files created by Galaxy (e.g. + database/files). + --database_connection TEXT Database connection string to use for Galaxy. + --shed_tool_conf TEXT Location of shed tools conf file for Galaxy. + --shed_tool_path TEXT Location of shed tools directory for Galaxy. + --galaxy_single_user / --no_galaxy_single_user + By default Planemo will configure Galaxy to + run in single-user mode where there is just + one user and this user is automatically logged + it. Use --no_galaxy_single_user to prevent + Galaxy from running this way. + --daemon Serve Galaxy process as a daemon. + --pid_file PATH Location of pid file is executed with + --daemon. + --ignore_dependency_problems When installing shed repositories for + workflows, ignore dependency issues. These + likely indicate a problem but in some cases + may not prevent a workflow from successfully + executing. + --shed_install / --no_shed_install + By default Planemo will attempt to install + repositories needed for workflow testing. This + may not be appropriate for production servers + and so this can disabled by calling planemo + with --no_shed_install. + --help Show this message and exit. + diff --git a/docs/planemo.commands.rst b/docs/planemo.commands.rst index c034307fa..25b5adcf6 100644 --- a/docs/planemo.commands.rst +++ b/docs/planemo.commands.rst @@ -428,6 +428,30 @@ planemo.commands.cmd\_tool\_init module :undoc-members: :show-inheritance: +planemo.commands.cmd\_training\_fill\_data\_library module +---------------------------------------------------------- + +.. automodule:: planemo.commands.cmd_training_fill_data_library + :members: + :undoc-members: + :show-inheritance: + +planemo.commands.cmd\_training\_generate\_from\_wf module +--------------------------------------------------------- + +.. automodule:: planemo.commands.cmd_training_generate_from_wf + :members: + :undoc-members: + :show-inheritance: + +planemo.commands.cmd\_training\_init module +------------------------------------------- + +.. automodule:: planemo.commands.cmd_training_init + :members: + :undoc-members: + :show-inheritance: + planemo.commands.cmd\_travis\_before\_install module ---------------------------------------------------- diff --git a/docs/planemo.rst b/docs/planemo.rst index b74831ab4..61b8ccbab 100644 --- a/docs/planemo.rst +++ b/docs/planemo.rst @@ -18,6 +18,7 @@ Subpackages planemo.shed planemo.shed2tap planemo.test + planemo.training planemo.xml Submodules diff --git a/docs/planemo.training.rst b/docs/planemo.training.rst new file mode 100644 index 000000000..e8829d83f --- /dev/null +++ b/docs/planemo.training.rst @@ -0,0 +1,46 @@ +planemo.training package +======================== + +Submodules +---------- + +planemo.training.tool\_input module +----------------------------------- + +.. automodule:: planemo.training.tool_input + :members: + :undoc-members: + :show-inheritance: + +planemo.training.topic module +----------------------------- + +.. automodule:: planemo.training.topic + :members: + :undoc-members: + :show-inheritance: + +planemo.training.tutorial module +-------------------------------- + +.. automodule:: planemo.training.tutorial + :members: + :undoc-members: + :show-inheritance: + +planemo.training.utils module +----------------------------- + +.. automodule:: planemo.training.utils + :members: + :undoc-members: + :show-inheritance: + + +Module contents +--------------- + +.. automodule:: planemo.training + :members: + :undoc-members: + :show-inheritance: From ff390ff3a5157663359db94d867e988ec51ee6f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C3=A9r=C3=A9nice=20Batut?= Date: Tue, 11 Sep 2018 11:30:56 +0200 Subject: [PATCH 26/26] Fix test of workflow formatting and test_get_wf_param_values comparison --- planemo/training/tutorial.py | 15 ++++++++++----- tests/test_training_tutorial.py | 3 ++- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/planemo/training/tutorial.py b/planemo/training/tutorial.py index 0eba650cb..c237a1a4c 100644 --- a/planemo/training/tutorial.py +++ b/planemo/training/tutorial.py @@ -27,6 +27,7 @@ save_to_yaml ) +from pprint import pprint TUTO_HAND_ON_TEMPLATE = """--- layout: tutorial_hands_on @@ -526,7 +527,7 @@ def get_wf_inputs(step_inp): def get_wf_param_values(init_params, inp_connections): """Get the param values from a workflow step and format them into a hierarchical dictionary.""" - if not isinstance(init_params, str) or '\": \"' not in init_params: + if not isinstance(init_params, str) or '": ' not in init_params: form_params = init_params else: form_params = json.loads(init_params) @@ -534,11 +535,9 @@ def get_wf_param_values(init_params, inp_connections): if '__class__' in form_params and form_params['__class__'] == 'RuntimeValue': form_params = inp_connections else: - json_params = form_params - form_params = {} - for p in json_params: + for p in form_params: inp = inp_connections[p] if p in inp_connections else {} - form_params[p] = get_wf_param_values(json_params[p], inp) + form_params[p] = get_wf_param_values(form_params[p], inp) elif isinstance(form_params, list): json_params = form_params form_params = [] @@ -556,7 +555,9 @@ def format_wf_steps(wf, gi): steps = wf['steps'] for s in range(len(steps)): + print('format_wf_steps') wf_step = steps[str(s)] + pprint(wf_step) # get params in workflow wf_param_values = {} if wf_step['tool_state'] and wf_step['input_connections']: @@ -570,7 +571,11 @@ def format_wf_steps(wf, gi): tool_desc = {'inputs': []} # get formatted param description paramlist = '' + pprint(tool_desc) + pprint(wf_param_values) + print(type(wf_param_values)) for inp in tool_desc["inputs"]: + pprint(inp) tool_inp = ToolInput(inp, wf_param_values, steps, 1, should_be_there=True) paramlist += tool_inp.get_formatted_desc() # format the hands-on box diff --git a/tests/test_training_tutorial.py b/tests/test_training_tutorial.py index 15c51598b..2cc7e147f 100644 --- a/tests/test_training_tutorial.py +++ b/tests/test_training_tutorial.py @@ -100,7 +100,8 @@ def test_get_wf_param_values(): """Test :func:`planemo.training.tutorial.get_wf_param_values`.""" wf_step = wf['steps']['4'] wf_param_value_tests = get_wf_param_values(wf_step['tool_state'], get_wf_inputs(wf_step['input_connections'])) - assert wf_param_values == wf_param_value_tests + for k in wf_param_values: + assert k in wf_param_value_tests def test_format_wf_steps():