Merge branch 'dev' into slack_report
muffato authored Oct 3, 2022
2 parents a6657a8 + aca662e commit 786cac3
Showing 10 changed files with 108 additions and 67 deletions.
12 changes: 12 additions & 0 deletions CHANGELOG.md
@@ -5,16 +5,28 @@
### Template

- Add `actions/upload-artifact` step to the awstest workflows, to expose the debug log file
- Add `prettier` as a requirement to the Gitpod Docker image
- Bioconda-incompatible conda channel setups now result in more informative error messages ([#1812](https://github.com/nf-core/tools/pull/1812))
- Update the MultiQC module, and update how the default and custom MultiQC config and logo files are supplied to the module
- Add a recommended methods description text to MultiQC to help pipeline users report pipeline usage in publications ([#1749](https://github.com/nf-core/tools/pull/1749))
- Fix template spacing modified by JINJA ([#1830](https://github.com/nf-core/tools/pull/1830))
- Fix MultiQC execution in the template ([#1855](https://github.com/nf-core/tools/pull/1855))
- Don't skip including `base.config` when skipping nf-core/configs

### Linting

- Pipelines: Check that the old renamed `lib` files are not still present:
- `Checks.groovy` -> `Utils.groovy`
- `Completion.groovy` -> `NfcoreTemplate.groovy`
- `Workflow.groovy` -> `WorkflowMain.groovy`

### General

- Add a function to send chat notifications to MS Teams or Slack, enabled via a new `hook_url` param (see the webhook sketch after this list).
- Schema: Remove `allOf` if no definition groups are left.
- Use contextlib to temporarily change working directories ([#1819](https://github.com/nf-core/tools/pull/1819); see the `chdir` sketch after this list)
- More helpful error messages if `nf-core download` can't parse a Singularity image download
- Modules: If something is wrong with the local repo cache, offer to delete it and try again ([#1850](https://github.com/nf-core/tools/issues/1850))
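
As background for the `hook_url` entry above, incoming webhooks on Slack and MS Teams are plain HTTPS endpoints that accept a small JSON payload. A minimal sketch of sending such a notification (illustrative only; the function name and payload shape here are assumptions, not the template's actual implementation):

```python
import json
from urllib.request import Request, urlopen

def send_hook_notification(hook_url, message):
    """POST a simple JSON message to a Slack or MS Teams incoming webhook.

    Hypothetical helper for illustration; both services accept a basic
    {"text": "..."} payload for simple notifications.
    """
    payload = json.dumps({"text": message}).encode("utf-8")
    request = Request(hook_url, data=payload, headers={"Content-Type": "application/json"})
    with urlopen(request) as response:
        return response.status
```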
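
The contextlib change is the usual pattern of a context manager that switches into a directory and restores the previous one on exit; a minimal sketch of that pattern (not necessarily identical to the helper added in #1819):

```python
import contextlib
import os

@contextlib.contextmanager
def chdir(path):
    """Temporarily change the working directory, restoring it on exit."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)

# Usage: code inside the block runs with `path` as the working directory,
# and the original directory is restored even if an exception is raised.
# with chdir("/tmp"):
#     ...
```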

### Modules

2 changes: 1 addition & 1 deletion README.md
@@ -55,7 +55,7 @@ For documentation of the internal Python functions, please refer to the [Tools P
You can install `nf-core/tools` from [bioconda](https://bioconda.github.io/recipes/nf-core/README.html).

First, install conda and configure the channels to use bioconda
(see the [bioconda documentation](https://bioconda.github.io/user/install.html)).
(see the [bioconda documentation](https://bioconda.github.io/index.html#usage)).
Then, just run the conda installation command:

```bash
8 changes: 6 additions & 2 deletions nf_core/download.py
@@ -12,6 +12,7 @@
import subprocess
import sys
import tarfile
import textwrap
from zipfile import ZipFile

import questionary
@@ -453,7 +454,8 @@ def find_container_images(self):
for subdir, _, files in os.walk(os.path.join(self.outdir, "workflow", "modules")):
for file in files:
if file.endswith(".nf"):
with open(os.path.join(subdir, file), "r") as fh:
file_path = os.path.join(subdir, file)
with open(file_path, "r") as fh:
# Look for any lines with `container = "xxx"`
this_container = None
contents = fh.read()
@@ -478,7 +480,9 @@ def find_container_images(self):

# Don't recognise this, throw a warning
else:
log.error(f"[red]Cannot parse container string, skipping: [green]'{file}'")
log.error(
f"[red]Cannot parse container string in '{file_path}':\n\n{textwrap.indent(match, ' ')}\n\n:warning: Skipping this singularity image.."
)

if this_container:
containers_raw.append(this_container)
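
For context on the error message above, `find_container_images` boils down to scanning each module's `.nf` file for a `container` directive and extracting the image name. A simplified sketch of that idea (the regex and function name here are illustrative, and the real code also handles Docker/Singularity ternary expressions):

```python
import re

def parse_container_string(module_nf_text):
    """Return the container image declared in a Nextflow module, or None.

    Illustrative sketch: matches simple `container "..."` directives only.
    """
    match = re.search(r'container\s+["\']([^"\']+)["\']', module_nf_text)
    return match.group(1) if match else None

# Example:
# parse_container_string('container "quay.io/biocontainers/fastqc:0.11.9--0"')
# returns 'quay.io/biocontainers/fastqc:0.11.9--0'
```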
1 change: 1 addition & 0 deletions nf_core/gitpod/gitpod.Dockerfile
@@ -31,6 +31,7 @@ RUN conda update -n base -c defaults conda && \
mamba=0.24.0 \
pip=22.1.2 \
black=22.6.0 \
prettier=2.7.1 \
-n base && \
conda clean --all -f -y

8 changes: 7 additions & 1 deletion nf_core/lint/files_exist.py
@@ -75,7 +75,7 @@ def files_exist(self):
lib/WorkflowPIPELINE.groovy
pyproject.toml
Files that *must not* be present:
Files that *must not* be present, due to being renamed or removed in the template:
.. code-block:: bash
@@ -90,6 +90,9 @@ def files_exist(self):
docs/images/nf-core-PIPELINE_logo.png
.markdownlint.yml
.yamllint.yml
lib/Checks.groovy
lib/Completion.groovy
lib/Workflow.groovy
Files that *should not* be present:
@@ -191,6 +194,9 @@ def files_exist(self):
os.path.join("docs", "images", f"nf-core-{short_name}_logo.png"),
".markdownlint.yml",
".yamllint.yml",
os.path.join("lib", "Checks.groovy"),
os.path.join("lib", "Completion.groovy"),
os.path.join("lib", "Workflow.groovy"),
]
files_warn_ifexists = [".travis.yml"]

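
The three `lib` paths added above go into the linter's list of files that must not exist; conceptually the check is a loop over those paths relative to the pipeline directory, roughly as in this sketch (names here are illustrative, not the linter's actual API):

```python
import os

RENAMED_LIB_FILES = [
    os.path.join("lib", "Checks.groovy"),      # renamed to Utils.groovy
    os.path.join("lib", "Completion.groovy"),  # renamed to NfcoreTemplate.groovy
    os.path.join("lib", "Workflow.groovy"),    # renamed to WorkflowMain.groovy
]

def check_renamed_lib_files(pipeline_dir):
    """Return lint failures for old template files that should have been removed."""
    failed = []
    for relpath in RENAMED_LIB_FILES:
        if os.path.exists(os.path.join(pipeline_dir, relpath)):
            failed.append(f"File must be removed: `{relpath}`")
    return failed
```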
106 changes: 58 additions & 48 deletions nf_core/modules/modules_repo.py
@@ -5,8 +5,9 @@
from pathlib import Path

import git
import rich
import rich.progress
from git.exc import GitCommandError
from git.exc import GitCommandError, InvalidGitRepositoryError

import nf_core.modules.module_utils
import nf_core.modules.modules_json
@@ -150,55 +151,64 @@ def setup_local_repo(self, remote, branch, hide_progress=True):
Sets self.repo
"""
self.local_repo_dir = os.path.join(NFCORE_DIR, self.fullname)
if not os.path.exists(self.local_repo_dir):
try:
pbar = rich.progress.Progress(
"[bold blue]{task.description}",
rich.progress.BarColumn(bar_width=None),
"[bold yellow]{task.fields[state]}",
transient=True,
disable=hide_progress,
)
with pbar:
self.repo = git.Repo.clone_from(
remote,
self.local_repo_dir,
progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Cloning"),
try:
if not os.path.exists(self.local_repo_dir):
try:
pbar = rich.progress.Progress(
"[bold blue]{task.description}",
rich.progress.BarColumn(bar_width=None),
"[bold yellow]{task.fields[state]}",
transient=True,
disable=hide_progress,
)
ModulesRepo.update_local_repo_status(self.fullname, True)
except GitCommandError:
raise LookupError(f"Failed to clone from the remote: `{remote}`")
# Verify that the requested branch exists by checking it out
self.setup_branch(branch)
else:
self.repo = git.Repo(self.local_repo_dir)

if ModulesRepo.no_pull_global:
ModulesRepo.update_local_repo_status(self.fullname, True)
# If the repo is already cloned, fetch the latest changes from the remote
if not ModulesRepo.local_repo_synced(self.fullname):
pbar = rich.progress.Progress(
"[bold blue]{task.description}",
rich.progress.BarColumn(bar_width=None),
"[bold yellow]{task.fields[state]}",
transient=True,
disable=hide_progress,
)
with pbar:
self.repo.remotes.origin.fetch(
progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Pulling")
with pbar:
self.repo = git.Repo.clone_from(
remote,
self.local_repo_dir,
progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Cloning"),
)
ModulesRepo.update_local_repo_status(self.fullname, True)
except GitCommandError:
raise LookupError(f"Failed to clone from the remote: `{remote}`")
# Verify that the requested branch exists by checking it out
self.setup_branch(branch)
else:
self.repo = git.Repo(self.local_repo_dir)

if ModulesRepo.no_pull_global:
ModulesRepo.update_local_repo_status(self.fullname, True)
# If the repo is already cloned, fetch the latest changes from the remote
if not ModulesRepo.local_repo_synced(self.fullname):
pbar = rich.progress.Progress(
"[bold blue]{task.description}",
rich.progress.BarColumn(bar_width=None),
"[bold yellow]{task.fields[state]}",
transient=True,
disable=hide_progress,
)
ModulesRepo.update_local_repo_status(self.fullname, True)

# Before verifying the branch, fetch the changes
# Verify that the requested branch exists by checking it out
self.setup_branch(branch)

# Now merge the changes
tracking_branch = self.repo.active_branch.tracking_branch()
if tracking_branch is None:
raise LookupError(f"There is no remote tracking branch '{self.branch}' in '{self.remote_url}'")
self.repo.git.merge(tracking_branch.name)
with pbar:
self.repo.remotes.origin.fetch(
progress=RemoteProgressbar(pbar, self.fullname, self.remote_url, "Pulling")
)
ModulesRepo.update_local_repo_status(self.fullname, True)

# Before verifying the branch, fetch the changes
# Verify that the requested branch exists by checking it out
self.setup_branch(branch)

# Now merge the changes
tracking_branch = self.repo.active_branch.tracking_branch()
if tracking_branch is None:
raise LookupError(f"There is no remote tracking branch '{self.branch}' in '{self.remote_url}'")
self.repo.git.merge(tracking_branch.name)
except (GitCommandError, InvalidGitRepositoryError) as e:
log.error(f"[red]Could not set up local cache of modules repository:[/]\n{e}\n")
if rich.prompt.Confirm.ask(f"[violet]Delete local cache '{self.local_repo_dir}' and try again?"):
log.info(f"Removing '{self.local_repo_dir}'")
shutil.rmtree(self.local_repo_dir)
self.setup_local_repo(remote, branch, hide_progress)
else:
raise LookupError("Exiting due to error with local modules git repo")

def setup_branch(self, branch):
"""
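
Stripped of the progress bars, the new error handling above amounts to one pattern: try to open (or clone) the cached repository, and if GitPython reports a broken cache, offer to delete the directory and start over. A compact sketch of that pattern (a simplification, not the actual `ModulesRepo` method):

```python
import os
import shutil

import git
from git.exc import GitCommandError, InvalidGitRepositoryError
from rich.prompt import Confirm

def open_or_clone(remote_url, local_dir):
    """Open a cached clone, or offer to rebuild it if the cache is unusable."""
    try:
        if not os.path.exists(local_dir):
            return git.Repo.clone_from(remote_url, local_dir)
        return git.Repo(local_dir)
    except (GitCommandError, InvalidGitRepositoryError) as error:
        print(f"Could not set up local cache of modules repository: {error}")
        if Confirm.ask(f"Delete local cache '{local_dir}' and try again?"):
            shutil.rmtree(local_dir, ignore_errors=True)
            return open_or_clone(remote_url, local_dir)
        raise LookupError("Exiting due to error with local modules git repo") from error
```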
2 changes: 1 addition & 1 deletion nf_core/pipeline-template/main.nf
@@ -4,7 +4,7 @@
{{ name }}
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Github : https://github.com/{{ name }}
{% if branded -%}
{% if branded %}
Website: https://nf-co.re/{{ short_name }}
Slack : https://nfcore.slack.com/channels/{{ short_name }}
{% endif -%}
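
The `{% if branded -%}` to `{% if branded %}` change above is Jinja2 whitespace control: the `-%}` form strips the newline that follows the tag, which was collapsing the template spacing, while the plain `%}` form keeps it. A small standalone illustration of the difference (hypothetical strings, rendered directly with jinja2):

```python
from jinja2 import Template

trimmed = Template("Github : https://github.com/demo\n{% if branded -%}\nWebsite: https://nf-co.re/demo\n{% endif %}")
plain = Template("Github : https://github.com/demo\n{% if branded %}\nWebsite: https://nf-co.re/demo\n{% endif %}")

# With `-%}` the newline after the tag is stripped, so no blank line is emitted
# between the two lines; with `%}` the line break in the template is preserved.
print(trimmed.render(branded=True))
print(plain.render(branded=True))
```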
6 changes: 3 additions & 3 deletions nf_core/pipeline-template/nextflow.config
@@ -59,10 +59,11 @@ params {
max_time = '240.h'

}
{% if nf_core_configs %}

// Load base.config by default for all pipelines
includeConfig 'conf/base.config'

{% if nf_core_configs -%}
// Load nf-core custom profiles from different Institutions
try {
includeConfig "${params.custom_config_base}/nfcore_custom.config"
@@ -77,9 +78,8 @@ try {
// } catch (Exception e) {
// System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config")
// }


{% endif %}

profiles {
debug { process.beforeScript = 'echo $HOSTNAME' }
conda {
14 changes: 7 additions & 7 deletions nf_core/pipeline-template/workflows/pipeline.nf
@@ -23,9 +23,9 @@ if (params.input) { ch_input = file(params.input) } else { exit 1, 'Input sample
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

ch_multiqc_config = file("$projectDir/assets/multiqc_config.yml", checkIfExists: true)
ch_multiqc_custom_config = params.multiqc_config ? file( params.multiqc_config, checkIfExists: true ) : Channel.empty()
ch_multiqc_logo = params.multiqc_logo ? file( params.multiqc_logo, checkIfExists: true ) : Channel.empty()
ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true)
ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath( params.multiqc_config, checkIfExists: true ) : Channel.empty()
ch_multiqc_logo = params.multiqc_logo ? Channel.fromPath( params.multiqc_logo, checkIfExists: true ) : Channel.empty()
ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true)

/*
@@ -91,7 +91,7 @@ workflow {{ short_name|upper }} {
workflow_summary = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.paramsSummaryMultiqc(workflow, summary_params)
ch_workflow_summary = Channel.value(workflow_summary)

methods_description = WorkflowTestpipeline.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description)
methods_description = Workflow{{ short_name[0]|upper }}{{ short_name[1:] }}.methodsDescriptionText(workflow, ch_multiqc_custom_methods_description)
ch_methods_description = Channel.value(methods_description)

ch_multiqc_files = Channel.empty()
@@ -102,9 +102,9 @@ workflow {{ short_name|upper }} {

MULTIQC (
ch_multiqc_files.collect(),
ch_multiqc_config,
ch_multiqc_custom_config,
ch_multiqc_logo
ch_multiqc_config.collect().ifEmpty([]),
ch_multiqc_custom_config.collect().ifEmpty([]),
ch_multiqc_logo.collect().ifEmpty([])
)
multiqc_report = MULTIQC.out.report.toList()
ch_versions = ch_versions.mix(MULTIQC.out.versions)
16 changes: 12 additions & 4 deletions nf_core/schema.py
@@ -228,7 +228,7 @@ def validate_default_params(self):
schema_no_required = copy.deepcopy(self.schema)
if "required" in schema_no_required:
schema_no_required.pop("required")
for group_key, group in schema_no_required["definitions"].items():
for group_key, group in schema_no_required.get("definitions", {}).items():
if "required" in group:
schema_no_required["definitions"][group_key].pop("required")
jsonschema.validate(self.schema_defaults, schema_no_required)
@@ -247,7 +247,7 @@ def validate_default_params(self):
params_ignore = []

# Go over group keys
for group_key, group in schema_no_required["definitions"].items():
for group_key, group in schema_no_required.get("definitions", {}).items():
group_properties = group.get("properties")
for param in group_properties:
if param in params_ignore:
@@ -343,7 +343,7 @@ def validate_schema(self, schema=None):
if "allOf" not in schema:
raise AssertionError("Schema has definitions, but no allOf key")
in_allOf = False
for allOf in schema["allOf"]:
for allOf in schema.get("allOf", []):
if allOf["$ref"] == f"#/definitions/{d_key}":
in_allOf = True
if not in_allOf:
@@ -361,7 +361,7 @@ def validate_schema(self, schema=None):
if "definitions" not in schema:
raise AssertionError("Schema has allOf, but no definitions")
def_key = allOf["$ref"][14:]
if def_key not in schema["definitions"]:
if def_key not in schema.get("definitions", {}):
raise AssertionError(f"Subschema `{def_key}` found in `allOf` but not `definitions`")

# Check that the schema describes at least one parameter
@@ -667,6 +667,14 @@ def remove_schema_empty_definitions(self):
if allOf in self.schema.get("allOf", []):
self.schema["allOf"].remove(allOf)

# If we don't have anything left in "allOf", remove it
if self.schema.get("allOf") == []:
del self.schema["allOf"]

# If we don't have anything left in "definitions", remove it
if self.schema.get("definitions") == {}:
del self.schema["definitions"]

def remove_schema_notfound_configs(self):
"""
Go through top-level schema and all definitions sub-schemas to remove
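
Together with the `.get(...)` guards above, the new block means a schema whose definition groups have all been emptied no longer keeps dangling `allOf` or `definitions` keys. A compact illustration of the same pruning on a plain dictionary (a standalone sketch, not the `PipelineSchema` class itself):

```python
schema = {
    "$schema": "http://json-schema.org/draft-07/schema",
    "definitions": {},   # every definition group has been removed
    "allOf": [],         # the matching $ref entries are gone too
    "properties": {"outdir": {"type": "string"}},
}

# Drop the now-empty keys so downstream validation does not trip over them.
if schema.get("allOf") == []:
    del schema["allOf"]
if schema.get("definitions") == {}:
    del schema["definitions"]

assert "allOf" not in schema and "definitions" not in schema
```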
