
Commit

Merge pull request #458 from maxibor/patch
Patch release: Fix #449 (wrong contigs from aDNA workflow going to binning)
jfy133 committed Jun 19, 2023
2 parents c9468cb + 9f86e7d commit 61deef8
Showing 6 changed files with 31 additions and 19 deletions.
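In short: with `--ancient_dna` enabled, bin refinement (DAS Tool) now receives the consensus-recalled contigs emitted by the ancient DNA validation subworkflow instead of the raw assembler contigs it was previously, and wrongly, given. A condensed sketch of the core change, using only channel and output names that appear in the workflows/mag.nf hunk at the bottom of this diff:

// Route the right contigs into bin refinement.
// With --ancient_dna, use the damage-corrected (recalled) consensus contigs;
// otherwise drop the BAM/BAI and keep [ meta, contigs ] from the grouped mappings.
if (params.ancient_dna) {
    ch_contigs_for_binrefinement = ANCIENT_DNA_ASSEMBLY_VALIDATION.out.contigs_recalled
} else {
    ch_contigs_for_binrefinement = BINNING_PREPARATION.out.grouped_mappings
        .map { meta, contigs, bam, bai -> [ meta, contigs ] }
}
BINNING_REFINEMENT ( ch_contigs_for_binrefinement, BINNING.out.bins, BINNING.out.metabat2depths, ch_short_reads )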
7 changes: 7 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,13 @@
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## v2.3.1 - [2023-06-19]

### `Fixed`

- [#458](https://github.com/nf-core/mag/pull/458) - Correct the major issue in ancient DNA workflow of binning refinement being performed on uncorrected contigs instead of aDNA consensus recalled contigs (issue [#449](https://github.com/nf-core/mag/issues/449))
- [#451](https://github.com/nf-core/mag/pull/451) - Fix results file overwriting in Ancient DNA workflow (reported by @alexhbnr, fix by @jfy133, and integrated by @maxibor in [#458](https://github.com/nf-core/mag/pull/458) )

## v2.3.0 - [2023/03/02]

### `Added`
12 changes: 9 additions & 3 deletions conf/modules.config
@@ -455,6 +455,7 @@ process {
}

withName: FREEBAYES {
ext.prefix = { "${meta.assembler}-${meta.id}" }
ext.args = "-p ${params.freebayes_ploidy} -q ${params.freebayes_min_basequality} -F ${params.freebayes_minallelefreq}"
publishDir = [
path: { "${params.outdir}/Ancient_DNA/variant_calling/freebayes" },
@@ -464,8 +465,8 @@
}

withName: BCFTOOLS_VIEW {
ext.prefix = { "${meta.assembler}-${meta.id}.filtered" }
ext.args = "-v snps,mnps -i 'QUAL>=${params.bcftools_view_high_variant_quality} || (QUAL>=${params.bcftools_view_medium_variant_quality} && FORMAT/AO>=${params.bcftools_view_minimal_allelesupport})'"
ext.prefix = { "${meta.id}.filtered" }
publishDir = [
path: { "${params.outdir}/Ancient_DNA/variant_calling/filtered" },
mode: params.publish_dir_mode,
@@ -474,6 +475,7 @@
}

withName: BCFTOOLS_CONSENSUS {
ext.prefix = { "${meta.assembler}-${meta.id}" }
publishDir = [
path: {"${params.outdir}/Ancient_DNA/variant_calling/consensus" },
mode: params.publish_dir_mode,
@@ -482,6 +484,7 @@
}

withName: BCFTOOLS_INDEX {
ext.prefix = { "${meta.assembler}-${meta.id}" }
ext.args = "-t"
publishDir = [
path: {"${params.outdir}/Ancient_DNA/variant_calling/index" },
@@ -491,21 +494,24 @@
}

withName: PYDAMAGE_ANALYZE {
ext.prefix = { "${meta.assembler}-${meta.id}" }
publishDir = [
path: {"${params.outdir}/Ancient_DNA/pydamage/analyze/${meta.id}" },
path: {"${params.outdir}/Ancient_DNA/pydamage/analyze/" },
mode: params.publish_dir_mode
]
}

withName: PYDAMAGE_FILTER {
ext.prefix = { "${meta.assembler}-${meta.id}" }
ext.args = "-t ${params.pydamage_accuracy}"
publishDir = [
path: {"${params.outdir}/Ancient_DNA/pydamage/filter/${meta.id}" },
path: {"${params.outdir}/Ancient_DNA/pydamage/filter/" },
mode: params.publish_dir_mode
]
}

withName: SAMTOOLS_FAIDX {
ext.prefix = { "${meta.assembler}-${meta.id}" }
publishDir = [
path: {"${params.outdir}/Ancient_DNA/samtools/faidx" },
mode: params.publish_dir_mode,
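The ext.prefix additions above carry the fix for the results-overwriting issue from #451 into this release: these ancient DNA modules previously named their outputs (or, for PyDamage, their publish directories) using only ${meta.id}, so a sample assembled by more than one assembler could have one assembler's results overwrite the other's. Prefixing with ${meta.assembler} keeps them apart, and the PyDamage publishDir paths drop the per-sample subdirectory since sample and assembler are now encoded in the file prefix. For reference, nf-core modules typically consume this setting with a fallback of the following shape (an illustrative sketch, not copied from any specific module in this repository):

// Inside a module's script block: task.ext.prefix is whatever conf/modules.config
// sets for that process; if unset, fall back to the sample id.
def prefix = task.ext.prefix ?: "${meta.id}"
// With ext.prefix = { "${meta.assembler}-${meta.id}" } this yields e.g. "MEGAHIT-sample1",
// so MEGAHIT and SPAdes results for the same sample no longer share a file name.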
10 changes: 5 additions & 5 deletions docs/output.md
@@ -602,9 +602,9 @@ Optional, only running when parameter `-profile ancient_dna` is specified.
<summary>Output files</summary>

- `Ancient_DNA/pydamage/analyze`
- `[sample/group]/pydamage_results/pydamage_results.csv`: PyDamage raw result tabular file in `.csv` format. Format described here: [pydamage.readthedocs.io/en/0.62/output.html](https://pydamage.readthedocs.io/en/0.62/output.html)
- `[assembler]_[sample/group]/pydamage_results/pydamage_results.csv`: PyDamage raw result tabular file in `.csv` format. Format described here: [pydamage.readthedocs.io/en/0.62/output.html](https://pydamage.readthedocs.io/en/0.62/output.html)
- `Ancient_DNA/pydamage/filter`
- `[sample/group]/pydamage_results/pydamage_results.csv`: PyDamage filtered result tabular file in `.csv` format. Format described here: [pydamage.readthedocs.io/en/0.62/output.html](https://pydamage.readthedocs.io/en/0.62/output.html)
- `[assembler]_[sample/group]/pydamage_results/pydamage_results.csv`: PyDamage filtered result tabular file in `.csv` format. Format described here: [pydamage.readthedocs.io/en/0.62/output.html](https://pydamage.readthedocs.io/en/0.62/output.html)

</details>

@@ -616,11 +616,11 @@ Because of aDNA damage, _de novo_ assemblers sometimes struggle to call a correct …
<summary>Output files</summary>

- `variant_calling/consensus`
- `[sample/group].fa`: contigs sequence with re-called consensus from read-to-contig alignment
- `[assembler]_[sample/group].fa`: contigs sequence with re-called consensus from read-to-contig alignment
- `variant_calling/unfiltered`
- `[sample/group].vcf.gz`: raw variant calls of the reads aligned back to the contigs.
- `[assembler]_[sample/group].vcf.gz`: raw variant calls of the reads aligned back to the contigs.
- `variant_calling/filtered`
- `[sample/group].filtered.vcf.gz`: quality filtered variant calls of the reads aligned back to the contigs.
- `[assembler]_[sample/group].filtered.vcf.gz`: quality filtered variant calls of the reads aligned back to the contigs.

</details>

2 changes: 1 addition & 1 deletion nextflow.config
@@ -308,7 +308,7 @@ manifest {
description = """Assembly, binning and annotation of metagenomes"""
mainScript = 'main.nf'
nextflowVersion = '!>=22.10.1'
- version = '2.3.0'
+ version = '2.3.1'
doi = '10.1093/nargab/lqac007'
}

10 changes: 1 addition & 9 deletions subworkflows/local/binning_refinement.nf
@@ -22,22 +22,14 @@ def getColNo(filename) {

workflow BINNING_REFINEMENT {
take:
- contigs
+ ch_contigs_for_dastool // channel: [ val(meta), path(contigs) ]
bins // channel: [ val(meta), path(bins) ]
depths
reads

main:
ch_versions = Channel.empty()

- // Drop unnecessary files
- ch_contigs_for_dastool = contigs
- .map {
- meta, assembly, bams, bais ->
- def meta_new = meta.clone()
- [ meta_new, assembly ]
- }

ch_bins_for_fastatocontig2bin = RENAME_PREDASTOOL(bins).renamed_bins
.branch {
metabat2: it[0]['binner'] == 'MetaBAT2'
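With the block above removed, BINNING_REFINEMENT no longer unpacks the grouped-mappings tuple itself: its take: block now expects contigs already shaped as [ val(meta), path(contigs) ], and dropping the BAM/BAI is the caller's responsibility. That is what lets workflows/mag.nf (next section) pass in either the recalled contigs or the plain assembler contigs. A minimal sketch of the new calling contract (channel names here are placeholders; the shapes follow the take: block above):

BINNING_REFINEMENT (
    ch_contigs,   // [ val(meta), path(contigs) ], no BAM/BAI expected any more
    ch_bins,      // [ val(meta), path(bins) ]
    ch_depths,    // MetaBAT2 depth files
    ch_reads      // short reads
)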
9 changes: 8 additions & 1 deletion workflows/mag.nf
@@ -581,7 +581,14 @@ workflow MAG {
// If any two of the binners are both skipped at once, do not run because DAS_Tool needs at least one
if ( params.refine_bins_dastool ) {

- BINNING_REFINEMENT ( BINNING_PREPARATION.out.grouped_mappings, BINNING.out.bins, BINNING.out.metabat2depths, ch_short_reads )
+ if (params.ancient_dna) {
+ ch_contigs_for_binrefinement = ANCIENT_DNA_ASSEMBLY_VALIDATION.out.contigs_recalled
+ } else {
+ ch_contigs_for_binrefinement = BINNING_PREPARATION.out.grouped_mappings
+ .map{ meta, contigs, bam, bai -> [ meta, contigs ] }
+ }
+
+ BINNING_REFINEMENT ( ch_contigs_for_binrefinement, BINNING.out.bins, BINNING.out.metabat2depths, ch_short_reads )
ch_versions = ch_versions.mix(BINNING_REFINEMENT.out.versions)

if ( params.postbinning_input == 'raw_bins_only' ) {
