Skip to content

Commit

Permalink
Merge pull request #134 from nf-core/dev
Browse files Browse the repository at this point in the history
Prepare for bugfix release 1.5.1
  • Loading branch information
Leon-Bichmann committed Apr 24, 2020
2 parents 09a4005 + e76f857 commit 22e1bd6
Show file tree
Hide file tree
Showing 7 changed files with 38 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ jobs:
sudo mv nextflow /usr/local/bin/
- name: Pull docker image
run: |
docker pull nfcore/mhcquant:dev && docker tag nfcore/mhcquant:dev nfcore/mhcquant:1.5
docker pull nfcore/mhcquant:dev && docker tag nfcore/mhcquant:dev nfcore/mhcquant:1.5.1
- name: Run pipeline with test data
run: |
sudo nextflow run ${GITHUB_WORKSPACE} -profile test,docker --predict_class_1 --predict_class_2 --predict_RT
11 changes: 11 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
# nf-core/mhcquant: Changelog

## v1.5.1 nf-core/mhcquant "Flying Fish" - 2020/04/24

### `Fixed`

- set optimal config for cluster execution
- fix duplication of ids / mixing of channels

### `Dependencies`

### `Deprecated`

## v1.5 nf-core/mhcquant "Flying Fish" - 2020/04/18

### `Fixed`
Expand Down
4 changes: 2 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ COPY environment.yml /
RUN conda env create -f /environment.yml && conda clean -a

# Dump the details of the installed packages to a file for posterity
RUN conda env export --name nf-core-mhcquant-1.5 > nf-core-mhcquant-1.5.yml
RUN conda env export --name nf-core-mhcquant-1.5.1 > nf-core-mhcquant-1.5.1.yml

# Add conda installation dir and thirdparties to PATH (instead of doing 'conda activate')
ENV PATH /opt/conda/envs/nf-core-mhcquant-1.5/bin:$PATH
ENV PATH /opt/conda/envs/nf-core-mhcquant-1.5.1/bin:$PATH
23 changes: 12 additions & 11 deletions conf/base.config
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@
process {

cpus = { check_max( 1 * task.attempt, 'cpus' ) }
memory = { check_max( 16.GB * task.attempt, 'memory' ) }
time = { check_max( 24.h * task.attempt, 'time' ) }
memory = { check_max( 4.GB * task.attempt, 'memory' ) }
time = { check_max( 2.h * task.attempt, 'time' ) }

errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' }
maxRetries = 1
Expand All @@ -28,21 +28,22 @@ process {
}
withLabel:process_medium {
cpus = { check_max( 6 * task.attempt, 'cpus' ) }
memory = { check_max( 42.GB * task.attempt, 'memory' ) }
time = { check_max( 12.h * task.attempt, 'time' ) }
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
time = { check_max( 2.h * task.attempt, 'time' ) }
}
withLabel:process_high {
cpus = { check_max( 12 * task.attempt, 'cpus' ) }
memory = { check_max( 84.GB * task.attempt, 'memory' ) }
time = { check_max( 24.h * task.attempt, 'time' ) }
withLabel:process_medium_long {
cpus = { check_max( 6 * task.attempt, 'cpus' ) }
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
time = { check_max( 8.h * task.attempt, 'time' ) }
}
withLabel:process_long {
time = { check_max( 48.h * task.attempt, 'time' ) }
withLabel:process_web {
time = { check_max( 6.h * task.attempt, 'time' ) }
errorStrategy = 'retry'
maxRetries = 10
}
withName:get_software_versions {
cache = false
}

}

params {
Expand Down
2 changes: 1 addition & 1 deletion environment.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# You can use this file to create a conda environment for this pipeline:
# conda env create -f environment.yml
name: nf-core-mhcquant-1.5
name: nf-core-mhcquant-1.5.1
channels:
- conda-forge
- bioconda
Expand Down
13 changes: 9 additions & 4 deletions main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -470,6 +470,8 @@ process output_documentation {
process generate_proteins_from_vcf {
publishDir "${params.outdir}/"

label 'process_web'

input:
set val(Sample), val(id), file(fasta_file_vcf), val(d), file(vcf_file) from input_fasta_vcf.combine(input_vcf, by:1)

Expand Down Expand Up @@ -557,7 +559,7 @@ process peak_picking {
process db_search_comet {
tag "${Sample}"

label 'process_high'
label 'process_medium_long'

input:
set val(Sample), val(id), val(Condition), file(mzml_file), val(d), file(fasta_decoy) from raws_converted.mix(input_mzmls.mix(input_mzmls_picked)).join(fastafile_decoy_1.mix(input_fasta_1), by:1, remainder:true)
Expand Down Expand Up @@ -603,7 +605,7 @@ process index_peptides {
set val(Sample), val(id), val(Condition), file(id_file), val(d), file(fasta_decoy) from id_files.join(fastafile_decoy_2.mix(input_fasta_2), by:1)

output:
set val("$id"), val("$Sample"), val("$Condition"), file("${Sample}_${Condition}_${id}_idx.idXML") into (id_files_idx, id_files_idx_original, id_files_idx_original_II)
set val("$id"), val("$Sample"), val("$Condition"), file("${Sample}_${Condition}_${id}_idx.idXML") into (id_files_idx, id_files_idx_original)

script:
"""
Expand Down Expand Up @@ -716,10 +718,13 @@ if(!params.skip_quantification){
.join(id_files_trafo_II.transpose().flatMap{ it -> [tuple(it[1].baseName.split('_-_')[0].toInteger(), it[0], it[1])]}, by: [0,1])
.set{joined_trafos_ids}

id_files_idx_original_II = Channel.empty()

} else {

joined_trafos_mzmls = Channel.empty()
joined_trafos_ids = Channel.empty()
id_files_idx_original_II = id_files_idx_original

}
/*
Expand Down Expand Up @@ -1323,7 +1328,7 @@ process predict_peptides_mhcflurry_class_1 {
process predict_possible_neoepitopes {
publishDir "${params.outdir}/"

label 'process_high'
label 'process_web'

input:
set val(id), val(Sample), val(alleles), file(vcf_file) from neoepitopes_class_1_alleles.join(input_vcf_neoepitope, by:[0,1], remainder:true)
Expand All @@ -1348,7 +1353,7 @@ process predict_possible_neoepitopes {
process predict_possible_class_2_neoepitopes {
publishDir "${params.outdir}/"

label 'process_high'
label 'process_web'

input:
set val(id), val(Sample), val(alleles_II), file(vcf_file) from peptides_class_2_alleles_II.join(input_vcf_neoepitope_II, by:[0,1], remainder:true)
Expand Down
4 changes: 2 additions & 2 deletions nextflow.config
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ params {

// Container slug. Stable releases should specify release tag!
// Developmental code should specify :dev
process.container = 'nfcore/mhcquant:1.5'
process.container = 'nfcore/mhcquant:1.5.1'

// Load base.config by default for all pipelines
includeConfig 'conf/base.config'
Expand Down Expand Up @@ -162,7 +162,7 @@ manifest {
description = 'Identify and quantify peptides from mass spectrometry raw data'
mainScript = 'main.nf'
nextflowVersion = '>=19.10.0'
version = '1.5'
version = '1.5.1'
}

// Function to ensure that resource requirements don't go beyond
Expand Down

0 comments on commit 22e1bd6

Please sign in to comment.