From 4a17c0cb58dcc41656328b655ae00318f50afc32 Mon Sep 17 00:00:00 2001 From: Julianus Pfeuffer Date: Thu, 28 Apr 2022 18:07:44 +0200 Subject: [PATCH] [FIX] AWS and conda (#163) * [FIX] AWS and conda * [CI] add conda tests * More fixes * remove debug * add mamba to gh actions? * channels? * remove debug var * No dev version * Update ci.yml * Update ci.yml * lint --- .github/workflows/ci.yml | 42 +++++++++++++++++-- .gitignore | 1 + conf/mambaci.config | 12 ++++++ conf/test.config | 1 + conf/test_dia.config | 1 + conf/test_full.config | 1 + conf/test_lfq.config | 1 + conf/test_localize.config | 1 + lib/WorkflowMain.groovy | 14 +++++++ modules/local/openms/consensusid/main.nf | 2 +- modules/local/openms/decoydatabase/main.nf | 2 +- .../local/openms/extractpsmfeatures/main.nf | 2 +- .../local/openms/falsediscoveryrate/main.nf | 2 +- modules/local/openms/filemerge/main.nf | 2 +- .../local/openms/idconflictresolver/main.nf | 2 +- modules/local/openms/idfilter/main.nf | 2 +- modules/local/openms/idmapper/main.nf | 2 +- modules/local/openms/idpep/main.nf | 2 +- modules/local/openms/idscoreswitcher/main.nf | 2 +- modules/local/openms/indexpeptides/main.nf | 2 +- modules/local/openms/isobaricanalyzer/main.nf | 2 +- modules/local/openms/msstatsconverter/main.nf | 2 +- modules/local/openms/mzmlindexing/main.nf | 2 +- modules/local/openms/openmspeakpicker/main.nf | 2 +- modules/local/openms/proteininference/main.nf | 2 +- .../local/openms/proteinquantifier/main.nf | 2 +- modules/local/openms/proteomicslfq/main.nf | 2 +- .../openms/thirdparty/luciphoradapter/main.nf | 7 +++- .../openms/thirdparty/percolator/main.nf | 2 +- .../thirdparty/searchenginemsgf/main.nf | 6 ++- modules/local/pmultiqc/main.nf | 2 +- modules/local/thermorawfileparser/main.nf | 18 +++++++- nextflow.config | 2 + subworkflows/local/create_input_channel.nf | 2 +- subworkflows/local/file_preparation.nf | 1 - subworkflows/local/id.nf | 19 +-------- workflows/lfq.nf | 5 ++- workflows/quantms.nf | 26 +++++++++++- workflows/tmt.nf | 4 +- 39 files changed, 151 insertions(+), 53 deletions(-) create mode 100644 conf/mambaci.config diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4c987039..b3393fea 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,12 +14,14 @@ jobs: NXF_ANSI_LOG: false CAPSULE_LOG: none TEST_PROFILE: ${{ matrix.test_profile }} + EXEC_PROFILE: ${{ matrix.exec_profile }} name: Run pipeline with test data # Only run on push if this is the nf-core dev branch (merged PRs) if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/quantms') }} runs-on: ubuntu-latest strategy: + fail-fast: false matrix: # Nextflow versions include: @@ -30,6 +32,12 @@ jobs: - NXF_VER: "" NXF_EDGE: "1" test_profile: ["test", "test_lfq", "test_dia", "test_localize"] + exec_profile: ["docker", "conda"] + exclude: + - test_profile: test_dia + exec_profile: conda + - test_profile: test_localize + exec_profile: conda steps: - name: Check out pipeline code uses: actions/checkout@v2 @@ -44,18 +52,44 @@ jobs: wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + - name: Install mamba + if: matrix.exec_profile == 'conda' + uses: conda-incubator/setup-miniconda@v2 + with: + python-version: 3.9 + mamba-version: "*" + channels: conda-forge,defaults + + #- name: Install micromamba as mamba + # if: matrix.exec_profile == 'conda' + # run: | + # wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba + # mv bin/micromamba 
bin/mamba + # echo "$(pwd)/bin" >> $GITHUB_PATH + # echo "$(pwd)/mamba/bin" >> $GITHUB_PATH + # ./bin/mamba shell init -s bash -p ./mamba + - name: Run pipeline with test data + if: matrix.exec_profile != 'conda' + # TODO nf-core: You can customise CI pipeline run tests as required + # For example: adding multiple test runs with different parameters + # Remember that you can parallelise this by using strategy.matrix + run: | + nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results + + - name: Run pipeline with test data in conda profile (and single-threaded) + if: matrix.exec_profile == 'conda' # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix run: | - nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,docker --outdir ${TEST_PROFILE}_results + nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE,mambaci --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results - name: Gather failed logs if: failure() || cancelled() run: | mkdir failed_logs - failed=$(grep "FAILED" ${TEST_PROFILE}_results/pipeline_info/execution_trace.txt | cut -f 2) + failed=$(grep "FAILED" ${TEST_PROFILE}_${EXEC_PROFILE}_results/pipeline_info/execution_trace.txt | cut -f 2) while read -r line ; do cp $(ls work/${line}*/*.log) failed_logs/ | true ; done <<< "$failed" - uses: actions/upload-artifact@v1 if: failure() || cancelled() @@ -67,8 +101,8 @@ jobs: if: always() name: Upload results with: - name: ${{ env.TEST_PROFILE }}_results - path: ${{ env.TEST_PROFILE }}_results + name: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results + path: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results - uses: actions/upload-artifact@v1 if: always() name: Upload log diff --git a/.gitignore b/.gitignore index bcfcc4a9..5b8dd1e1 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ testing* .idea/* *.log /build/ +results*/ diff --git a/conf/mambaci.config b/conf/mambaci.config new file mode 100644 index 00000000..a9bbeae6 --- /dev/null +++ b/conf/mambaci.config @@ -0,0 +1,12 @@ +// We need this because mamba is completely bugged +// https://github.com/mamba-org/mamba/issues/1429 +// and nextflow does not support micromamba +// https://github.com/mamba-org/mamba/issues/1654 +process { + executor = 'local' + maxForks = 1 +} +executor { + pollInterval = '5sec' + queueSize = 1 +} diff --git a/conf/test.config b/conf/test.config index 6be93226..bb1d959d 100644 --- a/conf/test.config +++ b/conf/test.config @@ -20,6 +20,7 @@ params { max_time = '6.h' outdir = "./results_iso" + tracedir = "${params.outdir}/pipeline_info" // Input data input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/tmt_ci/PXD000001.sdrf.tsv' diff --git a/conf/test_dia.config b/conf/test_dia.config index 8ab5b74b..1b27d1f5 100644 --- a/conf/test_dia.config +++ b/conf/test_dia.config @@ -20,6 +20,7 @@ params { max_time = 48.h outdir = './results_dia' + tracedir = "${params.outdir}/pipeline_info" // Input data input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/lfq_ci/PXD026600/PXD026600.sdrf.tsv' diff --git a/conf/test_full.config b/conf/test_full.config index 54af6b23..1a2e1f1b 100644 --- a/conf/test_full.config +++ b/conf/test_full.config @@ -15,6 +15,7 @@ params { config_profile_description = 'Full test dataset in isotopic labelling mode to check pipeline function and 
sanity of results' outdir = "./results_iso_full" + tracedir = "${params.outdir}/pipeline_info" // Input data for full size test input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/tmt_ci/PXD000001.sdrf.tsv' diff --git a/conf/test_lfq.config b/conf/test_lfq.config index 41959ef2..4e4e61f0 100644 --- a/conf/test_lfq.config +++ b/conf/test_lfq.config @@ -20,6 +20,7 @@ params { max_time = 48.h outdir = "./results_lfq" + tracedir = "${params.outdir}/pipeline_info" // Input data labelling_type = "label free sample" diff --git a/conf/test_localize.config b/conf/test_localize.config index 4c92403a..b8c71108 100644 --- a/conf/test_localize.config +++ b/conf/test_localize.config @@ -20,6 +20,7 @@ params { max_time = 1.h outdir = "./results_localize" + tracedir = "${params.outdir}/pipeline_info" // Input data input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/lfq_ci_phospho/test_phospho.sdrf' diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 8fec508f..d782c776 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -78,6 +78,20 @@ class WorkflowMain { System.exit(1) } + // Check input has been provided + if (!params.outdir) { + log.error "Please provide an outdir to the pipeline e.g. '--outdir ./results'" + System.exit(1) + } + + if (params.tracedir == "null/pipeline_info") + { + log.error """Error: Your tracedir is `null/pipeline_info`, this means you probably set outdir in a way that does not affect the default + `\$params.outdir/pipeline_info` (e.g., by specifying outdir in a profile instead of the commandline or through a `-params-file`. + Either set outdir in a correct way, or redefine tracedir as well (e.g., in your profile).""" + System.exit(1) + } + // check fasta database has been provided if (!params.database) { log.error "Please provide an fasta database to the pipeline e.g. '--database *.fasta'" diff --git a/modules/local/openms/consensusid/main.nf b/modules/local/openms/consensusid/main.nf index 936e2524..02c64fa6 100644 --- a/modules/local/openms/consensusid/main.nf +++ b/modules/local/openms/consensusid/main.nf @@ -4,7 +4,7 @@ process CONSENSUSID { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/decoydatabase/main.nf b/modules/local/openms/decoydatabase/main.nf index bdd7ea81..4a62d2a8 100644 --- a/modules/local/openms/decoydatabase/main.nf +++ b/modules/local/openms/decoydatabase/main.nf @@ -2,7 +2,7 @@ process DECOYDATABASE { label 'process_very_low' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/extractpsmfeatures/main.nf b/modules/local/openms/extractpsmfeatures/main.nf index 2bc7fa1f..3cc6e36b 100644 --- a/modules/local/openms/extractpsmfeatures/main.nf +++ b/modules/local/openms/extractpsmfeatures/main.nf @@ -3,7 +3,7 @@ process EXTRACTPSMFEATURES { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::bumbershoot bioconda::comet-ms bioconda::crux-toolkit=3.2 bioconda::fido=1.0 conda-forge::gnuplot bioconda::luciphor2=2020_04_03 bioconda::msgf_plus=2021.03.22 bioconda::openms=2.8.0 bioconda::pepnovo=20101117 bioconda::percolator=3.5 bioconda::sirius-csifingerid=4.0.1 bioconda::thermorawfileparser=1.3.4 bioconda::xtandem=15.12.15.2 bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/falsediscoveryrate/main.nf b/modules/local/openms/falsediscoveryrate/main.nf index 9c0d6580..1978db9c 100644 --- a/modules/local/openms/falsediscoveryrate/main.nf +++ b/modules/local/openms/falsediscoveryrate/main.nf @@ -3,7 +3,7 @@ process FALSEDISCOVERYRATE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/filemerge/main.nf b/modules/local/openms/filemerge/main.nf index bbb433bb..4267a402 100644 --- a/modules/local/openms/filemerge/main.nf +++ b/modules/local/openms/filemerge/main.nf @@ -3,7 +3,7 @@ process FILEMERGE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idconflictresolver/main.nf b/modules/local/openms/idconflictresolver/main.nf index 45149e7e..0a1efee6 100644 --- a/modules/local/openms/idconflictresolver/main.nf +++ b/modules/local/openms/idconflictresolver/main.nf @@ -2,7 +2,7 @@ process IDCONFLICTRESOLVER { label 'process_low' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idfilter/main.nf b/modules/local/openms/idfilter/main.nf index 9f6d02ba..fa99d36d 100644 --- a/modules/local/openms/idfilter/main.nf +++ b/modules/local/openms/idfilter/main.nf @@ -4,7 +4,7 @@ process IDFILTER { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? 
"openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idmapper/main.nf b/modules/local/openms/idmapper/main.nf index 246b937a..7bbaddc1 100644 --- a/modules/local/openms/idmapper/main.nf +++ b/modules/local/openms/idmapper/main.nf @@ -4,7 +4,7 @@ process IDMAPPER { label 'process_medium' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idpep/main.nf b/modules/local/openms/idpep/main.nf index de9e23b6..f4ef1cb6 100644 --- a/modules/local/openms/idpep/main.nf +++ b/modules/local/openms/idpep/main.nf @@ -1,7 +1,7 @@ process IDPEP { label 'process_very_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idscoreswitcher/main.nf b/modules/local/openms/idscoreswitcher/main.nf index 6c3e149e..6761f6fb 100644 --- a/modules/local/openms/idscoreswitcher/main.nf +++ b/modules/local/openms/idscoreswitcher/main.nf @@ -3,7 +3,7 @@ process IDSCORESWITCHER { label 'process_very_low' label 'process_single_thread' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/indexpeptides/main.nf b/modules/local/openms/indexpeptides/main.nf index 609043e5..3780d068 100644 --- a/modules/local/openms/indexpeptides/main.nf +++ b/modules/local/openms/indexpeptides/main.nf @@ -1,7 +1,7 @@ process INDEXPEPTIDES { label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/isobaricanalyzer/main.nf b/modules/local/openms/isobaricanalyzer/main.nf index 5847ce2d..ebde605c 100644 --- a/modules/local/openms/isobaricanalyzer/main.nf +++ b/modules/local/openms/isobaricanalyzer/main.nf @@ -2,7 +2,7 @@ process ISOBARICANALYZER { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/msstatsconverter/main.nf b/modules/local/openms/msstatsconverter/main.nf index 04433993..6748dced 100644 --- a/modules/local/openms/msstatsconverter/main.nf +++ b/modules/local/openms/msstatsconverter/main.nf @@ -1,7 +1,7 @@ process MSSTATSCONVERTER { label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/mzmlindexing/main.nf b/modules/local/openms/mzmlindexing/main.nf index 0da4e445..b4cae21c 100644 --- a/modules/local/openms/mzmlindexing/main.nf +++ b/modules/local/openms/mzmlindexing/main.nf @@ -2,7 +2,7 @@ process MZMLINDEXING { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/openmspeakpicker/main.nf b/modules/local/openms/openmspeakpicker/main.nf index f067fac6..2db083f6 100644 --- a/modules/local/openms/openmspeakpicker/main.nf +++ b/modules/local/openms/openmspeakpicker/main.nf @@ -2,7 +2,7 @@ process OPENMSPEAKPICKER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/proteininference/main.nf b/modules/local/openms/proteininference/main.nf index 807190e5..70830bc9 100644 --- a/modules/local/openms/proteininference/main.nf +++ b/modules/local/openms/proteininference/main.nf @@ -1,7 +1,7 @@ process PROTEININFERENCE { label 'process_medium' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/proteinquantifier/main.nf b/modules/local/openms/proteinquantifier/main.nf index 3015135f..e347098a 100644 --- a/modules/local/openms/proteinquantifier/main.nf +++ b/modules/local/openms/proteinquantifier/main.nf @@ -1,7 +1,7 @@ process PROTEINQUANTIFIER { label 'process_medium' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/proteomicslfq/main.nf b/modules/local/openms/proteomicslfq/main.nf index 1cbc1656..e21f6091 100644 --- a/modules/local/openms/proteomicslfq/main.nf +++ b/modules/local/openms/proteomicslfq/main.nf @@ -1,7 +1,7 @@ process PROTEOMICSLFQ { label 'process_high' - conda (params.enable_conda ? "openms::openms=2.8.0.dev" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/thirdparty/luciphoradapter/main.nf b/modules/local/openms/thirdparty/luciphoradapter/main.nf index bea9a955..50001b15 100644 --- a/modules/local/openms/thirdparty/luciphoradapter/main.nf +++ b/modules/local/openms/thirdparty/luciphoradapter/main.nf @@ -2,7 +2,7 @@ process LUCIPHORADAPTER { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bumbershoot bioconda::comet-ms bioconda::crux-toolkit=3.2 bioconda::fido=1.0 conda-forge::gnuplot bioconda::luciphor2=2020_04_03 bioconda::msgf_plus=2021.03.22 bioconda::openms=2.8.0 bioconda::pepnovo=20101117 bioconda::percolator=3.5 bioconda::sirius-csifingerid=4.0.1 bioconda::thermorawfileparser=1.3.4 bioconda::xtandem=15.12.15.2 bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" @@ -17,9 +17,12 @@ process LUCIPHORADAPTER { path "*.log", emit: log script: + // The OpenMS adapters need the actuall jar file, not the executable/shell wrapper that (bio)conda creates luciphor_jar = '' - if (workflow.containerEngine) { + if (workflow.containerEngine || (task.executor == "awsbatch")) { luciphor_jar = "-executable \$(find /usr/local/share/luciphor2-*/luciphor2.jar -maxdepth 0)" + } else if (params.enable_conda) { + luciphor_jar = "-executable \$(find \$CONDA_PREFIX/share/luciphor2-*/luciphor2.jar -maxdepth 0)" } def args = task.ext.args ?: '' diff --git a/modules/local/openms/thirdparty/percolator/main.nf b/modules/local/openms/thirdparty/percolator/main.nf index d3a1d22d..4417c4eb 100644 --- a/modules/local/openms/thirdparty/percolator/main.nf +++ b/modules/local/openms/thirdparty/percolator/main.nf @@ -1,7 +1,7 @@ process PERCOLATOR { label 'process_medium' - conda (params.enable_conda ? "bioconda::bumbershoot bioconda::comet-ms bioconda::crux-toolkit=3.2 bioconda::fido=1.0 conda-forge::gnuplot bioconda::luciphor2=2020_04_03 bioconda::msgf_plus=2021.03.22 bioconda::openms=2.8.0 bioconda::pepnovo=20101117 bioconda::percolator=3.5 bioconda::sirius-csifingerid=4.0.1 bioconda::thermorawfileparser=1.3.4 bioconda::xtandem=15.12.15.2 bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index e69d1c04..869e0278 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -16,10 +16,12 @@ process SEARCHENGINEMSGF { path "*.log", emit: log script: - // find a way to add MSGFPlus.jar dependence + // The OpenMS adapters need the actuall jar file, not the executable/shell wrapper that (bio)conda creates msgf_jar = '' - if (workflow.containerEngine) { + if (workflow.containerEngine || (task.executor == "awsbatch")) { msgf_jar = "-executable \$(find /usr/local/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" + } else if (params.enable_conda) { + msgf_jar = "-executable \$(find \$CONDA_PREFIX/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" } def args = task.ext.args ?: '' diff --git a/modules/local/pmultiqc/main.nf b/modules/local/pmultiqc/main.nf index 35d2fd00..e072bef5 100644 --- a/modules/local/pmultiqc/main.nf +++ b/modules/local/pmultiqc/main.nf @@ -1,7 +1,7 @@ process PMULTIQC { label 'process_high' - conda (params.enable_conda ? "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.10" : null) + conda (params.enable_conda ? "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.11" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pmultiqc:0.0.11--pyhdfd78af_0" } else { diff --git a/modules/local/thermorawfileparser/main.nf b/modules/local/thermorawfileparser/main.nf index 64794222..0860c384 100644 --- a/modules/local/thermorawfileparser/main.nf +++ b/modules/local/thermorawfileparser/main.nf @@ -8,7 +8,23 @@ process THERMORAWFILEPARSER { 'https://depot.galaxyproject.org/singularity/thermorawfileparser:1.3.4--ha8f3691_0' : 'quay.io/biocontainers/thermorawfileparser:1.3.4--ha8f3691_0' }" - stageInMode {task.attempt == 1 ? 'link' : (task.attempt == 2 ? 'symlink' : 'copy')} + stageInMode { + if (task.attempt == 1) { + if (executor == "awsbatch") { + 'symlink' + } else { + 'link' + } + } else if (task.attempt == 2) { + if (executor == "awsbatch") { + 'copy' + } else { + 'symlink' + } + } else { + 'copy' + } + } input: tuple val(meta), path(rawfile) diff --git a/nextflow.config b/nextflow.config index dc226019..141584c9 100644 --- a/nextflow.config +++ b/nextflow.config @@ -227,6 +227,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false + conda.useMamba = true } docker { docker.enabled = true @@ -270,6 +271,7 @@ profiles { test_full { includeConfig 'conf/test_full.config' } test_lfq { includeConfig 'conf/test_lfq.config' } test_dia { includeConfig 'conf/test_dia.config' } + mambaci { includeConfig 'conf/mambaci.config' } } // Load module config after profile, so they can depend on overwritten input parameters specific for each profile. diff --git a/subworkflows/local/create_input_channel.nf b/subworkflows/local/create_input_channel.nf index a179a119..256335b7 100644 --- a/subworkflows/local/create_input_channel.nf +++ b/subworkflows/local/create_input_channel.nf @@ -132,7 +132,7 @@ def create_meta_channel(LinkedHashMap row, is_sdrf, enzymes, files, wrapper) { } } // Nothing to determing for dia. Only LFQ allowed there. 
- if (!meta.acquisition_method.equals("dia")) { + if (!meta.acquisition_method.equals("dia")) { if (wrapper.labelling_type.equals("")) { if (meta.labelling_type.contains("tmt") || meta.labelling_type.contains("itraq") || meta.labelling_type.contains("label free")) { wrapper.labelling_type = meta.labelling_type diff --git a/subworkflows/local/file_preparation.nf b/subworkflows/local/file_preparation.nf index c3b1d51e..c55a8c9a 100644 --- a/subworkflows/local/file_preparation.nf +++ b/subworkflows/local/file_preparation.nf @@ -63,6 +63,5 @@ workflow FILE_PREPARATION { emit: results = ch_results // channel: [val(mzml_id), indexedmzml] - version = ch_versions // channel: [ *.version.txt ] } diff --git a/subworkflows/local/id.nf b/subworkflows/local/id.nf index 6dfcee66..299ea16e 100644 --- a/subworkflows/local/id.nf +++ b/subworkflows/local/id.nf @@ -12,35 +12,21 @@ include { PSMRESCORING } from './psmrescoring' include { PSMFDRCONTROL } from './psmfdrcontrol' include { PHOSPHOSCORING } from './phosphoscoring' -if (params.database) { ch_db_for_decoy_creation = file(params.database, checkIfExists: true) } else { exit 1, 'No protein database provided' } - workflow ID { take: file_preparation_results + ch_database_wdecoy main: ch_software_versions = Channel.empty() - // - // MODULE: Generate decoy database - // - (searchengine_in_db, pepidx_in_db, plfq_in_db) = ( params.add_decoys - ? [ Channel.empty(), Channel.empty(), Channel.empty(), Channel.empty() ] - : [ Channel.fromPath(params.database), Channel.fromPath(params.database), Channel.fromPath(params.database) ] ) - if (params.add_decoys) { - DECOYDATABASE( - ch_db_for_decoy_creation - ) - searchengine_in_db = DECOYDATABASE.out.db_decoy - ch_software_versions = ch_software_versions.mix(DECOYDATABASE.out.version.ifEmpty(null)) - } // // SUBWORKFLOW: DatabaseSearchEngines // DATABASESEARCHENGINES ( file_preparation_results, - searchengine_in_db + ch_database_wdecoy ) ch_software_versions = ch_software_versions.mix(DATABASESEARCHENGINES.out.versions.ifEmpty(null)) @@ -79,6 +65,5 @@ workflow ID { emit: id_results = id_results psmrescoring_results = PSMRESCORING.out.results - searchengine_in_db = searchengine_in_db version = ch_software_versions } diff --git a/workflows/lfq.nf b/workflows/lfq.nf index 3f558ec1..743121de 100644 --- a/workflows/lfq.nf +++ b/workflows/lfq.nf @@ -28,6 +28,7 @@ workflow LFQ { take: file_preparation_results ch_expdesign + ch_database_wdecoy main: @@ -36,7 +37,7 @@ workflow LFQ { // // SUBWORKFLOWS: ID // - ID(file_preparation_results) + ID(file_preparation_results, ch_database_wdecoy) ch_software_versions = ch_software_versions.mix(ID.out.version.ifEmpty(null)) // @@ -51,7 +52,7 @@ workflow LFQ { PROTEOMICSLFQ(ch_plfq.mzmls.collect(), ch_plfq.ids.collect(), ch_expdesign, - ID.out.searchengine_in_db + ch_database_wdecoy ) ch_software_versions = ch_software_versions.mix(PROTEOMICSLFQ.out.version.ifEmpty(null)) diff --git a/workflows/quantms.nf b/workflows/quantms.nf index 2493ddbf..03b3b634 100644 --- a/workflows/quantms.nf +++ b/workflows/quantms.nf @@ -37,6 +37,7 @@ include { TMT } from './tmt' include { LFQ } from './lfq' include { DIA } from './dia' include { PMULTIQC as SUMMARYPIPELINE } from '../modules/local/pmultiqc/main' +include { DECOYDATABASE } from '../modules/local/openms/decoydatabase/main' // // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules @@ -116,12 +117,33 @@ workflow QUANTMS { ch_pipeline_results = Channel.empty() ch_ids_pmultiqc = Channel.empty() - TMT(ch_fileprep_result.iso, 
CREATE_INPUT_CHANNEL.out.ch_expdesign) + // + // MODULE: Generate decoy database + // + if (params.database) { ch_db_for_decoy_creation = Channel.from(file(params.database, checkIfExists: true)) } else { exit 1, 'No protein database provided' } + + + CREATE_INPUT_CHANNEL.out.ch_meta_config_iso.mix(CREATE_INPUT_CHANNEL.out.ch_meta_config_lfq).first() // Only run if iso or lfq have at least one file + | combine( ch_db_for_decoy_creation ) // Combine it so now the channel has elements like [potential_trigger_channel_element, actual_db], [potential_trigger_channel_element, actual_db2], etc (there should only be one DB though) + | map { it[-1] } // Remove the "trigger" part + | set {ch_db_for_decoy_creation_or_null} + + searchengine_in_db = params.add_decoys ? Channel.empty() : Channel.fromPath(params.database) + if (params.add_decoys) { + DECOYDATABASE( + ch_db_for_decoy_creation_or_null + ) + searchengine_in_db = DECOYDATABASE.out.db_decoy + ch_versions = ch_versions.mix(DECOYDATABASE.out.version.ifEmpty(null)) + } + + + TMT(ch_fileprep_result.iso, CREATE_INPUT_CHANNEL.out.ch_expdesign, searchengine_in_db) ch_ids_pmultiqc = ch_ids_pmultiqc.mix(TMT.out.ch_pmultiqc_ids) ch_pipeline_results = ch_pipeline_results.mix(TMT.out.final_result) ch_versions = ch_versions.mix(TMT.out.versions.ifEmpty(null)) - LFQ(ch_fileprep_result.lfq, CREATE_INPUT_CHANNEL.out.ch_expdesign) + LFQ(ch_fileprep_result.lfq, CREATE_INPUT_CHANNEL.out.ch_expdesign, searchengine_in_db) ch_ids_pmultiqc = ch_ids_pmultiqc.mix(LFQ.out.ch_pmultiqc_ids) ch_pipeline_results = ch_pipeline_results.mix(LFQ.out.final_result) ch_versions = ch_versions.mix(LFQ.out.versions.ifEmpty(null)) diff --git a/workflows/tmt.nf b/workflows/tmt.nf index 12676d18..2f8700bd 100644 --- a/workflows/tmt.nf +++ b/workflows/tmt.nf @@ -27,15 +27,17 @@ workflow TMT { take: file_preparation_results ch_expdesign + ch_database_wdecoy main: ch_software_versions = Channel.empty() + ch_database_wdecoy.view() // // SUBWORKFLOWS: ID // - ID(file_preparation_results) + ID(file_preparation_results, ch_database_wdecoy) ch_software_versions = ch_software_versions.mix(ID.out.version.ifEmpty(null)) //
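
Quick local smoke test of the new conda/mamba support, mirroring the CI step above. The profile names (test_lfq, conda, mambaci) come from this patch; the pipeline handle and the output directory are only illustrative:

    # sketch of a local run under the conda executor profile added in this PR;
    # mambaci forces a single-threaded local executor to work around the mamba
    # issues referenced in conf/mambaci.config
    nextflow run nf-core/quantms -profile test_lfq,conda,mambaci --outdir ./test_lfq_conda_results

As in the CI workflow, the docker/singularity variants omit the mambaci profile and can run with the default executor settings.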