From 288c57c03df2c95d343e16e02e1f8158d273fdae Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Wed, 27 Apr 2022 10:12:42 +0000 Subject: [PATCH 01/32] [FIX] AWS and conda --- modules/local/openms/consensusid/main.nf | 2 +- modules/local/openms/decoydatabase/main.nf | 2 +- .../local/openms/falsediscoveryrate/main.nf | 2 +- modules/local/openms/filemerge/main.nf | 2 +- .../local/openms/idconflictresolver/main.nf | 2 +- modules/local/openms/idfilter/main.nf | 2 +- modules/local/openms/idmapper/main.nf | 2 +- modules/local/openms/idpep/main.nf | 2 +- modules/local/openms/idscoreswitcher/main.nf | 2 +- modules/local/openms/indexpeptides/main.nf | 2 +- modules/local/openms/isobaricanalyzer/main.nf | 2 +- modules/local/openms/msstatsconverter/main.nf | 2 +- modules/local/openms/mzmlindexing/main.nf | 2 +- modules/local/openms/openmspeakpicker/main.nf | 2 +- modules/local/openms/proteininference/main.nf | 2 +- modules/local/openms/proteinquantifier/main.nf | 2 +- modules/local/openms/proteomicslfq/main.nf | 2 +- .../openms/thirdparty/luciphoradapter/main.nf | 3 ++- .../openms/thirdparty/searchenginemsgf/main.nf | 3 ++- modules/local/thermorawfileparser/main.nf | 18 +++++++++++++++++- 20 files changed, 38 insertions(+), 20 deletions(-) diff --git a/modules/local/openms/consensusid/main.nf b/modules/local/openms/consensusid/main.nf index 936e2524..02c64fa6 100644 --- a/modules/local/openms/consensusid/main.nf +++ b/modules/local/openms/consensusid/main.nf @@ -4,7 +4,7 @@ process CONSENSUSID { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/decoydatabase/main.nf b/modules/local/openms/decoydatabase/main.nf index bdd7ea81..4a62d2a8 100644 --- a/modules/local/openms/decoydatabase/main.nf +++ b/modules/local/openms/decoydatabase/main.nf @@ -2,7 +2,7 @@ process DECOYDATABASE { label 'process_very_low' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/falsediscoveryrate/main.nf b/modules/local/openms/falsediscoveryrate/main.nf index 9c0d6580..1978db9c 100644 --- a/modules/local/openms/falsediscoveryrate/main.nf +++ b/modules/local/openms/falsediscoveryrate/main.nf @@ -3,7 +3,7 @@ process FALSEDISCOVERYRATE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/filemerge/main.nf b/modules/local/openms/filemerge/main.nf index bbb433bb..4267a402 100644 --- a/modules/local/openms/filemerge/main.nf +++ b/modules/local/openms/filemerge/main.nf @@ -3,7 +3,7 @@ process FILEMERGE { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idconflictresolver/main.nf b/modules/local/openms/idconflictresolver/main.nf index 45149e7e..0a1efee6 100644 --- a/modules/local/openms/idconflictresolver/main.nf +++ b/modules/local/openms/idconflictresolver/main.nf @@ -2,7 +2,7 @@ process IDCONFLICTRESOLVER { label 'process_low' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idfilter/main.nf b/modules/local/openms/idfilter/main.nf index 9f6d02ba..fa99d36d 100644 --- a/modules/local/openms/idfilter/main.nf +++ b/modules/local/openms/idfilter/main.nf @@ -4,7 +4,7 @@ process IDFILTER { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idmapper/main.nf b/modules/local/openms/idmapper/main.nf index 246b937a..7bbaddc1 100644 --- a/modules/local/openms/idmapper/main.nf +++ b/modules/local/openms/idmapper/main.nf @@ -4,7 +4,7 @@ process IDMAPPER { label 'process_medium' label 'openms' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idpep/main.nf b/modules/local/openms/idpep/main.nf index de9e23b6..f4ef1cb6 100644 --- a/modules/local/openms/idpep/main.nf +++ b/modules/local/openms/idpep/main.nf @@ -1,7 +1,7 @@ process IDPEP { label 'process_very_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/idscoreswitcher/main.nf b/modules/local/openms/idscoreswitcher/main.nf index 6c3e149e..6761f6fb 100644 --- a/modules/local/openms/idscoreswitcher/main.nf +++ b/modules/local/openms/idscoreswitcher/main.nf @@ -3,7 +3,7 @@ process IDSCORESWITCHER { label 'process_very_low' label 'process_single_thread' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/indexpeptides/main.nf b/modules/local/openms/indexpeptides/main.nf index 609043e5..3780d068 100644 --- a/modules/local/openms/indexpeptides/main.nf +++ b/modules/local/openms/indexpeptides/main.nf @@ -1,7 +1,7 @@ process INDEXPEPTIDES { label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/isobaricanalyzer/main.nf b/modules/local/openms/isobaricanalyzer/main.nf index 5847ce2d..ebde605c 100644 --- a/modules/local/openms/isobaricanalyzer/main.nf +++ b/modules/local/openms/isobaricanalyzer/main.nf @@ -2,7 +2,7 @@ process ISOBARICANALYZER { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/msstatsconverter/main.nf b/modules/local/openms/msstatsconverter/main.nf index 04433993..6748dced 100644 --- a/modules/local/openms/msstatsconverter/main.nf +++ b/modules/local/openms/msstatsconverter/main.nf @@ -1,7 +1,7 @@ process MSSTATSCONVERTER { label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/mzmlindexing/main.nf b/modules/local/openms/mzmlindexing/main.nf index 0da4e445..b4cae21c 100644 --- a/modules/local/openms/mzmlindexing/main.nf +++ b/modules/local/openms/mzmlindexing/main.nf @@ -2,7 +2,7 @@ process MZMLINDEXING { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/openmspeakpicker/main.nf b/modules/local/openms/openmspeakpicker/main.nf index f067fac6..2db083f6 100644 --- a/modules/local/openms/openmspeakpicker/main.nf +++ b/modules/local/openms/openmspeakpicker/main.nf @@ -2,7 +2,7 @@ process OPENMSPEAKPICKER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/proteininference/main.nf b/modules/local/openms/proteininference/main.nf index 807190e5..70830bc9 100644 --- a/modules/local/openms/proteininference/main.nf +++ b/modules/local/openms/proteininference/main.nf @@ -1,7 +1,7 @@ process PROTEININFERENCE { label 'process_medium' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/proteinquantifier/main.nf b/modules/local/openms/proteinquantifier/main.nf index 3015135f..e347098a 100644 --- a/modules/local/openms/proteinquantifier/main.nf +++ b/modules/local/openms/proteinquantifier/main.nf @@ -1,7 +1,7 @@ process PROTEINQUANTIFIER { label 'process_medium' - conda (params.enable_conda ? "openms::openms=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/proteomicslfq/main.nf b/modules/local/openms/proteomicslfq/main.nf index 1cbc1656..546e7d63 100644 --- a/modules/local/openms/proteomicslfq/main.nf +++ b/modules/local/openms/proteomicslfq/main.nf @@ -1,7 +1,7 @@ process PROTEOMICSLFQ { label 'process_high' - conda (params.enable_conda ? "openms::openms=2.8.0.dev" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0.dev" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" diff --git a/modules/local/openms/thirdparty/luciphoradapter/main.nf b/modules/local/openms/thirdparty/luciphoradapter/main.nf index bea9a955..9956a307 100644 --- a/modules/local/openms/thirdparty/luciphoradapter/main.nf +++ b/modules/local/openms/thirdparty/luciphoradapter/main.nf @@ -18,9 +18,10 @@ process LUCIPHORADAPTER { script: luciphor_jar = '' - if (workflow.containerEngine) { + if (workflow.containerEngine || (process.executor == "awsbatch")) { luciphor_jar = "-executable \$(find /usr/local/share/luciphor2-*/luciphor2.jar -maxdepth 0)" } + //TODO for conda this probably has to be different def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index e69d1c04..1c8a9df7 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -18,9 +18,10 @@ process SEARCHENGINEMSGF { script: // find a way to add MSGFPlus.jar dependence msgf_jar = '' - if (workflow.containerEngine) { + if (workflow.containerEngine || (process.executor == "awsbatch")) { msgf_jar = "-executable \$(find /usr/local/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" } + //TODO for conda this probably has to be different def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" diff --git a/modules/local/thermorawfileparser/main.nf b/modules/local/thermorawfileparser/main.nf index 64794222..3c671f34 100644 --- a/modules/local/thermorawfileparser/main.nf +++ b/modules/local/thermorawfileparser/main.nf @@ -8,7 +8,23 @@ process THERMORAWFILEPARSER { 'https://depot.galaxyproject.org/singularity/thermorawfileparser:1.3.4--ha8f3691_0' : 'quay.io/biocontainers/thermorawfileparser:1.3.4--ha8f3691_0' }" - stageInMode {task.attempt == 1 ? 'link' : (task.attempt == 2 ? 
'symlink' : 'copy')} + stageInMode { + if (task.attempt == 1) { + if (process.executor == "awsbatch") { + 'symlink' + } else { + 'link' + } + } else if (task.attempt == 2) { + if (process.executor == "awsbatch") { + 'copy' + } else { + 'symlink' + } + } else { + 'copy' + } + } input: tuple val(meta), path(rawfile) From 61ec9663fdf57f46bc83a6f6e7446dd8016fa6a9 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Wed, 27 Apr 2022 10:15:24 +0000 Subject: [PATCH 02/32] [CI] add conda tests --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4c987039..95c41376 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: # Test latest edge release of Nextflow - NXF_VER: "" NXF_EDGE: "1" - test_profile: ["test", "test_lfq", "test_dia", "test_localize"] + test_profile: ["test,docker", "test_lfq,docker", "test_dia,docker", "test_localize,docker", "test_lfq,conda", "test_dia,conda"] steps: - name: Check out pipeline code uses: actions/checkout@v2 @@ -49,7 +49,7 @@ jobs: # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix run: | - nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,docker --outdir ${TEST_PROFILE}_results + nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE --outdir ${TEST_PROFILE}_results - name: Gather failed logs if: failure() || cancelled() From 5dba980055489511f9b49eb1f4245dac03cdc9b5 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Wed, 27 Apr 2022 14:55:30 +0000 Subject: [PATCH 03/32] More fixes --- .../local/openms/extractpsmfeatures/main.nf | 2 +- .../openms/thirdparty/luciphoradapter/main.nf | 8 +++--- .../openms/thirdparty/percolator/main.nf | 2 +- .../thirdparty/searchenginemsgf/main.nf | 8 +++--- modules/local/pmultiqc/main.nf | 2 +- modules/local/thermorawfileparser/main.nf | 4 +-- nextflow.config | 1 + subworkflows/local/create_input_channel.nf | 2 +- subworkflows/local/file_preparation.nf | 2 +- subworkflows/local/id.nf | 19 ++------------ workflows/lfq.nf | 5 ++-- workflows/quantms.nf | 26 +++++++++++++++++-- workflows/tmt.nf | 4 ++- 13 files changed, 50 insertions(+), 35 deletions(-) diff --git a/modules/local/openms/extractpsmfeatures/main.nf b/modules/local/openms/extractpsmfeatures/main.nf index 2bc7fa1f..3cc6e36b 100644 --- a/modules/local/openms/extractpsmfeatures/main.nf +++ b/modules/local/openms/extractpsmfeatures/main.nf @@ -3,7 +3,7 @@ process EXTRACTPSMFEATURES { label 'process_single_thread' label 'openms' - conda (params.enable_conda ? "bioconda::bumbershoot bioconda::comet-ms bioconda::crux-toolkit=3.2 bioconda::fido=1.0 conda-forge::gnuplot bioconda::luciphor2=2020_04_03 bioconda::msgf_plus=2021.03.22 bioconda::openms=2.8.0 bioconda::pepnovo=20101117 bioconda::percolator=3.5 bioconda::sirius-csifingerid=4.0.1 bioconda::thermorawfileparser=1.3.4 bioconda::xtandem=15.12.15.2 bioconda::openms-thirdparty=2.8.0" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' :
         'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }"
diff --git a/modules/local/openms/thirdparty/luciphoradapter/main.nf b/modules/local/openms/thirdparty/luciphoradapter/main.nf
index 9956a307..50001b15 100644
--- a/modules/local/openms/thirdparty/luciphoradapter/main.nf
+++ b/modules/local/openms/thirdparty/luciphoradapter/main.nf
@@ -2,7 +2,7 @@ process LUCIPHORADAPTER {
     tag "$meta.id"
     label 'process_medium'

-    conda (params.enable_conda ? "bioconda::bumbershoot bioconda::comet-ms bioconda::crux-toolkit=3.2 bioconda::fido=1.0 conda-forge::gnuplot bioconda::luciphor2=2020_04_03 bioconda::msgf_plus=2021.03.22 bioconda::openms=2.8.0 bioconda::pepnovo=20101117 bioconda::percolator=3.5 bioconda::sirius-csifingerid=4.0.1 bioconda::thermorawfileparser=1.3.4 bioconda::xtandem=15.12.15.2 bioconda::openms-thirdparty=2.8.0" : null)
+    conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
         'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' :
         'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }"
@@ -17,11 +17,13 @@ process LUCIPHORADAPTER {
     path "*.log", emit: log

     script:
+    // The OpenMS adapters need the actual jar file, not the executable/shell wrapper that (bio)conda creates
     luciphor_jar = ''
-    if (workflow.containerEngine || (process.executor == "awsbatch")) {
+    if (workflow.containerEngine || (task.executor == "awsbatch")) {
         luciphor_jar = "-executable \$(find /usr/local/share/luciphor2-*/luciphor2.jar -maxdepth 0)"
+    } else if (params.enable_conda) {
+        luciphor_jar = "-executable \$(find \$CONDA_PREFIX/share/luciphor2-*/luciphor2.jar -maxdepth 0)"
     }
-    //TODO for conda this probably has to be different

     def args = task.ext.args ?: ''
     def prefix = task.ext.prefix ?: "${meta.id}"
diff --git a/modules/local/openms/thirdparty/percolator/main.nf b/modules/local/openms/thirdparty/percolator/main.nf
index d3a1d22d..4417c4eb 100644
--- a/modules/local/openms/thirdparty/percolator/main.nf
+++ b/modules/local/openms/thirdparty/percolator/main.nf
@@ -1,7 +1,7 @@
 process PERCOLATOR {
     label 'process_medium'

-    conda (params.enable_conda ? "bioconda::bumbershoot bioconda::comet-ms bioconda::crux-toolkit=3.2 bioconda::fido=1.0 conda-forge::gnuplot bioconda::luciphor2=2020_04_03 bioconda::msgf_plus=2021.03.22 bioconda::openms=2.8.0 bioconda::pepnovo=20101117 bioconda::percolator=3.5 bioconda::sirius-csifingerid=4.0.1 bioconda::thermorawfileparser=1.3.4 bioconda::xtandem=15.12.15.2 bioconda::openms-thirdparty=2.8.0" : null)
+    conda (params.enable_conda ? "bioconda::openms-thirdparty=2.8.0" : null)
     container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
'https://depot.galaxyproject.org/singularity/openms-thirdparty:2.8.0--h9ee0642_0' : 'quay.io/biocontainers/openms-thirdparty:2.8.0--h9ee0642_0' }" diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index 1c8a9df7..6cee7c97 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -16,12 +16,13 @@ process SEARCHENGINEMSGF { path "*.log", emit: log script: - // find a way to add MSGFPlus.jar dependence + // The OpenMS adapters need the actuall jar file, not the executable/shell wrapper that (bio)conda creates msgf_jar = '' - if (workflow.containerEngine || (process.executor == "awsbatch")) { + if (workflow.containerEngine || (task.executor == "awsbatch")) { msgf_jar = "-executable \$(find /usr/local/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" + } else if (params.enable_conda) { + msgf_jar = "-executable \$(find \$CONDA_PREFIX/share/msgf_plus-*/MSGFPlus.jar -maxdepth 0)" } - //TODO for conda this probably has to be different def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" @@ -59,6 +60,7 @@ process SEARCHENGINEMSGF { il_equiv = params.IL_equivalent ? "-PeptideIndexing:IL_equivalent" : "" """ + ls -la \$CONDA_PREFIX MSGFPlusAdapter \\ -protocol $params.protocol \\ -in ${mzml_file} \\ diff --git a/modules/local/pmultiqc/main.nf b/modules/local/pmultiqc/main.nf index 35d2fd00..e072bef5 100644 --- a/modules/local/pmultiqc/main.nf +++ b/modules/local/pmultiqc/main.nf @@ -1,7 +1,7 @@ process PMULTIQC { label 'process_high' - conda (params.enable_conda ? "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.10" : null) + conda (params.enable_conda ? "conda-forge::pandas_schema conda-forge::lzstring bioconda::pmultiqc=0.0.11" : null) if (workflow.containerEngine == 'singularity' && !params.singularity_pull_docker_container) { container "https://depot.galaxyproject.org/singularity/pmultiqc:0.0.11--pyhdfd78af_0" } else { diff --git a/modules/local/thermorawfileparser/main.nf b/modules/local/thermorawfileparser/main.nf index 3c671f34..0860c384 100644 --- a/modules/local/thermorawfileparser/main.nf +++ b/modules/local/thermorawfileparser/main.nf @@ -10,13 +10,13 @@ process THERMORAWFILEPARSER { stageInMode { if (task.attempt == 1) { - if (process.executor == "awsbatch") { + if (executor == "awsbatch") { 'symlink' } else { 'link' } } else if (task.attempt == 2) { - if (process.executor == "awsbatch") { + if (executor == "awsbatch") { 'copy' } else { 'symlink' diff --git a/nextflow.config b/nextflow.config index dc226019..e68ab802 100644 --- a/nextflow.config +++ b/nextflow.config @@ -227,6 +227,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false + conda.useMamba = true } docker { docker.enabled = true diff --git a/subworkflows/local/create_input_channel.nf b/subworkflows/local/create_input_channel.nf index a179a119..256335b7 100644 --- a/subworkflows/local/create_input_channel.nf +++ b/subworkflows/local/create_input_channel.nf @@ -132,7 +132,7 @@ def create_meta_channel(LinkedHashMap row, is_sdrf, enzymes, files, wrapper) { } } // Nothing to determing for dia. Only LFQ allowed there. 
- if (!meta.acquisition_method.equals("dia")) { + if (!meta.acquisition_method.equals("dia")) { if (wrapper.labelling_type.equals("")) { if (meta.labelling_type.contains("tmt") || meta.labelling_type.contains("itraq") || meta.labelling_type.contains("label free")) { wrapper.labelling_type = meta.labelling_type diff --git a/subworkflows/local/file_preparation.nf b/subworkflows/local/file_preparation.nf index c3b1d51e..778d61c5 100644 --- a/subworkflows/local/file_preparation.nf +++ b/subworkflows/local/file_preparation.nf @@ -42,6 +42,7 @@ workflow FILE_PREPARATION { } .set {branched_input_mzMLs} ch_results = ch_results.mix(branched_input_mzMLs.inputIndexedMzML) + ch_results.view() THERMORAWFILEPARSER( branched_input.raw ) ch_versions = ch_versions.mix(THERMORAWFILEPARSER.out.version) @@ -63,6 +64,5 @@ workflow FILE_PREPARATION { emit: results = ch_results // channel: [val(mzml_id), indexedmzml] - version = ch_versions // channel: [ *.version.txt ] } diff --git a/subworkflows/local/id.nf b/subworkflows/local/id.nf index 6dfcee66..299ea16e 100644 --- a/subworkflows/local/id.nf +++ b/subworkflows/local/id.nf @@ -12,35 +12,21 @@ include { PSMRESCORING } from './psmrescoring' include { PSMFDRCONTROL } from './psmfdrcontrol' include { PHOSPHOSCORING } from './phosphoscoring' -if (params.database) { ch_db_for_decoy_creation = file(params.database, checkIfExists: true) } else { exit 1, 'No protein database provided' } - workflow ID { take: file_preparation_results + ch_database_wdecoy main: ch_software_versions = Channel.empty() - // - // MODULE: Generate decoy database - // - (searchengine_in_db, pepidx_in_db, plfq_in_db) = ( params.add_decoys - ? [ Channel.empty(), Channel.empty(), Channel.empty(), Channel.empty() ] - : [ Channel.fromPath(params.database), Channel.fromPath(params.database), Channel.fromPath(params.database) ] ) - if (params.add_decoys) { - DECOYDATABASE( - ch_db_for_decoy_creation - ) - searchengine_in_db = DECOYDATABASE.out.db_decoy - ch_software_versions = ch_software_versions.mix(DECOYDATABASE.out.version.ifEmpty(null)) - } // // SUBWORKFLOW: DatabaseSearchEngines // DATABASESEARCHENGINES ( file_preparation_results, - searchengine_in_db + ch_database_wdecoy ) ch_software_versions = ch_software_versions.mix(DATABASESEARCHENGINES.out.versions.ifEmpty(null)) @@ -79,6 +65,5 @@ workflow ID { emit: id_results = id_results psmrescoring_results = PSMRESCORING.out.results - searchengine_in_db = searchengine_in_db version = ch_software_versions } diff --git a/workflows/lfq.nf b/workflows/lfq.nf index 3f558ec1..743121de 100644 --- a/workflows/lfq.nf +++ b/workflows/lfq.nf @@ -28,6 +28,7 @@ workflow LFQ { take: file_preparation_results ch_expdesign + ch_database_wdecoy main: @@ -36,7 +37,7 @@ workflow LFQ { // // SUBWORKFLOWS: ID // - ID(file_preparation_results) + ID(file_preparation_results, ch_database_wdecoy) ch_software_versions = ch_software_versions.mix(ID.out.version.ifEmpty(null)) // @@ -51,7 +52,7 @@ workflow LFQ { PROTEOMICSLFQ(ch_plfq.mzmls.collect(), ch_plfq.ids.collect(), ch_expdesign, - ID.out.searchengine_in_db + ch_database_wdecoy ) ch_software_versions = ch_software_versions.mix(PROTEOMICSLFQ.out.version.ifEmpty(null)) diff --git a/workflows/quantms.nf b/workflows/quantms.nf index 2493ddbf..03b3b634 100644 --- a/workflows/quantms.nf +++ b/workflows/quantms.nf @@ -37,6 +37,7 @@ include { TMT } from './tmt' include { LFQ } from './lfq' include { DIA } from './dia' include { PMULTIQC as SUMMARYPIPELINE } from '../modules/local/pmultiqc/main' +include { DECOYDATABASE } from 
'../modules/local/openms/decoydatabase/main' // // SUBWORKFLOW: Consisting of a mix of local and nf-core/modules @@ -116,12 +117,33 @@ workflow QUANTMS { ch_pipeline_results = Channel.empty() ch_ids_pmultiqc = Channel.empty() - TMT(ch_fileprep_result.iso, CREATE_INPUT_CHANNEL.out.ch_expdesign) + // + // MODULE: Generate decoy database + // + if (params.database) { ch_db_for_decoy_creation = Channel.from(file(params.database, checkIfExists: true)) } else { exit 1, 'No protein database provided' } + + + CREATE_INPUT_CHANNEL.out.ch_meta_config_iso.mix(CREATE_INPUT_CHANNEL.out.ch_meta_config_lfq).first() // Only run if iso or lfq have at least one file + | combine( ch_db_for_decoy_creation ) // Combine it so now the channel has elements like [potential_trigger_channel_element, actual_db], [potential_trigger_channel_element, actual_db2], etc (there should only be one DB though) + | map { it[-1] } // Remove the "trigger" part + | set {ch_db_for_decoy_creation_or_null} + + searchengine_in_db = params.add_decoys ? Channel.empty() : Channel.fromPath(params.database) + if (params.add_decoys) { + DECOYDATABASE( + ch_db_for_decoy_creation_or_null + ) + searchengine_in_db = DECOYDATABASE.out.db_decoy + ch_versions = ch_versions.mix(DECOYDATABASE.out.version.ifEmpty(null)) + } + + + TMT(ch_fileprep_result.iso, CREATE_INPUT_CHANNEL.out.ch_expdesign, searchengine_in_db) ch_ids_pmultiqc = ch_ids_pmultiqc.mix(TMT.out.ch_pmultiqc_ids) ch_pipeline_results = ch_pipeline_results.mix(TMT.out.final_result) ch_versions = ch_versions.mix(TMT.out.versions.ifEmpty(null)) - LFQ(ch_fileprep_result.lfq, CREATE_INPUT_CHANNEL.out.ch_expdesign) + LFQ(ch_fileprep_result.lfq, CREATE_INPUT_CHANNEL.out.ch_expdesign, searchengine_in_db) ch_ids_pmultiqc = ch_ids_pmultiqc.mix(LFQ.out.ch_pmultiqc_ids) ch_pipeline_results = ch_pipeline_results.mix(LFQ.out.final_result) ch_versions = ch_versions.mix(LFQ.out.versions.ifEmpty(null)) diff --git a/workflows/tmt.nf b/workflows/tmt.nf index 12676d18..2f8700bd 100644 --- a/workflows/tmt.nf +++ b/workflows/tmt.nf @@ -27,15 +27,17 @@ workflow TMT { take: file_preparation_results ch_expdesign + ch_database_wdecoy main: ch_software_versions = Channel.empty() + ch_database_wdecoy.view() // // SUBWORKFLOWS: ID // - ID(file_preparation_results) + ID(file_preparation_results, ch_database_wdecoy) ch_software_versions = ch_software_versions.mix(ID.out.version.ifEmpty(null)) // From 93866a7c4a79c600beb029925c41b0d6ba800c49 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Wed, 27 Apr 2022 14:58:01 +0000 Subject: [PATCH 04/32] remove debug --- subworkflows/local/file_preparation.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/subworkflows/local/file_preparation.nf b/subworkflows/local/file_preparation.nf index 778d61c5..c55a8c9a 100644 --- a/subworkflows/local/file_preparation.nf +++ b/subworkflows/local/file_preparation.nf @@ -42,7 +42,6 @@ workflow FILE_PREPARATION { } .set {branched_input_mzMLs} ch_results = ch_results.mix(branched_input_mzMLs.inputIndexedMzML) - ch_results.view() THERMORAWFILEPARSER( branched_input.raw ) ch_versions = ch_versions.mix(THERMORAWFILEPARSER.out.version) From 8cc8dc648fe0f30a54f41d78647d97850d23f33d Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Wed, 27 Apr 2022 15:02:30 +0000 Subject: [PATCH 05/32] add mamba to gh actions? 
--- .github/workflows/ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 95c41376..93cbbe5f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -44,6 +44,12 @@ jobs: wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ + - name: Install mamba + uses: conda-incubator/setup-miniconda@v2 + with: + python-version: 3.8 + mamba-version: "*" + - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters From 5c2a9e81da550cf65e6d1599bd2ad8ac65705f48 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Wed, 27 Apr 2022 15:09:41 +0000 Subject: [PATCH 06/32] channels? --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 93cbbe5f..3703e4bb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -47,8 +47,9 @@ jobs: - name: Install mamba uses: conda-incubator/setup-miniconda@v2 with: - python-version: 3.8 + python-version: 3.9 mamba-version: "*" + channels: conda-forge,defaults - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required From e88e28e9978d9323f10c659e6ef45121d1647f10 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Wed, 27 Apr 2022 15:15:43 +0000 Subject: [PATCH 07/32] remove debug var --- modules/local/openms/thirdparty/searchenginemsgf/main.nf | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/local/openms/thirdparty/searchenginemsgf/main.nf b/modules/local/openms/thirdparty/searchenginemsgf/main.nf index 6cee7c97..869e0278 100644 --- a/modules/local/openms/thirdparty/searchenginemsgf/main.nf +++ b/modules/local/openms/thirdparty/searchenginemsgf/main.nf @@ -60,7 +60,6 @@ process SEARCHENGINEMSGF { il_equiv = params.IL_equivalent ? "-PeptideIndexing:IL_equivalent" : "" """ - ls -la \$CONDA_PREFIX MSGFPlusAdapter \\ -protocol $params.protocol \\ -in ${mzml_file} \\ From 2434e89992aa08d9e557dd7a6a05129a9e3c1c3a Mon Sep 17 00:00:00 2001 From: Julianus Pfeuffer Date: Wed, 27 Apr 2022 21:47:03 +0200 Subject: [PATCH 08/32] No dev version --- modules/local/openms/proteomicslfq/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/local/openms/proteomicslfq/main.nf b/modules/local/openms/proteomicslfq/main.nf index 546e7d63..e21f6091 100644 --- a/modules/local/openms/proteomicslfq/main.nf +++ b/modules/local/openms/proteomicslfq/main.nf @@ -1,7 +1,7 @@ process PROTEOMICSLFQ { label 'process_high' - conda (params.enable_conda ? "bioconda::openms=2.8.0.dev" : null) + conda (params.enable_conda ? "bioconda::openms=2.8.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'https://depot.galaxyproject.org/singularity/openms:2.8.0--h7ca0330_1' : 'quay.io/biocontainers/openms:2.8.0--h7ca0330_1' }" From 35298dbc6bcc33286f3d10ba86644370a670e3fc Mon Sep 17 00:00:00 2001 From: Julianus Pfeuffer Date: Thu, 28 Apr 2022 00:02:13 +0200 Subject: [PATCH 09/32] Update ci.yml --- .github/workflows/ci.yml | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3703e4bb..1ebcf2d5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,6 +14,7 @@ jobs: NXF_ANSI_LOG: false CAPSULE_LOG: none TEST_PROFILE: ${{ matrix.test_profile }} + EXEC_PROFILE: ${{ matrix.exec_profile }} name: Run pipeline with test data # Only run on push if this is the nf-core dev branch (merged PRs) @@ -29,7 +30,13 @@ jobs: # Test latest edge release of Nextflow - NXF_VER: "" NXF_EDGE: "1" - test_profile: ["test,docker", "test_lfq,docker", "test_dia,docker", "test_localize,docker", "test_lfq,conda", "test_dia,conda"] + test_profile: ["test", "test_lfq", "test_dia", "test_localize"] + exec_profile: ["docker","conda"] + exclude: + - test_profile: test_dia + exec_profile: conda + - test_profile: test_localize + exec_profile: conda steps: - name: Check out pipeline code uses: actions/checkout@v2 @@ -45,6 +52,7 @@ jobs: sudo mv nextflow /usr/local/bin/ - name: Install mamba + if: ${{ matrix.exec_profile }} == "conda" uses: conda-incubator/setup-miniconda@v2 with: python-version: 3.9 @@ -56,13 +64,13 @@ jobs: # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix run: | - nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE --outdir ${TEST_PROFILE}_results + nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results - name: Gather failed logs if: failure() || cancelled() run: | mkdir failed_logs - failed=$(grep "FAILED" ${TEST_PROFILE}_results/pipeline_info/execution_trace.txt | cut -f 2) + failed=$(grep "FAILED" ${TEST_PROFILE}_${EXEC_PROFILE}_results/pipeline_info/execution_trace.txt | cut -f 2) while read -r line ; do cp $(ls work/${line}*/*.log) failed_logs/ | true ; done <<< "$failed" - uses: actions/upload-artifact@v1 if: failure() || cancelled() From 01358f7b6c860e597603b54d46af62c2d2afca92 Mon Sep 17 00:00:00 2001 From: Julianus Pfeuffer Date: Thu, 28 Apr 2022 00:11:34 +0200 Subject: [PATCH 10/32] Update ci.yml --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1ebcf2d5..d6054e3b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -82,8 +82,8 @@ jobs: if: always() name: Upload results with: - name: ${{ env.TEST_PROFILE }}_results - path: ${{ env.TEST_PROFILE }}_results + name: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results + path: ${{ env.TEST_PROFILE }}_${{ env.EXEC_PROFILE }}_results - uses: actions/upload-artifact@v1 if: always() name: Upload log From 369d4507890dee9c8c44d4d7c27469b4ccabdd68 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 10:52:59 +0000 Subject: [PATCH 11/32] lint --- .github/workflows/ci.yml | 2 +- .gitignore | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d6054e3b..a9e43ba1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: - NXF_VER: "" NXF_EDGE: "1" test_profile: ["test", 
"test_lfq", "test_dia", "test_localize"] - exec_profile: ["docker","conda"] + exec_profile: ["docker", "conda"] exclude: - test_profile: test_dia exec_profile: conda diff --git a/.gitignore b/.gitignore index bcfcc4a9..5b8dd1e1 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ testing* .idea/* *.log /build/ +results*/ From 2db0598d019858e335ca90f69fc26d3abf5ffeec Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:22:24 +0000 Subject: [PATCH 12/32] try micromamba --- .github/workflows/ci.yml | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a9e43ba1..861bd4a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,6 +21,7 @@ jobs: if: ${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/quantms') }} runs-on: ubuntu-latest strategy: + fail-fast: false matrix: # Nextflow versions include: @@ -51,19 +52,26 @@ jobs: wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - - name: Install mamba + #- name: Install mamba + # if: ${{ matrix.exec_profile }} == "conda" + # uses: conda-incubator/setup-miniconda@v2 + # with: + # python-version: 3.9 + # mamba-version: "*" + # channels: conda-forge,defaults + + - name: Install micromamba if: ${{ matrix.exec_profile }} == "conda" - uses: conda-incubator/setup-miniconda@v2 - with: - python-version: 3.9 - mamba-version: "*" - channels: conda-forge,defaults + run: | + wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba ./bin/micromamba shell init -s bash -p ~/micromamba + source ~/.bashrc - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix run: | + alias mamba="micromamba" nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results - name: Gather failed logs From 5da459564dedc1d04e178a150f0352eef09bff45 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:24:19 +0000 Subject: [PATCH 13/32] fix micromamba install --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 861bd4a4..ad70791e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -63,7 +63,8 @@ jobs: - name: Install micromamba if: ${{ matrix.exec_profile }} == "conda" run: | - wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba ./bin/micromamba shell init -s bash -p ~/micromamba + wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba + ./bin/micromamba shell init -s bash -p ~/micromamba source ~/.bashrc - name: Run pipeline with test data From 9b836d822858b3ad616154abf6cf924d1357a638 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:26:34 +0000 Subject: [PATCH 14/32] try renaming --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad70791e..c3377c44 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -64,7 +64,8 @@ jobs: if: ${{ matrix.exec_profile }} == "conda" run: | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj 
bin/micromamba - ./bin/micromamba shell init -s bash -p ~/micromamba + mv bin/micromamba bin/mamba + ./bin/mamba shell init -s bash -p ~/mamba source ~/.bashrc - name: Run pipeline with test data @@ -72,7 +73,6 @@ jobs: # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix run: | - alias mamba="micromamba" nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results - name: Gather failed logs From 96b1f5a90f581dccf6c3e4896cf1b712b48ff77c Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:29:16 +0000 Subject: [PATCH 15/32] move init to same step? --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c3377c44..4f19a30c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,14 +65,14 @@ jobs: run: | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba - ./bin/mamba shell init -s bash -p ~/mamba - source ~/.bashrc - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix run: | + ./bin/mamba shell init -s bash -p ~/mamba + source ~/.bashrc nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results - name: Gather failed logs From f6f33bad1ac4d5a5622dc29ff5e9b668ddd40d06 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:37:00 +0000 Subject: [PATCH 16/32] changes --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4f19a30c..7db96ba5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,8 +70,9 @@ jobs: # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters # Remember that you can parallelise this by using strategy.matrix + shell: bash run: | - ./bin/mamba shell init -s bash -p ~/mamba + ./bin/mamba shell init -s bash -p ./mamba source ~/.bashrc nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results From f1b2549ec7fc2f74ab90c7d5531a6f03d55485b4 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:48:35 +0000 Subject: [PATCH 17/32] try more --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7db96ba5..2288cfd2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,6 +65,8 @@ jobs: run: | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba + echo "export PATH=$(pwd)/bin:\$PATH" >> ~/.bashrc + mamba shell init -s bash -p ./mamba - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required @@ -72,7 +74,6 @@ jobs: # Remember that you can parallelise this by using strategy.matrix shell: bash run: | - ./bin/mamba shell init -s bash -p ./mamba source ~/.bashrc nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results From 
793f929ea2067772db45e553883fb6d6188bf166 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:52:27 +0000 Subject: [PATCH 18/32] ffsfsfdfsa --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2288cfd2..7137953e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -66,7 +66,7 @@ jobs: wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba echo "export PATH=$(pwd)/bin:\$PATH" >> ~/.bashrc - mamba shell init -s bash -p ./mamba + ./bin/mamba shell init -s bash -p ./mamba - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required From bc24ec30787924ad0e7941a47bb69507e8972112 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:56:14 +0000 Subject: [PATCH 19/32] dfsfasfas --- .github/workflows/ci.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7137953e..b01f3cb3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,7 +53,7 @@ jobs: sudo mv nextflow /usr/local/bin/ #- name: Install mamba - # if: ${{ matrix.exec_profile }} == "conda" + # if: ${{ matrix.exec_profile == "conda" }} # uses: conda-incubator/setup-miniconda@v2 # with: # python-version: 3.9 @@ -61,12 +61,13 @@ jobs: # channels: conda-forge,defaults - name: Install micromamba - if: ${{ matrix.exec_profile }} == "conda" + if: ${{ matrix.exec_profile == "conda" }} run: | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba echo "export PATH=$(pwd)/bin:\$PATH" >> ~/.bashrc ./bin/mamba shell init -s bash -p ./mamba + cat ~/.bashrc - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required From 0be9aa190eeb043598f4707d8c82fa893161e86c Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 11:58:30 +0000 Subject: [PATCH 20/32] wth --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b01f3cb3..96ad8edf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -72,7 +72,7 @@ jobs: - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters - # Remember that you can parallelise this by using strategy.matrix + # Remember that you can parallelize this by using strategy.matrix shell: bash run: | source ~/.bashrc From c5a45eee22005a57501e3c87fbe01118d29aa9c4 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:02:32 +0000 Subject: [PATCH 21/32] fdsfas --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 96ad8edf..eb94cddf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,7 +53,7 @@ jobs: sudo mv nextflow /usr/local/bin/ #- name: Install mamba - # if: ${{ matrix.exec_profile == "conda" }} + # if: matrix.exec_profile == "conda" # uses: conda-incubator/setup-miniconda@v2 # with: # python-version: 3.9 @@ -61,7 +61,7 @@ jobs: # channels: conda-forge,defaults - name: Install micromamba - if: ${{ matrix.exec_profile == "conda" }} + if: matrix.exec_profile == "conda" run: | wget -qO- 
https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba From 6c8574678cd842b04ef6b2404382f54c41cc571d Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:05:38 +0000 Subject: [PATCH 22/32] quotes???????????????????????????? --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eb94cddf..cd062d99 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,7 +53,7 @@ jobs: sudo mv nextflow /usr/local/bin/ #- name: Install mamba - # if: matrix.exec_profile == "conda" + # if: matrix.exec_profile == 'conda' # uses: conda-incubator/setup-miniconda@v2 # with: # python-version: 3.9 @@ -61,7 +61,7 @@ jobs: # channels: conda-forge,defaults - name: Install micromamba - if: matrix.exec_profile == "conda" + if: matrix.exec_profile == 'conda' run: | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba From 1a150691b86e23f0ae63fdbae85925e24c2c20fd Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:08:25 +0000 Subject: [PATCH 23/32] omg.. --- .github/workflows/ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cd062d99..5a533354 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -76,6 +76,8 @@ jobs: shell: bash run: | source ~/.bashrc + echo $PATH + mamba -v nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results - name: Gather failed logs From 2f76f2c3cc245be3a210c63ef0909017fdccad8f Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:12:00 +0000 Subject: [PATCH 24/32] now? 
--- .github/workflows/ci.yml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5a533354..bf6b63ff 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,19 +65,14 @@ jobs: run: | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba - echo "export PATH=$(pwd)/bin:\$PATH" >> ~/.bashrc - ./bin/mamba shell init -s bash -p ./mamba - cat ~/.bashrc + echo "export PATH=$(pwd)/bin:$(pwd)/mamba/bin:\$PATH" >> $GITHUB_PATH + mamba shell init -s bash -p ./mamba - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters # Remember that you can parallelize this by using strategy.matrix - shell: bash run: | - source ~/.bashrc - echo $PATH - mamba -v nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results - name: Gather failed logs From b2e9d9ff207df66459825da5223c59634827088e Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:14:22 +0000 Subject: [PATCH 25/32] now --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bf6b63ff..8bc6f425 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,7 +65,8 @@ jobs: run: | wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba mv bin/micromamba bin/mamba - echo "export PATH=$(pwd)/bin:$(pwd)/mamba/bin:\$PATH" >> $GITHUB_PATH + echo "$(pwd)/bin" >> $GITHUB_PATH + echo "$(pwd)/mamba/bin" >> $GITHUB_PATH mamba shell init -s bash -p ./mamba - name: Run pipeline with test data From b29f5a2ab18947433aef67f73a76f1268a2281cf Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:19:10 +0000 Subject: [PATCH 26/32] dsadasdasdasdasads --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8bc6f425..a981aafa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -67,6 +67,7 @@ jobs: mv bin/micromamba bin/mamba echo "$(pwd)/bin" >> $GITHUB_PATH echo "$(pwd)/mamba/bin" >> $GITHUB_PATH + echo $GITHUB_PATH mamba shell init -s bash -p ./mamba - name: Run pipeline with test data From 0f395f2e834ae8f73814e197511eb0311e20c8b2 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:20:09 +0000 Subject: [PATCH 27/32] dsadasdasad --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a981aafa..f6e65967 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -67,7 +67,7 @@ jobs: mv bin/micromamba bin/mamba echo "$(pwd)/bin" >> $GITHUB_PATH echo "$(pwd)/mamba/bin" >> $GITHUB_PATH - echo $GITHUB_PATH + cat $GITHUB_PATH mamba shell init -s bash -p ./mamba - name: Run pipeline with test data From 7a89f89e700689f90e59699694f8dcc1429cc753 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 12:22:46 +0000 Subject: [PATCH 28/32] f gh --- .github/workflows/ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f6e65967..23a63dde 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -67,8 +67,7 @@ jobs: mv bin/micromamba bin/mamba echo 
"$(pwd)/bin" >> $GITHUB_PATH echo "$(pwd)/mamba/bin" >> $GITHUB_PATH - cat $GITHUB_PATH - mamba shell init -s bash -p ./mamba + ./bin/mamba shell init -s bash -p ./mamba - name: Run pipeline with test data # TODO nf-core: You can customise CI pipeline run tests as required From a71d4b79a2c04c7d07278ddd2266fae16a680ef4 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 14:46:47 +0000 Subject: [PATCH 29/32] minor fixes and try singlethread for mamba --- .github/workflows/ci.yml | 41 ++++++++++++++++++++++++--------------- conf/mambaci.config | 12 ++++++++++++ conf/test.config | 1 + conf/test_dia.config | 1 + conf/test_full.config | 1 + conf/test_lfq.config | 1 + conf/test_localize.config | 1 + lib/WorkflowMain.groovy | 14 +++++++++++++ nextflow.config | 5 +++-- 9 files changed, 59 insertions(+), 18 deletions(-) create mode 100644 conf/mambaci.config diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 23a63dde..b3393fea 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -52,30 +52,39 @@ jobs: wget -qO- get.nextflow.io | bash sudo mv nextflow /usr/local/bin/ - #- name: Install mamba - # if: matrix.exec_profile == 'conda' - # uses: conda-incubator/setup-miniconda@v2 - # with: - # python-version: 3.9 - # mamba-version: "*" - # channels: conda-forge,defaults - - - name: Install micromamba + - name: Install mamba if: matrix.exec_profile == 'conda' - run: | - wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba - mv bin/micromamba bin/mamba - echo "$(pwd)/bin" >> $GITHUB_PATH - echo "$(pwd)/mamba/bin" >> $GITHUB_PATH - ./bin/mamba shell init -s bash -p ./mamba + uses: conda-incubator/setup-miniconda@v2 + with: + python-version: 3.9 + mamba-version: "*" + channels: conda-forge,defaults + + #- name: Install micromamba as mamba + # if: matrix.exec_profile == 'conda' + # run: | + # wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba + # mv bin/micromamba bin/mamba + # echo "$(pwd)/bin" >> $GITHUB_PATH + # echo "$(pwd)/mamba/bin" >> $GITHUB_PATH + # ./bin/mamba shell init -s bash -p ./mamba - name: Run pipeline with test data + if: matrix.exec_profile != 'conda' # TODO nf-core: You can customise CI pipeline run tests as required # For example: adding multiple test runs with different parameters - # Remember that you can parallelize this by using strategy.matrix + # Remember that you can parallelise this by using strategy.matrix run: | nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results + - name: Run pipeline with test data in conda profile (and single-threaded) + if: matrix.exec_profile == 'conda' + # TODO nf-core: You can customise CI pipeline run tests as required + # For example: adding multiple test runs with different parameters + # Remember that you can parallelise this by using strategy.matrix + run: | + nextflow run ${GITHUB_WORKSPACE} -profile $TEST_PROFILE,$EXEC_PROFILE,mambaci --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results + - name: Gather failed logs if: failure() || cancelled() run: | diff --git a/conf/mambaci.config b/conf/mambaci.config new file mode 100644 index 00000000..03b9d9fc --- /dev/null +++ b/conf/mambaci.config @@ -0,0 +1,12 @@ +## We need this because mamba is completely bugged +## https://github.com/mamba-org/mamba/issues/1429 +## and nextflow does not support micromamba +## https://github.com/mamba-org/mamba/issues/1654 +process { + executor = 'local' + 
maxForks = 1 +} +executor { + pollInterval = '3sec' + queueSize = 1 +} diff --git a/conf/test.config b/conf/test.config index 6be93226..bb1d959d 100644 --- a/conf/test.config +++ b/conf/test.config @@ -20,6 +20,7 @@ params { max_time = '6.h' outdir = "./results_iso" + tracedir = "${params.outdir}/pipeline_info" // Input data input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/tmt_ci/PXD000001.sdrf.tsv' diff --git a/conf/test_dia.config b/conf/test_dia.config index 8ab5b74b..1b27d1f5 100644 --- a/conf/test_dia.config +++ b/conf/test_dia.config @@ -20,6 +20,7 @@ params { max_time = 48.h outdir = './results_dia' + tracedir = "${params.outdir}/pipeline_info" // Input data input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/lfq_ci/PXD026600/PXD026600.sdrf.tsv' diff --git a/conf/test_full.config b/conf/test_full.config index 54af6b23..1a2e1f1b 100644 --- a/conf/test_full.config +++ b/conf/test_full.config @@ -15,6 +15,7 @@ params { config_profile_description = 'Full test dataset in isotopic labelling mode to check pipeline function and sanity of results' outdir = "./results_iso_full" + tracedir = "${params.outdir}/pipeline_info" // Input data for full size test input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/tmt_ci/PXD000001.sdrf.tsv' diff --git a/conf/test_lfq.config b/conf/test_lfq.config index 41959ef2..4e4e61f0 100644 --- a/conf/test_lfq.config +++ b/conf/test_lfq.config @@ -20,6 +20,7 @@ params { max_time = 48.h outdir = "./results_lfq" + tracedir = "${params.outdir}/pipeline_info" // Input data labelling_type = "label free sample" diff --git a/conf/test_localize.config b/conf/test_localize.config index 4c92403a..b8c71108 100644 --- a/conf/test_localize.config +++ b/conf/test_localize.config @@ -20,6 +20,7 @@ params { max_time = 1.h outdir = "./results_localize" + tracedir = "${params.outdir}/pipeline_info" // Input data input = 'https://raw.githubusercontent.com/nf-core/test-datasets/quantms/testdata/lfq_ci_phospho/test_phospho.sdrf' diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy index 8fec508f..d782c776 100755 --- a/lib/WorkflowMain.groovy +++ b/lib/WorkflowMain.groovy @@ -78,6 +78,20 @@ class WorkflowMain { System.exit(1) } + // Check input has been provided + if (!params.outdir) { + log.error "Please provide an outdir to the pipeline e.g. '--outdir ./results'" + System.exit(1) + } + + if (params.tracedir == "null/pipeline_info") + { + log.error """Error: Your tracedir is `null/pipeline_info`, this means you probably set outdir in a way that does not affect the default + `\$params.outdir/pipeline_info` (e.g., by specifying outdir in a profile instead of the commandline or through a `-params-file`. + Either set outdir in a correct way, or redefine tracedir as well (e.g., in your profile).""" + System.exit(1) + } + // check fasta database has been provided if (!params.database) { log.error "Please provide an fasta database to the pipeline e.g. 
'--database *.fasta'" diff --git a/nextflow.config b/nextflow.config index e68ab802..753da5f6 100644 --- a/nextflow.config +++ b/nextflow.config @@ -217,7 +217,7 @@ profiles { docker.enabled = false singularity.enabled = false conda.createTimeout = '1 h' - conda.useMamba = true + conda.useMicromamba = true process.executor = 'lsf' } conda { @@ -227,7 +227,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false - conda.useMamba = true + conda.useMicromamba = true } docker { docker.enabled = true @@ -271,6 +271,7 @@ profiles { test_full { includeConfig 'conf/test_full.config' } test_lfq { includeConfig 'conf/test_lfq.config' } test_dia { includeConfig 'conf/test_dia.config' } + mambaci { includeConfig 'conf/mambaci.config' } } // Load module config after profile, so they can depend on overwritten input parameters specific for each profile. From 0e782c63426089c48c877049faad679a4c067942 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 14:49:53 +0000 Subject: [PATCH 30/32] lint --- conf/mambaci.config | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/conf/mambaci.config b/conf/mambaci.config index 03b9d9fc..37255427 100644 --- a/conf/mambaci.config +++ b/conf/mambaci.config @@ -1,10 +1,10 @@ -## We need this because mamba is completely bugged -## https://github.com/mamba-org/mamba/issues/1429 -## and nextflow does not support micromamba -## https://github.com/mamba-org/mamba/issues/1654 +// We need this because mamba is completely bugged +// https://github.com/mamba-org/mamba/issues/1429 +// and nextflow does not support micromamba +// https://github.com/mamba-org/mamba/issues/1654 process { - executor = 'local' - maxForks = 1 + executor = 'local' + maxForks = 1 } executor { pollInterval = '3sec' From de6447af9742685b387969e0692ba420d1960b11 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 15:31:11 +0000 Subject: [PATCH 31/32] use mamba again --- nextflow.config | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nextflow.config b/nextflow.config index 753da5f6..141584c9 100644 --- a/nextflow.config +++ b/nextflow.config @@ -217,7 +217,7 @@ profiles { docker.enabled = false singularity.enabled = false conda.createTimeout = '1 h' - conda.useMicromamba = true + conda.useMamba = true process.executor = 'lsf' } conda { @@ -227,7 +227,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false - conda.useMicromamba = true + conda.useMamba = true } docker { docker.enabled = true From 8a4c526f46a7bf780f6aa6b7aed05eba24438cc6 Mon Sep 17 00:00:00 2001 From: jpfeuffer Date: Thu, 28 Apr 2022 15:53:50 +0000 Subject: [PATCH 32/32] bigger pollinterval? --- conf/mambaci.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conf/mambaci.config b/conf/mambaci.config index 37255427..a9bbeae6 100644 --- a/conf/mambaci.config +++ b/conf/mambaci.config @@ -7,6 +7,6 @@ process { maxForks = 1 } executor { - pollInterval = '3sec' + pollInterval = '5sec' queueSize = 1 }
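
Note on reproducing the conda CI jobs locally: the snippet below is a minimal sketch that mirrors one entry of the final CI matrix above, i.e. the "Run pipeline with test data in conda profile (and single-threaded)" step, combining a test profile with the conda execution profile and the single-threaded mambaci workaround config introduced in these patches. It assumes a local checkout of the pipeline, Nextflow on PATH, and a working mamba/micromamba install; the variable names and the run-from-checkout invocation are illustrative, while the profile names come from the patches.

    # hypothetical local equivalent of one conda entry of the CI matrix
    TEST_PROFILE=test_lfq
    EXEC_PROFILE=conda
    nextflow run . \
        -profile ${TEST_PROFILE},${EXEC_PROFILE},mambaci \
        --outdir ${TEST_PROFILE}_${EXEC_PROFILE}_results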