From c6deebe44a1b7a913dccda7e3fe3c586b7b1acb5 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Tue, 9 Apr 2019 13:54:55 +0200 Subject: [PATCH 001/124] Corrects blacklist filtering --- bin/sync | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bin/sync b/bin/sync index c3b37528f8..97ff6e5b8d 100755 --- a/bin/sync +++ b/bin/sync @@ -55,7 +55,7 @@ def filter_blacklisted_pipelines_from_list(pipelines, blacklisted_pipelines): def fetch_black_listed_pipelines_from_file(file_path): with open(file_path) as fh: blacklist = json.load(fh) - return blacklist + return blacklist.get('pipelines') def fetch_nfcore_workflows_from_website(url): @@ -97,10 +97,12 @@ def main(): assert os.environ['NF_CORE_BOT'] blacklisted_pipeline_names = fetch_black_listed_pipelines_from_file(PATH_PARENT_DIR + "/blacklist.json") + print(blacklisted_pipeline_names) pipelines = fetch_nfcore_workflows_from_website(NF_CORE_PIPELINE_INFO) filtered_pipelines = filter_blacklisted_pipelines_from_list(pipelines, blacklisted_pipeline_names) + print(filtered_pipelines) for pipeline in filtered_pipelines: print("Update template branch for pipeline '{pipeline}'... ".format(pipeline=pipeline['name'])) From b8076378fa3b0513475532973f8ee2443075cced Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Tue, 9 Apr 2019 14:21:57 +0200 Subject: [PATCH 002/124] Uses meta information from dev branch instead of master --- bin/syncutils/template.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/bin/syncutils/template.py b/bin/syncutils/template.py index ddada5cfb0..69fc1bca91 100644 --- a/bin/syncutils/template.py +++ b/bin/syncutils/template.py @@ -42,8 +42,8 @@ def context_from_nextflow(self, nf_project_dir): Returns: A cookiecutter-readable context (Python dictionary) """ # Check if we are on "master" (main pipeline code) - if self.repo.active_branch.name != "master": - self.repo.git.checkout("origin/master", b="master") + if self.repo.active_branch.name != "dev": + self.repo.git.checkout("origin/dev", b="dev") # Fetch the config variables from the Nextflow pipeline config = utils.fetch_wf_config(wf_path=nf_project_dir) @@ -66,8 +66,6 @@ def update_child_template(self, templatedir, target_dir, context=None): shutil.rmtree(os.path.join(target_dir, f)) except: os.remove(os.path.join(target_dir, f)) - print(target_dir) - print(context.get('author')) # Create the new template structure nf_core.create.PipelineCreate( name=context.get('pipeline_name'), From 267b9ceab8a9dfa8fc1020ad7e8fee84534b9ddf Mon Sep 17 00:00:00 2001 From: maxibor Date: Tue, 9 Apr 2019 15:56:29 +0200 Subject: [PATCH 003/124] add bwa and bt2 index to igenomes config --- .../conf/igenomes.config | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config index d19e61f4b1..392f250734 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/igenomes.config @@ -16,132 +16,177 @@ params { fasta = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/Bowtie2Index/" + bwa = 
"${params.igenomes_base}/Homo_sapiens/Ensembl/GRCh37/Sequence/BWAIndex/" } 'GRCm38' { bed12 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCh37/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCh37/Sequence/BWAIndex/" } 'TAIR10' { bed12 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Arabidopsis_thaliana/Ensembl/TAIR10/Sequence/BWAIndex/" } 'EB2' { bed12 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Bacillus_subtilis_168/Ensembl/EB2/Sequence/BWAIndex/" } 'UMD3.1' { bed12 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Bos_taurus/Ensembl/UMD3.1/Sequence/BWAIndex/" + } 'WBcel235' { bed12 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Caenorhabditis_elegans/Ensembl/WBcel235/Sequence/BWAIndex/" } 'CanFam3.1' { bed12 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Canis_familiaris/Ensembl/CanFam3.1/Sequence/BWAIndex/" } 'GRCz10' { bed12 = 
"${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Danio_rerio/Ensembl/GRCz10/Sequence/BWAIndex/" } 'BDGP6' { bed12 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Drosophila_melanogaster/Ensembl/BDGP6/Sequence/BWAIndex/" } 'EquCab2' { bed12 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Equus_caballus/Ensembl/EquCab2/Sequence/BWAIndex/" } 'EB1' { bed12 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Escherichia_coli_K_12_DH10B/Ensembl/EB1/Sequence/BWAIndex/" } 'Galgal4' { bed12 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Gallus_gallus/Ensembl/Galgal4/Sequence/BWAIndex/" } 'Gm01' { bed12 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Glycine_max/Ensembl/Gm01/Sequence/BWAIndex/" } 'Mmul_1' { bed12 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/WholeGenomeFasta/genome.fa" gtf = 
"${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Macaca_mulatta/Ensembl/Mmul_1/Sequence/BWAIndex/" } 'IRGSP-1.0' { bed12 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Oryza_sativa_japonica/Ensembl/IRGSP-1.0/Sequence/BWAIndex/" } 'CHIMP2.1.4' { bed12 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Pan_troglodytes/Ensembl/CHIMP2.1.4/Sequence/BWAIndex/" } 'Rnor_6.0' { bed12 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Rattus_norvegicus/Ensembl/Rnor_6.0/Sequence/BWAIndex/" } 'R64-1-1' { bed12 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Saccharomyces_cerevisiae/Ensembl/R64-1-1/Sequence/BWAIndex/" } 'EF2' { bed12 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Schizosaccharomyces_pombe/Ensembl/EF2/Sequence/BWAIndex/" } 'Sbi1' { bed12 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/WholeGenomeFasta/genome.fa" gtf = 
"${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Sorghum_bicolor/Ensembl/Sbi1/Sequence/BWAIndex/" } 'Sscrofa10.2' { bed12 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Sus_scrofa/Ensembl/Sscrofa10.2/Sequence/BWAIndex/" } 'AGPv3' { bed12 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.bed" fasta = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/WholeGenomeFasta/genome.fa" gtf = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Annotation/Genes/genes.gtf" star = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/STARIndex/" + bowtie2 = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/Bowtie2Index/" + bwa = "${params.igenomes_base}/Zea_mays/Ensembl/AGPv3/Sequence/BWAIndex/" } } } From ee5e9292014cf2015f617d2b5cca7ba1e97b741b Mon Sep 17 00:00:00 2001 From: maxibor Date: Tue, 9 Apr 2019 16:53:45 +0200 Subject: [PATCH 004/124] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d6e3847e01..9947f7ba4b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,7 @@ * The order of conda channels is now correct, avoiding occasional erroneous errors that packages weren't found ([#207](https://github.com/nf-core/tools/issues/207)) * Add reporting of ignored errored process * As a solution for [#103](https://github.com/nf-core/tools/issues/103)) +* Add Bowtie2 and BWA in iGenome config file template ## [v1.5](https://github.com/nf-core/tools/releases/tag/1.5) - 2019-03-13 Iron Shark From 8e32e69f66ec967bfdf9261bc2e2b02fd918a44b Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Tue, 9 Apr 2019 18:05:16 +0200 Subject: [PATCH 005/124] Enables single pipeline sync via command line --- bin/sync | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/bin/sync b/bin/sync index 97ff6e5b8d..6dda0ed626 100755 --- a/bin/sync +++ b/bin/sync @@ -97,12 +97,14 @@ def main(): assert os.environ['NF_CORE_BOT'] blacklisted_pipeline_names = fetch_black_listed_pipelines_from_file(PATH_PARENT_DIR + "/blacklist.json") - print(blacklisted_pipeline_names) pipelines = fetch_nfcore_workflows_from_website(NF_CORE_PIPELINE_INFO) - filtered_pipelines = filter_blacklisted_pipelines_from_list(pipelines, blacklisted_pipeline_names) - print(filtered_pipelines) + if len(sys.argv) > 1: + pipeline_to_sync = sys.argv[1] + filtered_pipelines = [pipeline for pipeline in pipelines if pipeline_to_sync in pipeline.get('name')] + else: + filtered_pipelines = filter_blacklisted_pipelines_from_list(pipelines, blacklisted_pipeline_names) for pipeline in filtered_pipelines: print("Update template branch for pipeline '{pipeline}'... 
".format(pipeline=pipeline['name'])) From ef8d98730cda06eab901223e4b246c0cea66f19b Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Tue, 9 Apr 2019 18:07:57 +0200 Subject: [PATCH 006/124] Updates changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d6e3847e01..798d4edf33 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # nf-core/tools: Changelog +## v1.7dev + +#### Syncing +* Can now sync a targeted pipeline via command-line + ## v1.6 #### Syncing From adce02e19b21d0f56e5d8bc1c0b4b72881caf5b1 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Wed, 10 Apr 2019 08:48:15 +0200 Subject: [PATCH 007/124] Corrects markdown error --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 798d4edf33..b1628bc5d7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ ## v1.7dev #### Syncing -* Can now sync a targeted pipeline via command-line +* Can now sync a targeted pipeline via command-line ## v1.6 From 58f124d7a9723618f98e12f91ac24d41b825d1f8 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Mon, 15 Apr 2019 15:16:29 +0200 Subject: [PATCH 008/124] Fix typo in tools readme --- CHANGELOG.md | 3 +++ README.md | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 36a923ba93..91326837ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,9 @@ #### Syncing * Can now sync a targeted pipeline via command-line +#### Other +* Fix small typo in central readme of tools for future releases + ## v1.6 #### Syncing diff --git a/README.md b/README.md index 90ee8b1f12..d0001217f3 100644 --- a/README.md +++ b/README.md @@ -297,7 +297,7 @@ The `create` subcommand makes a new workflow using the nf-core base template. With a given pipeline name, description and author, it makes a starter pipeline which follows nf-core best practices. After creating the files, the command initialises the folder as a git repository and makes an initial commit. This first "vanilla" commit which is identical to the output from the templating tool is important, as it allows us to keep your pipeline in sync with the base template in the future. -See the [nf-core syncing docs](http://nf-co.re/sync) for more information. +See the [nf-core syncing docs](https://nf-co.re/developers/sync) for more information. ```console $ nf-core create From 27b9669bd4b36ec55032c397d02f890b7ead91a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Patrick=20H=C3=BCther?= Date: Tue, 23 Apr 2019 08:50:44 +0200 Subject: [PATCH 009/124] Specify yaml Loader argument Since PyYAML 5.1, a warning is thrown when yaml.load() is called without a loader argument. 
More info: https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation --- nf_core/lint.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index cd90406464..f5a02d79c5 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -261,7 +261,7 @@ def pf(file_path): # Load and parse files for later if 'environment.yml' in self.files: with open(os.path.join(self.path, 'environment.yml'), 'r') as fh: - self.conda_config = yaml.load(fh) + self.conda_config = yaml.load(fh, Loader=yaml.BaseLoader) def check_docker(self): """Checks that Dockerfile contains the string ``FROM``.""" @@ -446,7 +446,7 @@ def check_ci_config(self): fn = os.path.join(self.path, cf) if os.path.isfile(fn): with open(fn, 'r') as fh: - ciconf = yaml.load(fh) + ciconf = yaml.load(fh, Loader=yaml.BaseLoader) # Check that we have the master branch protection travisMasterCheck = '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' try: From 025aca5aaa578bf7804c4f2889008ad9cddfab15 Mon Sep 17 00:00:00 2001 From: phue Date: Tue, 23 Apr 2019 10:59:10 +0000 Subject: [PATCH 010/124] add Loader to remaining yaml.load() calls --- CHANGELOG.md | 1 + nf_core/licences.py | 2 +- tests/test_lint.py | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 91326837ef..5c64375b8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ #### Other * Fix small typo in central readme of tools for future releases +* Added yaml `Loader=` parameter to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) ## v1.6 diff --git a/nf_core/licences.py b/nf_core/licences.py index 293c5e11b6..b2f5f8ea03 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -44,7 +44,7 @@ def fetch_conda_licences(self): raise LookupError("Couldn't find pipeline nf-core/{}".format(self.pipeline)) lint_obj = nf_core.lint.PipelineLint(self.pipeline) - lint_obj.conda_config = yaml.load(response.text) + lint_obj.conda_config = yaml.load(response.text, Loader=yaml.BaseLoader) # Check conda dependency list for dep in lint_obj.conda_config.get('dependencies', []): try: diff --git a/tests/test_lint.py b/tests/test_lint.py index c179547019..edd5b2934b 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -260,7 +260,7 @@ def test_conda_env_pass(self): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] with open(os.path.join(PATH_WORKING_EXAMPLE, 'environment.yml'), 'r') as fh: - lint_obj.conda_config = yaml.load(fh) + lint_obj.conda_config = yaml.load(fh, Loader=yaml.BaseLoader) lint_obj.pipeline_name = 'tools' lint_obj.config['manifest.version'] = '0.4' lint_obj.check_conda_env_yaml() @@ -272,7 +272,7 @@ def test_conda_env_fail(self): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] with open(os.path.join(PATH_WORKING_EXAMPLE, 'environment.yml'), 'r') as fh: - lint_obj.conda_config = yaml.load(fh) + lint_obj.conda_config = yaml.load(fh, Loader=yaml.BaseLoader) lint_obj.conda_config['dependencies'] = ['fastqc', 'multiqc=0.9', 'notapackaage=0.4'] lint_obj.pipeline_name = 'not_tools' lint_obj.config['manifest.version'] = '0.23' From 471659d26ef4e8c0bd4bb966be522d6bb5299787 Mon Sep 17 00:00:00 2001 From: Nurlan Kerimov <34273025+kerimoff@users.noreply.github.com> Date: 
Thu, 25 Apr 2019 17:47:45 +0300 Subject: [PATCH 011/124] Fixed some 404 end links FIxed https://nf-co.re/developers/sync and https://nf-co.re/developers/adding_pipelines links --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 90ee8b1f12..84c3f8b3e8 100644 --- a/README.md +++ b/README.md @@ -297,7 +297,7 @@ The `create` subcommand makes a new workflow using the nf-core base template. With a given pipeline name, description and author, it makes a starter pipeline which follows nf-core best practices. After creating the files, the command initialises the folder as a git repository and makes an initial commit. This first "vanilla" commit which is identical to the output from the templating tool is important, as it allows us to keep your pipeline in sync with the base template in the future. -See the [nf-core syncing docs](http://nf-co.re/sync) for more information. +See the [nf-core syncing docs](https://nf-co.re/developers/sync) for more information. ```console $ nf-core create @@ -332,7 +332,7 @@ git push --set-upstream origin master You can then continue to edit, commit and push normally as you build your pipeline. -Please see the [nf-core documentation](https://nf-co.re/adding_pipelines) for a full walkthrough of how to create a new nf-core workflow. +Please see the [nf-core documentation](https://nf-co.re/developers/adding_pipelines) for a full walkthrough of how to create a new nf-core workflow. Note that if the required arguments for `nf-core create` are not given, it will interactively prompt for them. If you prefer, you can supply them as command line arguments. See `nf-core create --help` for more information. From 8946dc988792c844f4018c993fdef5711705b67a Mon Sep 17 00:00:00 2001 From: phue Date: Sun, 28 Apr 2019 19:43:06 +0200 Subject: [PATCH 012/124] use yaml.safe_load() to avoid unsafe loading of yaml --- CHANGELOG.md | 2 +- nf_core/licences.py | 2 +- nf_core/lint.py | 4 ++-- tests/test_lint.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c64375b8b..d6573a119e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,7 @@ #### Other * Fix small typo in central readme of tools for future releases -* Added yaml `Loader=` parameter to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) +* Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) ## v1.6 diff --git a/nf_core/licences.py b/nf_core/licences.py index b2f5f8ea03..9fceeb011e 100644 --- a/nf_core/licences.py +++ b/nf_core/licences.py @@ -44,7 +44,7 @@ def fetch_conda_licences(self): raise LookupError("Couldn't find pipeline nf-core/{}".format(self.pipeline)) lint_obj = nf_core.lint.PipelineLint(self.pipeline) - lint_obj.conda_config = yaml.load(response.text, Loader=yaml.BaseLoader) + lint_obj.conda_config = yaml.safe_load(response.text) # Check conda dependency list for dep in lint_obj.conda_config.get('dependencies', []): try: diff --git a/nf_core/lint.py b/nf_core/lint.py index f5a02d79c5..ace64ea9de 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -261,7 +261,7 @@ def pf(file_path): # Load and parse files for later if 'environment.yml' in self.files: with open(os.path.join(self.path, 'environment.yml'), 'r') as fh: - self.conda_config = yaml.load(fh, Loader=yaml.BaseLoader) + 
self.conda_config = yaml.safe_load(fh) def check_docker(self): """Checks that Dockerfile contains the string ``FROM``.""" @@ -446,7 +446,7 @@ def check_ci_config(self): fn = os.path.join(self.path, cf) if os.path.isfile(fn): with open(fn, 'r') as fh: - ciconf = yaml.load(fh, Loader=yaml.BaseLoader) + ciconf = yaml.safe_load(fh) # Check that we have the master branch protection travisMasterCheck = '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' try: diff --git a/tests/test_lint.py b/tests/test_lint.py index edd5b2934b..273f2232af 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -260,7 +260,7 @@ def test_conda_env_pass(self): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] with open(os.path.join(PATH_WORKING_EXAMPLE, 'environment.yml'), 'r') as fh: - lint_obj.conda_config = yaml.load(fh, Loader=yaml.BaseLoader) + lint_obj.conda_config = yaml.safe_load(fh) lint_obj.pipeline_name = 'tools' lint_obj.config['manifest.version'] = '0.4' lint_obj.check_conda_env_yaml() @@ -272,7 +272,7 @@ def test_conda_env_fail(self): lint_obj = nf_core.lint.PipelineLint(PATH_WORKING_EXAMPLE) lint_obj.files = ['environment.yml'] with open(os.path.join(PATH_WORKING_EXAMPLE, 'environment.yml'), 'r') as fh: - lint_obj.conda_config = yaml.load(fh, Loader=yaml.BaseLoader) + lint_obj.conda_config = yaml.safe_load(fh) lint_obj.conda_config['dependencies'] = ['fastqc', 'multiqc=0.9', 'notapackaage=0.4'] lint_obj.pipeline_name = 'not_tools' lint_obj.config['manifest.version'] = '0.23' From 1e767869a6213cf799259581a3da7621520fa64c Mon Sep 17 00:00:00 2001 From: drpatelh Date: Thu, 2 May 2019 22:41:56 +0100 Subject: [PATCH 013/124] Add nf-core citation --- README.md | 5 +++++ .../{{cookiecutter.name_noslash}}/README.md | 9 +++++++++ 2 files changed, 14 insertions(+) diff --git a/README.md b/README.md index 84c3f8b3e8..fcd7414cb0 100644 --- a/README.md +++ b/README.md @@ -429,3 +429,8 @@ INFO: Updating version in Singularity ``` To change the required version of Nextflow instead of the pipeline version number, use the flag `--nextflow`. + +## Citation + +If you use `nf-core tools` in your work, please cite it as follows: +Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tomasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index db24d0d1c4..b345988eb9 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -28,3 +28,12 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline ## Credits {{ cookiecutter.name }} was originally written by {{ cookiecutter.author }}. + + +## Citation + + + + +You can cite `nf-core` as follows: +Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tomasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). 
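The PyYAML changes in patches 009-012 above all replace bare `yaml.load()` calls. As a rough, standalone illustration of the behaviour they target (not taken from any of the patches; the `environment.yml` path is only a stand-in), the difference looks like this:

```python
import yaml

# Old pattern: yaml.load(fh) with no Loader argument. PyYAML >= 5.1 emits a
# deprecation warning for this, and the full loader can construct arbitrary
# Python objects from untrusted input.
# config = yaml.load(fh)

# Safer pattern used by the patches: only plain dicts, lists and scalars are built.
with open("environment.yml") as fh:
    config = yaml.safe_load(fh)

print(config.get("dependencies", []))
```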
From ecc54fa838ae2371f4595f081ba6543f648ab0f8 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Thu, 2 May 2019 22:44:07 +0100 Subject: [PATCH 014/124] Change CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d6573a119e..0f9ec18f99 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ #### Other * Fix small typo in central readme of tools for future releases * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) +* Add `nf-core` citation ## v1.6 From 1f88c31e36ef5829da144e7de2e635964ee44529 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 3 May 2019 09:26:32 +0100 Subject: [PATCH 015/124] Changed Paolos name by Deed Poll --- README.md | 4 ++-- .../pipeline-template/{{cookiecutter.name_noslash}}/README.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index fcd7414cb0..277ea8f15a 100644 --- a/README.md +++ b/README.md @@ -432,5 +432,5 @@ To change the required version of Nextflow instead of the pipeline version numbe ## Citation -If you use `nf-core tools` in your work, please cite it as follows: -Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tomasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). +If you use `nf-core tools` in your work, please cite the `nf-core` preprint as follows: +Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index b345988eb9..29d5f71988 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -36,4 +36,4 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline You can cite `nf-core` as follows: -Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tomasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). +Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). 
From c0e7a4d4cc6796f46937b1a657a0e8269ecc5592 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 3 May 2019 09:31:04 +0100 Subject: [PATCH 016/124] Add pre-print --- .../pipeline-template/{{cookiecutter.name_noslash}}/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index 29d5f71988..8e68f5f4a1 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -35,5 +35,5 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline -You can cite `nf-core` as follows: +You can cite the `nf-core` pre-print as follows: Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). From 13435be7fdf4994ebb82b54c1868482c76637967 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 3 May 2019 09:34:49 +0100 Subject: [PATCH 017/124] I dont speak Italian --- README.md | 2 +- .../pipeline-template/{{cookiecutter.name_noslash}}/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 277ea8f15a..0a9e43f257 100644 --- a/README.md +++ b/README.md @@ -433,4 +433,4 @@ To change the required version of Nextflow instead of the pipeline version numbe ## Citation If you use `nf-core tools` in your work, please cite the `nf-core` preprint as follows: -Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). +Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index 8e68f5f4a1..cd760421b7 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -36,4 +36,4 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline You can cite the `nf-core` pre-print as follows: -Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommasso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). +Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). 
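The patches that follow (018 onwards) repeatedly edit `bin/blacklist.json`, and patch 001 above reads that file via `blacklist.get('pipelines')`. Inferring from those diffs, the file is a plain JSON object holding a single `pipelines` array. A minimal sketch of reading and applying such a file, with made-up pipeline names and no claim to match the real `bin/sync` logic:

```python
import json

# Assumed file layout, e.g. {"pipelines": ["exoseq", "vipr", "smrnaseq"]}
with open("blacklist.json") as fh:
    blacklisted = json.load(fh).get("pipelines", [])

# Hypothetical pipeline records, shaped loosely like the nf-core pipeline listing
pipelines = [{"name": "rnaseq"}, {"name": "exoseq"}]
kept = [p for p in pipelines if p["name"] not in blacklisted]
print(kept)  # -> [{'name': 'rnaseq'}]
```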
From fbe18711374c7821d0575c7616dd6a6afb1df16e Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Sat, 4 May 2019 18:04:00 +0200 Subject: [PATCH 018/124] Drop atacseq --- bin/blacklist.json | 2 -- 1 file changed, 2 deletions(-) diff --git a/bin/blacklist.json b/bin/blacklist.json index 515750fb99..92904e5191 100644 --- a/bin/blacklist.json +++ b/bin/blacklist.json @@ -1,7 +1,5 @@ { "pipelines": [ - "atacseq", - "bcellmagic", "chipseq", "epitopeprediction", "exoseq", From 2f31aa6272aef121d32415b215240e1ef9427f17 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Sat, 4 May 2019 18:04:15 +0200 Subject: [PATCH 019/124] Drop nascent --- bin/blacklist.json | 1 - 1 file changed, 1 deletion(-) diff --git a/bin/blacklist.json b/bin/blacklist.json index 92904e5191..c644fbe49f 100644 --- a/bin/blacklist.json +++ b/bin/blacklist.json @@ -3,7 +3,6 @@ "chipseq", "epitopeprediction", "exoseq", - "nascent", "neutronstar", "smrnaseq", "vipr" From 5d107efe5d47ae35299d65c0fd696e068157b541 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Sat, 4 May 2019 18:04:28 +0200 Subject: [PATCH 020/124] Add Sarek to blacklist --- bin/blacklist.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bin/blacklist.json b/bin/blacklist.json index c644fbe49f..e4d39ae675 100644 --- a/bin/blacklist.json +++ b/bin/blacklist.json @@ -5,6 +5,7 @@ "exoseq", "neutronstar", "smrnaseq", - "vipr" + "vipr", + "sarek" ] } \ No newline at end of file From cadfacbcd41104bfb73dc7daa10f411a117ee410 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Sat, 4 May 2019 18:06:02 +0200 Subject: [PATCH 021/124] Add multiple missing pipelines to blacklist --- bin/blacklist.json | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/bin/blacklist.json b/bin/blacklist.json index e4d39ae675..d3571a76ae 100644 --- a/bin/blacklist.json +++ b/bin/blacklist.json @@ -6,6 +6,13 @@ "neutronstar", "smrnaseq", "vipr", - "sarek" + "sarek", + "neutronstar", + "proteomicslfq", + "clinvap", + "lncpipe", + "ddamsproteomics", + "scrnaseq", + "guideseq" ] } \ No newline at end of file From 8c30edc246cec67c4035fc2c88b3c624f37fd322 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Sat, 4 May 2019 18:15:48 +0200 Subject: [PATCH 022/124] Updated CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f9ec18f99..dfbd37d3c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ #### Syncing * Can now sync a targeted pipeline via command-line +* Updated Blacklist of synced pipelines #### Other * Fix small typo in central readme of tools for future releases From b32306483ffcc84c2ef790b300094c9642f0460e Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Sun, 5 May 2019 16:53:01 +0200 Subject: [PATCH 023/124] Make linting happy --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dfbd37d3c5..3686380e22 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ #### Syncing * Can now sync a targeted pipeline via command-line -* Updated Blacklist of synced pipelines +* Updated Blacklist of synced pipelines #### Other * Fix small typo in central readme of tools for future releases From 961b24e40a8229492eaec09dd0324e0888f7e80a Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Mon, 6 May 2019 09:47:07 +0200 Subject: [PATCH 024/124] Add TEMPLATE branch to create command --- nf_core/create.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nf_core/create.py 
b/nf_core/create.py index d2dbbc979b..c1991bb24b 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -100,6 +100,9 @@ def git_init_pipeline(self): """ logging.info("Initialising pipeline git repository") repo = git.Repo.init(self.outdir) - repo.git.add(A=True) + repo.git.add(A=True) repo.index.commit("initial template build from nf-core/tools, version {}".format(nf_core.__version__)) - logging.info("Done. Remember to add a remote and push to GitHub:\n cd {}\n git remote add origin git@github.com:USERNAME/REPO_NAME.git\n git push".format(self.outdir)) + #Add TEMPLATE branch to git repository + repo.git.branch('TEMPLATE') + logging.info("Done. Remember to add a remote and push to GitHub:\n cd {}\n git remote add origin git@github.com:USERNAME/REPO_NAME.git\n git push --all origin".format(self.outdir)) + logging.info("This will also push your newly created TEMPLATE branch for syncing.") From a9ef277183164bc78860ed9d7060861b9060b474 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Mon, 6 May 2019 09:48:01 +0200 Subject: [PATCH 025/124] Add Changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3686380e22..52db1cdf94 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ ## v1.7dev +#### Tools helper code +* The tools `create` command now sets up a TEMPLATE branch for syncing + #### Syncing * Can now sync a targeted pipeline via command-line * Updated Blacklist of synced pipelines From 745972b83545cacadc5dbaacbe38f66e51b25123 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Mon, 6 May 2019 09:49:06 +0200 Subject: [PATCH 026/124] Bump to 1.7dev --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 8d7cd3f396..b616ae51d2 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup, find_packages import sys -version = '1.6' +version = '1.7dev' with open('README.md') as f: readme = f.read() From 020e7f5b708e067a2233c89ba7f64e4d490ff143 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 10 May 2019 12:35:41 +0100 Subject: [PATCH 027/124] Remove chipseq from blacklist --- CHANGELOG.md | 1 + bin/blacklist.json | 5 ++--- .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3686380e22..b927303dfd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ #### Syncing * Can now sync a targeted pipeline via command-line * Updated Blacklist of synced pipelines +* Removed `chipseq` from Blacklist of synced pipelines #### Other * Fix small typo in central readme of tools for future releases diff --git a/bin/blacklist.json b/bin/blacklist.json index d3571a76ae..47959b9e41 100644 --- a/bin/blacklist.json +++ b/bin/blacklist.json @@ -1,9 +1,8 @@ { "pipelines": [ - "chipseq", "epitopeprediction", "exoseq", - "neutronstar", + "neutronstar", "smrnaseq", "vipr", "sarek", @@ -15,4 +14,4 @@ "scrnaseq", "guideseq" ] -} \ No newline at end of file +} diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index a3d90900ad..6388251c7a 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -49,7 +49,7 @@ def helpMessage() { * SET UP CONFIGURATION VARIABLES */ -// Show help emssage +// Show help message if (params.help){ helpMessage() exit 0 From 
4290a195ed1a096f2b9ca3accb9c9a49b163d200 Mon Sep 17 00:00:00 2001 From: Martin Proks Date: Wed, 15 May 2019 14:40:07 +0200 Subject: [PATCH 028/124] specified conda channels in template (#333) --- .../{{cookiecutter.name_noslash}}/environment.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml index f5e90f73b3..3d9eadf736 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml @@ -7,5 +7,5 @@ channels: - defaults dependencies: # TODO nf-core: Add required software dependencies here - - fastqc=0.11.8 - - multiqc=1.7 + - bioconda::fastqc=0.11.8 + - bioconda::multiqc=1.7 From 0b46c589deab73131f3f6aad2d50bf521c270611 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Wed, 22 May 2019 12:49:06 +0100 Subject: [PATCH 029/124] Fix email notification bug --- CHANGELOG.md | 1 + nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b927303dfd..cc74143175 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ * Can now sync a targeted pipeline via command-line * Updated Blacklist of synced pipelines * Removed `chipseq` from Blacklist of synced pipelines +* Fixed issue ([#314](https://github.com/nf-core/tools/issues/314)) #### Other * Fix small typo in central readme of tools for future releases diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 6388251c7a..161f6b741c 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -367,7 +367,7 @@ workflow.onComplete { c_green = params.monochrome_logs ? '' : "\033[0;32m"; c_red = params.monochrome_logs ? 
'' : "\033[0;31m"; - if (workflow.stats.ignoredCountFmt > 0 && workflow.success) { + if (workflow.stats.ignoredCountFmt != "0" && workflow.success) { log.info "${c_purple}Warning, pipeline completed, but with errored process(es) ${c_reset}" log.info "${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCountFmt} ${c_reset}" log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCountFmt} ${c_reset}" From 7358622cac2050a708466af22f5e174f6b2fdbb9 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Wed, 22 May 2019 13:56:56 +0100 Subject: [PATCH 030/124] Update CHANGELOG --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cc74143175..6e44bb74aa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,7 @@ * Can now sync a targeted pipeline via command-line * Updated Blacklist of synced pipelines * Removed `chipseq` from Blacklist of synced pipelines -* Fixed issue ([#314](https://github.com/nf-core/tools/issues/314)) +* Fixed issue [#314](https://github.com/nf-core/tools/issues/314) #### Other * Fix small typo in central readme of tools for future releases From 0493b4184fb82f842c77611a57051bf95df5eebc Mon Sep 17 00:00:00 2001 From: drpatelh Date: Wed, 22 May 2019 16:51:07 +0100 Subject: [PATCH 031/124] Update code based on Paolas comment --- .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 161f6b741c..a8f64e5822 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -367,10 +367,10 @@ workflow.onComplete { c_green = params.monochrome_logs ? '' : "\033[0;32m"; c_red = params.monochrome_logs ? 
'' : "\033[0;31m"; - if (workflow.stats.ignoredCountFmt != "0" && workflow.success) { + if (workflow.stats.ignoredCount > 0 && workflow.success) { log.info "${c_purple}Warning, pipeline completed, but with errored process(es) ${c_reset}" - log.info "${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCountFmt} ${c_reset}" - log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCountFmt} ${c_reset}" + log.info "${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCount} ${c_reset}" + log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${c_reset}" } if(workflow.success){ From 5ffd8493a3382fbd7b19dd18b05c6ec6fe2d4a1d Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 31 May 2019 00:46:20 +0100 Subject: [PATCH 032/124] Add nf-core logo --- README.md | 3 +-- docs/images/nfcore-tools_logo.png | Bin 0 -> 14038 bytes 2 files changed, 1 insertion(+), 2 deletions(-) create mode 100755 docs/images/nfcore-tools_logo.png diff --git a/README.md b/README.md index 0a9e43f257..190da47a11 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ -# ![nf-core/tools](docs/images/nf-core-logo.png) +# ![nf-core/tools](docs/images/nfcore-tools_logo.png) -## [nf-core/tools](https://github.com/nf-core/tools) [![Build Status](https://travis-ci.org/nf-core/tools.svg?branch=master)](https://travis-ci.org/nf-core/tools) [![codecov](https://codecov.io/gh/nf-core/tools/branch/master/graph/badge.svg)](https://codecov.io/gh/nf-core/tools) [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg?style=flat-square)](http://bioconda.github.io/recipes/nf-core/README.html) diff --git a/docs/images/nfcore-tools_logo.png b/docs/images/nfcore-tools_logo.png new file mode 100755 index 0000000000000000000000000000000000000000..9a0659d9d1a50498fce7ea4b1855eee4387cbbea GIT binary patch literal 14038 zcma)jWmsF!6D|{2DDJ^2?rt~z{qLvy z>E=mJlC!&WW_IVy%+9w5Itp- zbx~ojKvbL0u>WXoN`{_ra47cwKJbK)Cl1(2QZIP}FCAAqFJDU!TR2}|Urq-Xkf*h! 
z!nR%pWSq7IAgpw&S|H)!(Gja+}-bG^is(^LkFv=n!tM=81XF*I6G$u;g2E_XLpdRuk4hR}UlACi(9CJ6S%qiCa?`p8Y0jdqECMs8zaMf;Mg ztI~r1=?;GT!RBA`kgRRS5?a9V)Gg|As-Nb$Y?>05Q7fv^0^!3eq=!?WV=OIHSgRlh zhto;a0acDmaZ?fI9}u*!zPYyjv4mz2L6k4NT|E{A_c2~q()TW}<>oT|@chWz5&vfG zk)#2gb!nN97!w|uuXhcWzE7kEu5$j=Qk|!43vdU)K(GjR;y~a7`QI6ZGTk3bR>1l} zRf2d`51`gpSQnK|bNeFl#*drqUb|sHR+&#(1nrGz;y$XHp|zUPpFXZCyW#M@<#w?K zhonAn5|`NAc;gkkmDDnQ2yPR8>1O=b%N@_n9ELqnglp5udi6;MqUnF8X0PFA z`QX42*$Qjd#!`a|R&K80w2T9ACI};!>9XmCi32^a68zKtzN6jSKPdO z>w9;G)U?Eu$)sp1EIaD3VIcRzDC{OOOHSks0p9MiVwDyMYt)uv%aD}*N`XNMI}>49 z&B*jebDKXU#O`djd9iicBmn+&AW|`G3EaN@Cbrx7V$xn4MqEx-{L5Q0odBkjsZJQY zd>gnn>Irplf~kc|$biqBKi{jgNW+wW%zMGtwK)4QsCw(gHELmKrxOKld{gwk{Y0jL ztP(7Q)?tYQ&nZ{_E|}tvuq5#x_jI7M))yuu&TsQAfZlbsBoUVNcf;1*R957fhZ^wA zqDRm)F3}U}-A;`6NN)7JAWK0T@MiuUrHSy$Rs(G1O<;pvg-#^rR0#@6!lrMlLPKb`;6B3ZfLU>dkB>G;U^aEMek^Fv))o@}yu=hF)Zo?-hZMb@<|%Gpf2b0`Wcm)bk}C*hc$yyjWB8-B zFlwPSDZq-GBHL$|Y@nKbLKKEgc@yg;otJ%#G*23#d+FJ8!t5#J+uMbc0IGj2895>> zbQwO*;k>3u1gh{?(w|Vj5Vhy*z6l>lsu*(G^Xc25ZN+p0)PqJmGw4+JU2fc5R9)21 zH2p+wot4h>xnsbt2!~?%kJS{lq9>&gI{DHQg%qIvaWZ;7=RCn+96l_<2Qmh`AY6t* zX)jxu;U%SFddF(o)LWJN{R^y`yyx%V0!41zY;=S4EqqJ4$4N)L8n3A)?#Vjdmy#dy zL^`g!uXia?KWG(^r?68*#AUZ*W!s+~=3yi=&>Jf&p&#@WpfDk^M=qxFB=%G^(RdjbJgEpWg@Pr|n$3*e6BMo!of_<>zmCGjp&kyi`ed$_7U_ zKV2!hJjCyR9ux8FKR04O*}{YE!BF;bm%C*=R4eD;B;^a%F~&oZSb{q0A+|PxQamhX zh%Z#WxCLck`a`qj_q4I%&N2r)1hgFJ68Zvp8|Mc9G;VSG6peTO<9Y4n&gHXFl$UC| zhjZ}nTM&i|h?}9MjHVW9%ehJfi)Ko>n-E?;EmuX{yU_xDdvOic1E1ItuS~zcmr;v3 zrCrq?UFBC|RD(IVF5{mQ(}}-EJ)*W;wQSPr3Pnt;t#4#=zHar_W|+mP+rFp;+qXtbD%3Qc4k**I@)z%0P)7U0$_0C1sv7Kk4wp_z}*9l__>^X|qcig!j zB(DM)kZ8W$_7J;QTbi?hx5Q!5XXI8zQEK8mb2}IFs+O?^`Wn*BC~q)~p&3qys@!e= z9vchXU%02}yUsVfBQsqERCO$Iz;#v|w+|+Q_1SmEpBj4unUUzh=)<0!y{ySD6^-oH z`YM&(Z)IJ4lr_;?$z!S&^xtTY#WMn4k+I{}jT(7y+W~E58BIarp}O*MD!r))Nwtae z$?kaM6cdHxAaZU(l04Zl*mDoQ*5^>USB;4C8$htS7a?vYd_E^)a*!VZ0h3o^#@goV>vA_$7K)CmE!J0_y^ zTeu>C!I8gr-XmDvoM$3aGrd2oDLOtx>5?3Uiy6YUg-Nw3WDFAig(KB_x z?VPqg5xP^I02_V?meTEjV=2g<^x~y*=vuDRS>$378eGv?=q!^0j`x1;AyJ2e?Q1Sy a;WO4yRlgQn5W}89gHx8*kgJuk2>U;G4*s+N literal 0 HcmV?d00001 From 5ce63952ca7119234e738887e7cdb5f73eae93ac Mon Sep 17 00:00:00 2001 From: drpatelh Date: Fri, 31 May 2019 00:52:40 +0100 Subject: [PATCH 033/124] Update CHANGELOG --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e44bb74aa..a53cd86fac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,8 @@ * Fix small typo in central readme of tools for future releases * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) * Add `nf-core` citation - +* Add proper `nf-core` logo for tools +` ## v1.6 #### Syncing From a19da15d8f2a90f2fe72315797b774db0b27ab54 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Fri, 31 May 2019 15:02:39 +0200 Subject: [PATCH 034/124] Prints warning in dev mode for unequal container name --- nf_core/lint.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index ace64ea9de..e3d601c1e5 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -431,7 +431,10 @@ def check_nextflow_config(self): try: assert self.config.get('process.container', '').strip("'") == container_name except AssertionError: - self.failed.append((4, "Config variable process.container looks wrong. 
Should be '{}' but is '{}'".format(container_name, self.config.get('process.container', '').strip("'")))) + if self.release_mode: + self.failed.append((4, "Config variable process.container looks wrong. Should be '{}' but is '{}'".format(container_name, self.config.get('process.container', '').strip("'")))) + else: + self.warned.append((4, "Config variable process.container looks wrong. Should be '{}' but is '{}'".format(container_name, self.config.get('process.container', '').strip("'")))) else: self.passed.append((4, "Config variable process.container looks correct: '{}'".format(container_name))) From b52b40d3c6475d7ea250462e71859a37acf9593f Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Fri, 31 May 2019 15:10:20 +0200 Subject: [PATCH 035/124] Updates Changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a53cd86fac..bb6201d606 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,9 @@ * Removed `chipseq` from Blacklist of synced pipelines * Fixed issue [#314](https://github.com/nf-core/tools/issues/314) +#### Linting +* If the container slug does not contain the nf-core organisation (for example during development on a fork), linting will raise a warning, and an error with release mode on + #### Other * Fix small typo in central readme of tools for future releases * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) From 87f85fd64f4a3bdcdb98634e0796cee0a8975388 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Fri, 31 May 2019 15:14:19 +0200 Subject: [PATCH 036/124] Adds more imperative warning massage --- nf_core/lint.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index e3d601c1e5..33d919d400 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -434,7 +434,7 @@ def check_nextflow_config(self): if self.release_mode: self.failed.append((4, "Config variable process.container looks wrong. Should be '{}' but is '{}'".format(container_name, self.config.get('process.container', '').strip("'")))) else: - self.warned.append((4, "Config variable process.container looks wrong. Should be '{}' but is '{}'".format(container_name, self.config.get('process.container', '').strip("'")))) + self.warned.append((4, "Config variable process.container looks wrong. Should be '{}' but is '{}'. 
Fix this before you make a release of your pipeline!".format(container_name, self.config.get('process.container', '').strip("'")))) else: self.passed.append((4, "Config variable process.container looks correct: '{}'".format(container_name))) From 8e445b54523e2ba80f72724311e40ac509477ed5 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 10 Jun 2019 20:55:45 +0100 Subject: [PATCH 037/124] Add Quick start section to template --- CHANGELOG.md | 3 ++- .../{{cookiecutter.name_noslash}}/README.md | 24 +++++++++++++++++-- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb6201d606..97e94c8d97 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,8 @@ * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) * Add `nf-core` citation * Add proper `nf-core` logo for tools -` +* Add `Quick Start` section to main README of template + ## v1.6 #### Syncing diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index cd760421b7..c04c7bef47 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -11,6 +11,26 @@ ## Introduction The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker containers making installation trivial and results highly reproducible. +## Quick Start + +1. Install [`nextflow`](docs/installation.md) + +2. Install one of [`docker`](https://docs.docker.com/engine/installation/), [`singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`conda`](https://conda.io/miniconda.html) + +3. Download and test the pipeline using the provided test data + +```bash +nextflow run {{ cookiecutter.name }} -profile test, +``` + +4. Start running your own analysis! + + +```bash +nextflow run {{ cookiecutter.name }} -profile --reads '*_R{1,2}.fastq.gz' --genome GRCh37 +``` + +See [usage docs](docs/usage.md) for all of the available options when running the pipeline. ## Documentation The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline, found in the `docs/` directory: @@ -32,8 +52,8 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline ## Citation - - + + You can cite the `nf-core` pre-print as follows: Ewels PA, Peltzer A, Fillinger S, Alneberg JA, Patel H, Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. **nf-core: Community curated bioinformatics pipelines**. *bioRxiv*. 2019. p. 610741. [doi: 10.1101/610741](https://www.biorxiv.org/content/10.1101/610741v1). 
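
For orientation: the Quick Start commands added to the README template in the patch above use cookiecutter placeholders. Filled in for a hypothetical pipeline name (`nf-core/examplepipe` below is only an illustration, not part of any patch) and the Docker profile, a user would run roughly:

```bash
# Fetch the pipeline and run the bundled minimal test dataset with Docker
nextflow run nf-core/examplepipe -profile test,docker

# Then start an analysis on real data
nextflow run nf-core/examplepipe -profile docker --reads '*_R{1,2}.fastq.gz' --genome GRCh37
```
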
From 8543ff0835ab7db6828567e8afff1bcd72ac6ac3 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 10 Jun 2019 20:59:18 +0100 Subject: [PATCH 038/124] Update description --- .../pipeline-template/{{cookiecutter.name_noslash}}/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index c04c7bef47..4cf29ff04f 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -13,11 +13,11 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool ## Quick Start -1. Install [`nextflow`](docs/installation.md) +1. Install [`nextflow`](https://nf-co.re/usage/installation) 2. Install one of [`docker`](https://docs.docker.com/engine/installation/), [`singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`conda`](https://conda.io/miniconda.html) -3. Download and test the pipeline using the provided test data +3. Download the pipeline and test it with a minimal dataset ```bash nextflow run {{ cookiecutter.name }} -profile test, From 8d220db727f8fec611069433ad1f723567a6bae4 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 10 Jun 2019 21:23:16 +0100 Subject: [PATCH 039/124] Fix markdownlint --- .../pipeline-template/{{cookiecutter.name_noslash}}/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index 4cf29ff04f..93aba396a3 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -25,11 +25,11 @@ nextflow run {{ cookiecutter.name }} -profile test, 4. Start running your own analysis! - ```bash nextflow run {{ cookiecutter.name }} -profile --reads '*_R{1,2}.fastq.gz' --genome GRCh37 ``` + See [usage docs](docs/usage.md) for all of the available options when running the pipeline. ## Documentation From b169af6ab18cb1f30e2a65eafb02cf8eb53bd072 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 10 Jun 2019 21:33:47 +0100 Subject: [PATCH 040/124] Tweak description --- .../pipeline-template/{{cookiecutter.name_noslash}}/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index 93aba396a3..f83ad5b680 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -17,7 +17,7 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool 2. Install one of [`docker`](https://docs.docker.com/engine/installation/), [`singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`conda`](https://conda.io/miniconda.html) -3. Download the pipeline and test it with a minimal dataset +3. Download the pipeline and test it on a minimal dataset with a single command ```bash nextflow run {{ cookiecutter.name }} -profile test, From 2a0341806a592570eb5befd82b91c0cd9a061dfc Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 10 Jun 2019 22:54:32 +0100 Subject: [PATCH 041/124] Bypass bullet check... 
--- .../{{cookiecutter.name_noslash}}/README.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index f83ad5b680..8f1a6a0ed4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -9,30 +9,32 @@ [![Docker](https://img.shields.io/docker/automated/{{ cookiecutter.name_docker }}.svg)](https://hub.docker.com/r/{{ cookiecutter.name_docker }}) ## Introduction + The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It comes with docker containers making installation trivial and results highly reproducible. ## Quick Start -1. Install [`nextflow`](https://nf-co.re/usage/installation) +1 Install [`nextflow`](https://nf-co.re/usage/installation) -2. Install one of [`docker`](https://docs.docker.com/engine/installation/), [`singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`conda`](https://conda.io/miniconda.html) +2 Install one of [`docker`](https://docs.docker.com/engine/installation/), [`singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`conda`](https://conda.io/miniconda.html) -3. Download the pipeline and test it on a minimal dataset with a single command +3 Download the pipeline and test it on a minimal dataset with a single command ```bash nextflow run {{ cookiecutter.name }} -profile test, ``` -4. Start running your own analysis! +4 Start running your own analysis! + ```bash nextflow run {{ cookiecutter.name }} -profile --reads '*_R{1,2}.fastq.gz' --genome GRCh37 ``` - See [usage docs](docs/usage.md) for all of the available options when running the pipeline. ## Documentation + The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline, found in the `docs/` directory: 1. [Installation](https://nf-co.re/usage/installation) @@ -47,8 +49,8 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline ## Credits -{{ cookiecutter.name }} was originally written by {{ cookiecutter.author }}. +{{ cookiecutter.name }} was originally written by {{ cookiecutter.author }}. ## Citation From 1140e4f26c2086ac0c3635d8a97214352ed191ba Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 10 Jun 2019 22:56:28 +0100 Subject: [PATCH 042/124] Bypass bullet check... --- .../{{cookiecutter.name_noslash}}/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index 8f1a6a0ed4..c5bf9c4522 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -14,17 +14,17 @@ The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool ## Quick Start -1 Install [`nextflow`](https://nf-co.re/usage/installation) +i. Install [`nextflow`](https://nf-co.re/usage/installation) -2 Install one of [`docker`](https://docs.docker.com/engine/installation/), [`singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`conda`](https://conda.io/miniconda.html) +ii. 
Install one of [`docker`](https://docs.docker.com/engine/installation/), [`singularity`](https://www.sylabs.io/guides/3.0/user-guide/) or [`conda`](https://conda.io/miniconda.html) -3 Download the pipeline and test it on a minimal dataset with a single command +iii. Download the pipeline and test it on a minimal dataset with a single command ```bash nextflow run {{ cookiecutter.name }} -profile test, ``` -4 Start running your own analysis! +iv. Start running your own analysis! ```bash From 7b97e3527bb1e2c862de2a845ffed89d39630db2 Mon Sep 17 00:00:00 2001 From: Anthony Underwood Date: Mon, 17 Jun 2019 11:48:11 +0100 Subject: [PATCH 043/124] Cope with edge versions when linting --- nf_core/lint.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/nf_core/lint.py b/nf_core/lint.py index cd90406464..9d9966452b 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -419,7 +419,11 @@ def check_nextflow_config(self): if self.config.get('manifest.nextflowVersion', '').strip('"\'').startswith('>='): self.passed.append((4, "Config variable 'manifest.nextflowVersion' started with >=")) # Save self.minNextflowVersion for convenience - self.minNextflowVersion = re.sub(r'[^0-9\.]', '', self.config.get('manifest.nextflowVersion', '')) + nextflowVersionMatch = re.search(r'[0-9\.]+(-edge)?', self.config.get('manifest.nextflowVersion', '')) + if nextflowVersionMatch: + self.minNextflowVersion = nextflowVersionMatch.group(0) + else: + self.minNextflowVersion = None else: self.failed.append((4, "Config variable 'manifest.nextflowVersion' did not start with '>=' : '{}'".format(self.config.get('manifest.nextflowVersion', '')).strip('"\''))) From 47f161fd17095cb1c92c27b68376a3701c2a8942 Mon Sep 17 00:00:00 2001 From: Olga Botvinnik Date: Mon, 24 Jun 2019 17:30:39 -0700 Subject: [PATCH 044/124] Add docker.runOptions to avoid memory swap error --- .../{{cookiecutter.name_noslash}}/nextflow.config | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config index 6f0d53a0bb..f684adc20f 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config @@ -58,6 +58,11 @@ profiles { test { includeConfig 'conf/test.config' } } +// Avoid this error: +// WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap. 
+// Thanks to: https://github.com/alesssia/YAMP/wiki/How-to-use-Docker +docker.runOptions = '-u $(id -u):$(id -g)' + // Load igenomes.config if required if(!params.igenomesIgnore){ includeConfig 'conf/igenomes.config' From 9911537f00e5db399ff489a3ca7c8516abd2a1c6 Mon Sep 17 00:00:00 2001 From: MaxUlysse Date: Tue, 25 Jun 2019 11:49:46 +0200 Subject: [PATCH 045/124] fix: Small code polishing + typo fix in the template main.nf file --- .../{{cookiecutter.name_noslash}}/main.nf | 46 +++++++++---------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index a3d90900ad..a23fb4b800 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -49,7 +49,7 @@ def helpMessage() { * SET UP CONFIGURATION VARIABLES */ -// Show help emssage +// Show help message if (params.help){ helpMessage() exit 0 @@ -65,7 +65,7 @@ if (params.genomes && params.genome && !params.genomes.containsKey(params.genome fasta = params.genome ? params.genomes[ params.genome ].fasta ?: false : false if ( params.fasta ){ fasta = file(params.fasta) - if( !fasta.exists() ) exit 1, "Fasta file not found: ${params.fasta}" + if ( !fasta.exists() ) exit 1, "Fasta file not found: ${params.fasta}" } // // NOTE - THIS IS NOT USED IN THIS PIPELINE, EXAMPLE ONLY @@ -78,12 +78,12 @@ if ( params.fasta ){ // Has the run name been specified by the user? // this has the bonus effect of catching both -name and --name custom_runName = params.name -if( !(workflow.runName ==~ /[a-z]+_[a-z]+/) ){ +if ( !(workflow.runName ==~ /[a-z]+_[a-z]+/) ){ custom_runName = workflow.runName } -if( workflow.profile == 'awsbatch') { +if ( workflow.profile == 'awsbatch') { // AWSBatch sanity checking if (!params.awsqueue || !params.awsregion) exit 1, "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" // Check outdir paths to be S3 buckets if running on AWSBatch @@ -100,8 +100,8 @@ ch_output_docs = Channel.fromPath("$baseDir/docs/output.md") /* * Create a channel for input read files */ -if(params.readPaths){ - if(params.singleEnd){ +if (params.readPaths){ + if (params.singleEnd){ Channel .from(params.readPaths) .map { row -> [ row[0], [file(row[1][0])]] } @@ -125,28 +125,28 @@ if(params.readPaths){ // Header log info log.info nfcoreHeader() def summary = [:] -if(workflow.revision) summary['Pipeline Release'] = workflow.revision +if (workflow.revision) summary['Pipeline Release'] = workflow.revision summary['Run Name'] = custom_runName ?: workflow.runName // TODO nf-core: Report custom parameters here summary['Reads'] = params.reads summary['Fasta Ref'] = params.fasta summary['Data Type'] = params.singleEnd ? 
'Single-End' : 'Paired-End' summary['Max Resources'] = "$params.max_memory memory, $params.max_cpus cpus, $params.max_time time per job" -if(workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container" +if (workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container" summary['Output dir'] = params.outdir summary['Launch dir'] = workflow.launchDir summary['Working dir'] = workflow.workDir summary['Script dir'] = workflow.projectDir summary['User'] = workflow.userName -if(workflow.profile == 'awsbatch'){ +if (workflow.profile == 'awsbatch'){ summary['AWS Region'] = params.awsregion summary['AWS Queue'] = params.awsqueue } summary['Config Profile'] = workflow.profile -if(params.config_profile_description) summary['Config Description'] = params.config_profile_description -if(params.config_profile_contact) summary['Config Contact'] = params.config_profile_contact -if(params.config_profile_url) summary['Config URL'] = params.config_profile_url -if(params.email) { +if (params.config_profile_description) summary['Config Description'] = params.config_profile_description +if (params.config_profile_contact) summary['Config Contact'] = params.config_profile_contact +if (params.config_profile_url) summary['Config URL'] = params.config_profile_url +if (params.email) { summary['E-mail Address'] = params.email summary['MultiQC maxsize'] = params.maxMultiqcEmailFileSize } @@ -279,7 +279,7 @@ workflow.onComplete { // Set up the e-mail variables def subject = "[{{ cookiecutter.name }}] Successful: $workflow.runName" - if(!workflow.success){ + if (!workflow.success){ subject = "[{{ cookiecutter.name }}] FAILED: $workflow.runName" } def email_fields = [:] @@ -298,10 +298,10 @@ workflow.onComplete { email_fields['summary']['Date Completed'] = workflow.complete email_fields['summary']['Pipeline script file path'] = workflow.scriptFile email_fields['summary']['Pipeline script hash ID'] = workflow.scriptId - if(workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository - if(workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId - if(workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision - if(workflow.container) email_fields['summary']['Docker image'] = workflow.container + if (workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository + if (workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId + if (workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision + if (workflow.container) email_fields['summary']['Docker image'] = workflow.container email_fields['summary']['Nextflow Version'] = workflow.nextflow.version email_fields['summary']['Nextflow Build'] = workflow.nextflow.build email_fields['summary']['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp @@ -341,7 +341,7 @@ workflow.onComplete { // Send the HTML e-mail if (params.email) { try { - if( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } + if ( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail [ 'sendmail', '-t' ].execute() << sendmail_html log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $params.email (sendmail)" @@ -354,7 +354,7 @@ workflow.onComplete { // Write summary e-mail HTML to a file def output_d = new File( 
"${params.outdir}/pipeline_info/" ) - if( !output_d.exists() ) { + if ( !output_d.exists() ) { output_d.mkdirs() } def output_hf = new File( output_d, "pipeline_report.html" ) @@ -373,7 +373,7 @@ workflow.onComplete { log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCountFmt} ${c_reset}" } - if(workflow.success){ + if (workflow.success){ log.info "${c_purple}[{{ cookiecutter.name }}]${c_green} Pipeline completed successfully${c_reset}" } else { checkHostname() @@ -411,11 +411,11 @@ def checkHostname(){ def c_white = params.monochrome_logs ? '' : "\033[0;37m" def c_red = params.monochrome_logs ? '' : "\033[1;91m" def c_yellow_bold = params.monochrome_logs ? '' : "\033[1;93m" - if(params.hostnames){ + if (params.hostnames){ def hostname = "hostname".execute().text.trim() params.hostnames.each { prof, hnames -> hnames.each { hname -> - if(hostname.contains(hname) && !workflow.profile.contains(prof)){ + if (hostname.contains(hname) && !workflow.profile.contains(prof)){ log.error "====================================================\n" + " ${c_red}WARNING!${c_reset} You are running with `-profile $workflow.profile`\n" + " but your machine hostname is ${c_white}'$hostname'${c_reset}\n" + From 9eb39f87ed5a3368d385cd6b1a548ce2ba8e4f45 Mon Sep 17 00:00:00 2001 From: MaxUlysse Date: Tue, 25 Jun 2019 11:49:56 +0200 Subject: [PATCH 046/124] fix: update CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d6573a119e..b0bc96bab2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ #### Other * Fix small typo in central readme of tools for future releases +* Small code polishing + typo fix in the template main.nf file * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) ## v1.6 From ca036a96d6e5a41e207a35b8d319c2c01263dd93 Mon Sep 17 00:00:00 2001 From: Anthony Underwood Date: Tue, 25 Jun 2019 11:11:04 +0100 Subject: [PATCH 047/124] Update Changelog with lint feature to accept edge versions --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d6e3847e01..6def656143 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ * Improved documentation * Fixed bugs in `nf-core lint` * The order of conda channels is now correct, avoiding occasional erroneous errors that packages weren't found ([#207](https://github.com/nf-core/tools/issues/207)) + * Allow edge versions in nf-core pipelines * Add reporting of ignored errored process * As a solution for [#103](https://github.com/nf-core/tools/issues/103)) From e7e36f44dab6d47a0f842f91d08b3810808306b7 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Fri, 28 Jun 2019 09:00:02 +0200 Subject: [PATCH 048/124] Adjust options with @maxulysse's input --- .../{{cookiecutter.name_noslash}}/nextflow.config | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config index f684adc20f..75b9229fac 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config @@ -60,8 +60,8 @@ profiles { // Avoid this error: // WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap. 
-// Thanks to: https://github.com/alesssia/YAMP/wiki/How-to-use-Docker -docker.runOptions = '-u $(id -u):$(id -g)' +// Testing this in nf-core after discussion here https://github.com/nf-core/tools/pull/351, once this is established and works well, nextflow might implement this behavior as new default. +docker.runOptions = '-u \$(id -u):\$(id -g)' // Load igenomes.config if required if(!params.igenomesIgnore){ From 50bbf240bdd8e3b4f671d42730ebba48fc740294 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Fri, 28 Jun 2019 09:02:44 +0200 Subject: [PATCH 049/124] Add Changelog --- CHANGELOG.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64b8d67356..88c52ca557 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,24 +3,29 @@ ## v1.7dev #### Tools helper code + * The tools `create` command now sets up a TEMPLATE branch for syncing #### Syncing + * Can now sync a targeted pipeline via command-line * Updated Blacklist of synced pipelines * Removed `chipseq` from Blacklist of synced pipelines * Fixed issue [#314](https://github.com/nf-core/tools/issues/314) #### Linting + * If the container slug does not contain the nf-core organisation (for example during development on a fork), linting will raise a warning, and an error with release mode on -#### Other +#### Template + * Fix small typo in central readme of tools for future releases * Small code polishing + typo fix in the template main.nf file * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) * Add `nf-core` citation * Add proper `nf-core` logo for tools * Add `Quick Start` section to main README of template +* Fix [Docker RunOptions](https://github.com/nf-core/tools/pull/351) to get UID and GID set in the template ## v1.6 From 700af5be2548e21261a4596b15d956c2946bd22f Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Fri, 28 Jun 2019 14:50:13 +0200 Subject: [PATCH 050/124] Bump version of Conda in base to 4.6.14 --- CHANGELOG.md | 4 ++++ Dockerfile | 5 ++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 88c52ca557..7377fb51f6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,6 +27,10 @@ * Add `Quick Start` section to main README of template * Fix [Docker RunOptions](https://github.com/nf-core/tools/pull/351) to get UID and GID set in the template +#### Other + +* Bump `conda` to 4.6.14 in base nf-core Dockerfile + ## v1.6 #### Syncing diff --git a/Dockerfile b/Dockerfile index a63e25af58..7e3babe4bb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,6 @@ -FROM continuumio/miniconda:4.5.4 +FROM continuumio/miniconda:4.6.14 LABEL authors="phil.ewels@scilifelab.se,alexander.peltzer@qbic.uni-tuebingen.de" \ description="Docker image containing base requirements for the nfcore pipelines" # Install procps so that Nextflow can poll CPU usage -RUN apt-get update && apt-get install -y procps && apt-get clean -y -RUN conda install conda=4.6.7 +RUN apt-get update && apt-get install -y procps && apt-get clean -y \ No newline at end of file From f247e5b4969c24c728c77b50331b8359607bf3bd Mon Sep 17 00:00:00 2001 From: Olga Botvinnik Date: Tue, 2 Jul 2019 18:04:18 -0700 Subject: [PATCH 051/124] new File --> file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As mentioned in https://github.com/nf-core/rnaseq/pull/245, the `pipeline_report.{html,txt}` files 
get written with `new File` instead of `file` which leads to weird behavior and creating an `s3:/` folder locally if the output folder is on AWS S3: ``` Thu 27 Jun - 09:03  ~/code/nf-core/rnaseq   origin ☊ olgabot/salmon-gencode ✔ 28☀   ll --tree s3: Permissions Size User Date Modified Git Name drwxr-xr-x - olgabot 11 Jun 10:26 -- s3: drwxr-xr-x - olgabot 11 Jun 10:26 -- └── olgabot-maca drwxr-xr-x - olgabot 11 Jun 10:26 -- └── mini-maca drwxr-xr-x - olgabot 11 Jun 10:26 -- └── results drwxr-xr-x - olgabot 11 Jun 10:26 -- └── pipeline_info .rw-r--r-- 12k olgabot 11 Jun 16:40 -- ├── pipeline_report.html .rw-r--r-- 2.7k olgabot 11 Jun 16:40 -- └── pipeline_report.txt ``` This is especially problematic as after the first time the pipeline is run, then the `s3:/` folder is created and any input files get tested against that "folder" and suddenly they "don't exist" because they look like they're on the local filesystem as locally, `s3://` --> `s3:/`, and then pipelines break 😢 --- .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index a3d90900ad..f4747f6148 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -353,13 +353,13 @@ workflow.onComplete { } // Write summary e-mail HTML to a file - def output_d = new File( "${params.outdir}/pipeline_info/" ) + def output_d = file( "${params.outdir}/pipeline_info/" ) if( !output_d.exists() ) { output_d.mkdirs() } - def output_hf = new File( output_d, "pipeline_report.html" ) + def output_hf = file( output_d, "pipeline_report.html" ) output_hf.withWriter { w -> w << email_html } - def output_tf = new File( output_d, "pipeline_report.txt" ) + def output_tf = file( output_d, "pipeline_report.txt" ) output_tf.withWriter { w -> w << email_txt } c_reset = params.monochrome_logs ? '' : "\033[0m"; From 0bd384d834856ac7fed95c008086d7e05cdb955c Mon Sep 17 00:00:00 2001 From: Olga Botvinnik Date: Wed, 3 Jul 2019 10:46:20 -0700 Subject: [PATCH 052/124] Add comment in changelog --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7377fb51f6..8bed6e5c90 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,9 @@ * Add proper `nf-core` logo for tools * Add `Quick Start` section to main README of template * Fix [Docker RunOptions](https://github.com/nf-core/tools/pull/351) to get UID and GID set in the template +* Use [`file`](https://github.com/nf-core/tools/pull/354) instead of `new File` + to avoid weird behavior such as making an `s3:/` directory locally when using + an AWS S3 bucket as the `--outdir`. 
#### Other From 3393579a9d31991e5c4c1b0f597c55cc0ff91b48 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 18 Jul 2019 10:07:47 +0200 Subject: [PATCH 053/124] Add changes for fixing workflow.onComplete() --- .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index f4747f6148..ea63cc8e61 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -367,10 +367,10 @@ workflow.onComplete { c_green = params.monochrome_logs ? '' : "\033[0;32m"; c_red = params.monochrome_logs ? '' : "\033[0;31m"; - if (workflow.stats.ignoredCountFmt > 0 && workflow.success) { + if (workflow.stats.ignoredCount > 0 && workflow.success) { log.info "${c_purple}Warning, pipeline completed, but with errored process(es) ${c_reset}" - log.info "${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCountFmt} ${c_reset}" - log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCountFmt} ${c_reset}" + log.info "${c_red}Number of ignored errored process(es) : ${workflow.stats.ignoredCount} ${c_reset}" + log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${c_reset}" } if(workflow.success){ From 1570f20a41c91df1acc5f6c347cc822db35e14c8 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 18 Jul 2019 10:07:55 +0200 Subject: [PATCH 054/124] Add changes + clean up markdownlint errors --- CHANGELOG.md | 38 +++++++++++++++++++++++++------------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8bed6e5c90..77dd96fb49 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,22 +2,22 @@ ## v1.7dev -#### Tools helper code +### Tools helper code * The tools `create` command now sets up a TEMPLATE branch for syncing -#### Syncing +### Syncing * Can now sync a targeted pipeline via command-line * Updated Blacklist of synced pipelines * Removed `chipseq` from Blacklist of synced pipelines * Fixed issue [#314](https://github.com/nf-core/tools/issues/314) -#### Linting +### Linting * If the container slug does not contain the nf-core organisation (for example during development on a fork), linting will raise a warning, and an error with release mode on -#### Template +### Template * Fix small typo in central readme of tools for future releases * Small code polishing + typo fix in the template main.nf file @@ -29,19 +29,22 @@ * Use [`file`](https://github.com/nf-core/tools/pull/354) instead of `new File` to avoid weird behavior such as making an `s3:/` directory locally when using an AWS S3 bucket as the `--outdir`. +* Fix workflow.onComplete() message when finishing pipeline -#### Other +### Other * Bump `conda` to 4.6.14 in base nf-core Dockerfile ## v1.6 -#### Syncing +### Syncing + * Code refactoring to make the script more readable * No travis build failure anymore on sync errors * More verbose logging -#### Template pipeline +### Template pipeline + * awsbatch `work-dir` checking moved to nextflow itself. Removed unsatisfiable check in main.nf template. 
* Fixed markdown linting * Tools CI testing now runs markdown lint on compiled template pipeline @@ -50,7 +53,8 @@ * Changed `scrape_software_versions.py` to output `.csv` file * Added `export_plots` parameter to multiqc config -#### Tools helper code +### Tools helper code + * Drop [nf-core/rnaseq](https://github.com/nf-core/rnaseq]) from `blacklist.json` to make template sync available * Updated main help command to sort the subcommands in a more logical order * Updated readme to describe the new `nf-core launch` command @@ -70,7 +74,8 @@ ## [v1.5](https://github.com/nf-core/tools/releases/tag/1.5) - 2019-03-13 Iron Shark -#### Template pipeline +### Template pipeline + * Dropped Singularity file * Summary now logs details of the cluster profile used if from [nf-core/configs](https://github.com/nf-core/configs) * Dockerhub is used in favor of Singularity Hub for pulling when using the Singularity profile @@ -84,7 +89,8 @@ * Together with nf-core/configs, helper function now checks hostname and suggests a valid config profile * `awsbatch` executor requires the `tracedir` not to be set to an `s3` bucket. -#### Tools helper code +### Tools helper code + * New `nf-core launch` command to interactively launch nf-core pipelines from command-line * Works with a `parameters.settings.json` file shipped with each pipeline * Discovers additional `params` from the pipeline dynamically @@ -107,12 +113,14 @@ * Now correctly validates version pinning for packages from PyPI * Updates for changes to `process.container` definition -#### Other +### Other + * Bump `conda` to 4.6.7 in base nf-core Dockerfile ## [v1.4](https://github.com/nf-core/tools/releases/tag/1.4) - 2018-12-12 Tantalum Butterfly -#### Template pipeline +### Template pipeline + * Institutional custom config profiles moved to github `nf-core/configs` * These will now be maintained centrally as opposed to being shipped with the pipelines in `conf/` * Load `base.config` by default for all profiles @@ -126,7 +134,8 @@ * Travis: Pull the `dev` tagged docker image for testing * Removed UPPMAX-specific documentation from the template. -#### Tools helper code +### Tools helper code + * Make Travis CI tests fail on pull requests if the `CHANGELOG.md` file hasn't been updated * Minor bugfixing in Python code (eg. removing unused import statements) * Made the web requests caching work on multi-user installations @@ -134,6 +143,7 @@ * Linting: Update for Travis: Pull the `dev` tagged docker image for testing ## [v1.3](https://github.com/nf-core/tools/releases/tag/1.3) - 2018-11-21 + * `nf-core create` command line interface updated * Interactive prompts for required arguments if not given * New flag for workflow author @@ -147,6 +157,7 @@ * Added `pip install --upgrade pip` to `.travis.yml` to update pip in the Travis CI environment ## [v1.2](https://github.com/nf-core/tools/releases/tag/1.2) - 2018-10-01 + * Updated the `nf-core release` command * Now called `nf-core bump-versions` instead * New flag `--nextflow` to change the required nextflow version instead @@ -167,6 +178,7 @@ * New GitHub contributing instructions and pull request template ## [v1.1](https://github.com/nf-core/tools/releases/tag/1.1) - 2018-08-14 + Very large release containing lots of work from the first nf-core hackathon, held in SciLifeLab Stockholm. 
* The [Cookiecutter template](https://github.com/nf-core/cookiecutter) has been merged into tools From 655dc9b0057377f78761f7b0281ec36ebb8c6a9f Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 18 Jul 2019 14:34:00 +0200 Subject: [PATCH 055/124] Fix URLs --- CHANGELOG.md | 28 +++++++++++++------ .../.github/CONTRIBUTING.md | 2 +- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d6e3847e01..3c3ff60b7f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,12 +2,14 @@ ## v1.6 -#### Syncing +### Syncing + * Code refactoring to make the script more readable * No travis build failure anymore on sync errors * More verbose logging -#### Template pipeline +### Template pipeline + * awsbatch `work-dir` checking moved to nextflow itself. Removed unsatisfiable check in main.nf template. * Fixed markdown linting * Tools CI testing now runs markdown lint on compiled template pipeline @@ -15,8 +17,10 @@ * Removed Gitter references in `.github/` directories for `tools/` and pipeline template. * Changed `scrape_software_versions.py` to output `.csv` file * Added `export_plots` parameter to multiqc config +* Corrected some typos as listed [here](https://github.com/nf-core/tools/issues/348) to Guidelines + +### Tools helper code -#### Tools helper code * Drop [nf-core/rnaseq](https://github.com/nf-core/rnaseq]) from `blacklist.json` to make template sync available * Updated main help command to sort the subcommands in a more logical order * Updated readme to describe the new `nf-core launch` command @@ -34,7 +38,8 @@ ## [v1.5](https://github.com/nf-core/tools/releases/tag/1.5) - 2019-03-13 Iron Shark -#### Template pipeline +### Template pipeline + * Dropped Singularity file * Summary now logs details of the cluster profile used if from [nf-core/configs](https://github.com/nf-core/configs) * Dockerhub is used in favor of Singularity Hub for pulling when using the Singularity profile @@ -48,7 +53,8 @@ * Together with nf-core/configs, helper function now checks hostname and suggests a valid config profile * `awsbatch` executor requires the `tracedir` not to be set to an `s3` bucket. -#### Tools helper code +### Tools helper code + * New `nf-core launch` command to interactively launch nf-core pipelines from command-line * Works with a `parameters.settings.json` file shipped with each pipeline * Discovers additional `params` from the pipeline dynamically @@ -71,12 +77,14 @@ * Now correctly validates version pinning for packages from PyPI * Updates for changes to `process.container` definition -#### Other +### Other + * Bump `conda` to 4.6.7 in base nf-core Dockerfile ## [v1.4](https://github.com/nf-core/tools/releases/tag/1.4) - 2018-12-12 Tantalum Butterfly -#### Template pipeline +### Template pipeline + * Institutional custom config profiles moved to github `nf-core/configs` * These will now be maintained centrally as opposed to being shipped with the pipelines in `conf/` * Load `base.config` by default for all profiles @@ -90,7 +98,8 @@ * Travis: Pull the `dev` tagged docker image for testing * Removed UPPMAX-specific documentation from the template. -#### Tools helper code +### Tools helper code + * Make Travis CI tests fail on pull requests if the `CHANGELOG.md` file hasn't been updated * Minor bugfixing in Python code (eg. 
removing unused import statements) * Made the web requests caching work on multi-user installations @@ -98,6 +107,7 @@ * Linting: Update for Travis: Pull the `dev` tagged docker image for testing ## [v1.3](https://github.com/nf-core/tools/releases/tag/1.3) - 2018-11-21 + * `nf-core create` command line interface updated * Interactive prompts for required arguments if not given * New flag for workflow author @@ -111,6 +121,7 @@ * Added `pip install --upgrade pip` to `.travis.yml` to update pip in the Travis CI environment ## [v1.2](https://github.com/nf-core/tools/releases/tag/1.2) - 2018-10-01 + * Updated the `nf-core release` command * Now called `nf-core bump-versions` instead * New flag `--nextflow` to change the required nextflow version instead @@ -131,6 +142,7 @@ * New GitHub contributing instructions and pull request template ## [v1.1](https://github.com/nf-core/tools/releases/tag/1.1) - 2018-08-14 + Very large release containing lots of work from the first nf-core hackathon, held in SciLifeLab Stockholm. * The [Cookiecutter template](https://github.com/nf-core/cookiecutter) has been merged into tools diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md index b5fbadfd4d..3202cbe6a3 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md @@ -32,7 +32,7 @@ Typically, pull-requests are only fully reviewed when these tests are passing, t There are typically two types of tests that run: ### Lint Tests -The nf-core has a [set of guidelines](http://nf-co.re/guidelines) which all pipelines must adhere to. +The nf-core has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to. To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core lint ` command. If any failures or warnings are encountered, please follow the listed URL for more documentation. From c17ef0a0fd115b0eca1ff84e4e95afe9e7b0890b Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 30 Jul 2019 16:06:56 +0200 Subject: [PATCH 056/124] Replace URL for joining slack --- .github/CONTRIBUTING.md | 4 ++-- .../{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md | 4 ++-- .../{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md | 2 +- .../{{cookiecutter.name_noslash}}/docs/usage.md | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 936e7a876f..f2e172926e 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -6,7 +6,7 @@ We try to manage the required tasks for nf-core/tools using GitHub issues, you p However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) -> If you need help using or developing nf-core/tools then the best place to ask is the nf-core `tools` channel on [Slack](https://nf-core-invite.herokuapp.com/). +> If you need help using or developing nf-core/tools then the best place to ask is the nf-core `tools` channel on [Slack](https://nf-co.re/join/slack/). 
## Contribution workflow If you'd like to write some code for nf-core/tools, the standard workflow @@ -68,4 +68,4 @@ nf-core lint nf-core-testpipeline ``` ## Getting help -For further information/help, please consult the [nf-core/tools documentation](https://github.com/nf-core/tools#documentation) and don't hesitate to get in touch on the nf-core `tools` channel on [Slack](https://nf-core-invite.herokuapp.com/). +For further information/help, please consult the [nf-core/tools documentation](https://github.com/nf-core/tools#documentation) and don't hesitate to get in touch on the nf-core `tools` channel on [Slack](https://nf-co.re/join/slack/). diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md index 3202cbe6a3..12d63197ee 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md @@ -6,7 +6,7 @@ We try to manage the required tasks for {{ cookiecutter.name }} using GitHub iss However, don't be put off by this template - other more general issues and suggestions are welcome! Contributions to the code are even more welcome ;) -> If you need help using or modifying {{ cookiecutter.name }} then the best place to ask is on the pipeline channel on [Slack](https://nf-core-invite.herokuapp.com/). +> If you need help using or modifying {{ cookiecutter.name }} then the best place to ask is on the pipeline channel on [Slack](https://nf-co.re/join/slack/). @@ -44,4 +44,4 @@ If there are any failures then the automated tests fail. These tests are run both with the latest available version of Nextflow and also the minimum required version that is stated in the pipeline code. ## Getting help -For further information/help, please consult the [{{ cookiecutter.name }} documentation](https://github.com/{{ cookiecutter.name }}#documentation) and don't hesitate to get in touch on the pipeline channel on [Slack](https://nf-core-invite.herokuapp.com/). +For further information/help, please consult the [{{ cookiecutter.name }} documentation](https://github.com/{{ cookiecutter.name }}#documentation) and don't hesitate to get in touch on the pipeline channel on [Slack](https://nf-co.re/join/slack/). diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md index 09226d0d8d..1cda760094 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/CODE_OF_CONDUCT.md @@ -34,7 +34,7 @@ This Code of Conduct applies both within project spaces and in public spaces whe ## Enforcement -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on [Slack](https://nf-core-invite.herokuapp.com/). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on [Slack](https://nf-co.re/join/slack/). 
The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md index 36f8fe598c..abf63de0f4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md @@ -190,7 +190,7 @@ Wherever process-specific requirements are set in the pipeline, the default valu If you are likely to be running `nf-core` pipelines regularly it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this please can you test that the config file works with your pipeline of choice using the `-c` parameter (see definition below). You can then create a pull request to the `nf-core/configs` repository with the addition of your config file, associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and amending [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) to include your custom profile. -If you have any questions or issues please send us a message on [Slack](https://nf-core-invite.herokuapp.com/). +If you have any questions or issues please send us a message on [Slack](https://nf-co.re/join/slack/). ## AWS Batch specific parameters Running the pipeline on AWS Batch requires a couple of specific parameters to be set according to your AWS Batch configuration. Please use the `-awsbatch` profile and then specify all of the following parameters. From 2a1acb72cf8153e586b2c2c07bd46b3b0acd8cb8 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 30 Jul 2019 16:09:32 +0200 Subject: [PATCH 057/124] Changelog update --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5835ee3c06..704bae2040 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ to avoid weird behavior such as making an `s3:/` directory locally when using an AWS S3 bucket as the `--outdir`. * Fix workflow.onComplete() message when finishing pipeline +* Update URL for joining the nf-core slack to https://nf-co.re/join/slack ### Other From 6fa27c18118a6b7a5411b98b98b0ca2c478a0d1a Mon Sep 17 00:00:00 2001 From: MaxUlysse Date: Thu, 1 Aug 2019 11:46:34 +0200 Subject: [PATCH 058/124] feat: add direct link to the pipeline slack channel --- .../{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md index 12d63197ee..937aed8212 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.github/CONTRIBUTING.md @@ -44,4 +44,4 @@ If there are any failures then the automated tests fail. 
These tests are run both with the latest available version of Nextflow and also the minimum required version that is stated in the pipeline code. ## Getting help -For further information/help, please consult the [{{ cookiecutter.name }} documentation](https://github.com/{{ cookiecutter.name }}#documentation) and don't hesitate to get in touch on the pipeline channel on [Slack](https://nf-co.re/join/slack/). +For further information/help, please consult the [{{ cookiecutter.name }} documentation](https://github.com/{{ cookiecutter.name }}#documentation) and don't hesitate to get in touch on the [{{ cookiecutter.name }} pipeline channel](https://nfcore.slack.com/channels/{{ cookiecutter.name }}) on [Slack](https://nf-co.re/join/slack/). From af782db3b5952ee0c1dee3341343a9dedeea170e Mon Sep 17 00:00:00 2001 From: MaxUlysse Date: Thu, 1 Aug 2019 11:47:57 +0200 Subject: [PATCH 059/124] feat: add contributions and support heading with links to contibutions guidelines and link to the pipeline slack channel --- .../{{cookiecutter.name_noslash}}/README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index c5bf9c4522..dad0a49d7e 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -52,6 +52,12 @@ The {{ cookiecutter.name }} pipeline comes with documentation about the pipeline {{ cookiecutter.name }} was originally written by {{ cookiecutter.author }}. +## Contributions and Support + +If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md). + +For further information or help, don't hesitate to get in touch on [Slack](https://nfcore.slack.com/channels/{{ cookiecutter.name }}) (you can join with [this invite](https://nf-co.re/join/slack)). + ## Citation From 76273fe1dd26b0016e4c0030dce1af22b5b74262 Mon Sep 17 00:00:00 2001 From: MaxUlysse Date: Thu, 1 Aug 2019 11:51:01 +0200 Subject: [PATCH 060/124] feat: update CHANGELOG --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 704bae2040..6fefe8bf9f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -31,6 +31,8 @@ an AWS S3 bucket as the `--outdir`. * Fix workflow.onComplete() message when finishing pipeline * Update URL for joining the nf-core slack to https://nf-co.re/join/slack +* Add direct link to the pipeline slack channel in the contribution guidelines +* Add contributions and support heading with links to contribution guidelines and link to the pipeline slack channel in the main README ### Other From c34b04bf8039de39c3fd8a41a75cdaf2dbfc90ae Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Mon, 19 Aug 2019 14:32:55 +0200 Subject: [PATCH 061/124] Renders readme properly on Pypi --- CHANGELOG.md | 4 ++++ setup.py | 5 +---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 704bae2040..807b6ef9a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## v1.7dev +### Pypi package description + +* The readme should now be rendered properly on Pypi. 
+ ### Tools helper code * The tools `create` command now sets up a TEMPLATE branch for syncing diff --git a/setup.py b/setup.py index b616ae51d2..e9490e3179 100644 --- a/setup.py +++ b/setup.py @@ -8,9 +8,6 @@ with open('README.md') as f: readme = f.read() -with open('LICENSE') as f: - license = f.read() - setup( name = 'nf-core', version = version, @@ -21,7 +18,7 @@ author = 'Phil Ewels', author_email = 'phil.ewels@scilifelab.se', url = 'https://github.com/nf-core/tools', - license = license, + license = 'MIT', scripts = ['scripts/nf-core'], install_requires = [ 'cookiecutter', From 37e4be33dcdbc06956b820938807f3f4f392dc1e Mon Sep 17 00:00:00 2001 From: Lukas Heumos Date: Thu, 22 Aug 2019 14:48:41 +0200 Subject: [PATCH 062/124] [FIX] Missing r-rmarkdown dependency Fixes ``` ERROR ~ Error executing process > 'output_documentation (1)' Caused by: Process `output_documentation (1)` terminated with an error exit status (127) Command executed: markdown_to_html.r output.md results_description.html Command exit status: 127 Command output: (empty) Command error: /usr/bin/env: 'Rscript': No such file or directory ``` --- .../{{cookiecutter.name_noslash}}/environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml index 3d9eadf736..883d476964 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml @@ -9,3 +9,4 @@ dependencies: # TODO nf-core: Add required software dependencies here - bioconda::fastqc=0.11.8 - bioconda::multiqc=1.7 + - conda-forge::r-markdown=0.9 From e38f1580c71450616a3992865665547e823c98e7 Mon Sep 17 00:00:00 2001 From: Lukas Heumos Date: Thu, 22 Aug 2019 15:12:50 +0200 Subject: [PATCH 063/124] [FEATURE] Added R-base + r-markdown 0.9 -> 1.1 --- .../{{cookiecutter.name_noslash}}/environment.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml index 883d476964..ea7b78288b 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml @@ -8,5 +8,6 @@ channels: dependencies: # TODO nf-core: Add required software dependencies here - bioconda::fastqc=0.11.8 - - bioconda::multiqc=1.7 - - conda-forge::r-markdown=0.9 + - bioconda::multiqc=1. + - conda-forge::r-markdown=1.1 + - conda-forge::r-base=3.6.1 From d504ad6c2bb1dbe2e37dd9097843d90820f95859 Mon Sep 17 00:00:00 2001 From: Lukas Heumos Date: Thu, 22 Aug 2019 15:13:30 +0200 Subject: [PATCH 064/124] [FIX] Accidently removed correct version of multiqc --- .../{{cookiecutter.name_noslash}}/environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml index ea7b78288b..537942ae77 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/environment.yml @@ -8,6 +8,6 @@ channels: dependencies: # TODO nf-core: Add required software dependencies here - bioconda::fastqc=0.11.8 - - bioconda::multiqc=1. 
+ - bioconda::multiqc=1.7 - conda-forge::r-markdown=1.1 - conda-forge::r-base=3.6.1 From 6af4f8aaf96d210406bc51c8dd39e14102bd9fc4 Mon Sep 17 00:00:00 2001 From: Lukas Heumos Date: Sat, 24 Aug 2019 15:41:51 +0200 Subject: [PATCH 065/124] [CHANGELOG] Added r-base and r-markdown --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 807b6ef9a8..ca45f5124a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ an AWS S3 bucket as the `--outdir`. * Fix workflow.onComplete() message when finishing pipeline * Update URL for joining the nf-core slack to https://nf-co.re/join/slack +* Added conda-forge::r-markdown=1.1 and conda-forge::r-base=3.6.1 to environment ### Other From 191dd74e6949c43efb8aa6d27ab30409997b0314 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Wed, 18 Sep 2019 19:12:59 +0200 Subject: [PATCH 066/124] Fixing Parameter issues --- CHANGELOG.md | 1 + nf_core/workflow/parameters.py | 2 +- tests/workflow/example.json | 13 ++++++++----- tests/workflow/test_parameters.py | 4 ++-- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 807b6ef9a8..76a256fa06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ an AWS S3 bucket as the `--outdir`. * Fix workflow.onComplete() message when finishing pipeline * Update URL for joining the nf-core slack to https://nf-co.re/join/slack +* Fix Parameters JSON due to new versionized structure ### Other diff --git a/nf_core/workflow/parameters.py b/nf_core/workflow/parameters.py index 5dbe87dfcb..90b273306e 100644 --- a/nf_core/workflow/parameters.py +++ b/nf_core/workflow/parameters.py @@ -10,7 +10,7 @@ import nf_core.workflow.validation as vld -NFCORE_PARAMS_SCHEMA_URI = "https://nf-co.re/parameters.schema.json" +NFCORE_PARAMS_SCHEMA_URI = "https://nf-co.re/params/0.1.0dev/parameters.schema.json" class Parameters: """Contains a static factory method diff --git a/tests/workflow/example.json b/tests/workflow/example.json index 2a6b587158..003b4cfc6e 100644 --- a/tests/workflow/example.json +++ b/tests/workflow/example.json @@ -8,7 +8,8 @@ "render": "file", "default_value": "path/to/reads.fastq.gz", "pattern": ".*(\\.fastq$|\\.fastq\\.gz$)", - "group": "inputdata" + "group": "inputdata", + "required": false }, { "name": "index", @@ -18,7 +19,8 @@ "render": "file", "default_value": "path/to/index", "pattern": ".*", - "group": "inputdata" + "group": "inputdata", + "required": false }, { "name": "norm_factor", @@ -26,9 +28,10 @@ "usage": "Integer value that will be applied against input reads.", "type": "integer", "render": "range", - "choices": [1, 150], - "default_value": 1, - "group": "normalization" + "choices": ["1", "150"], + "default_value": "1", + "group": "normalization", + "required": false } ] } \ No newline at end of file diff --git a/tests/workflow/test_parameters.py b/tests/workflow/test_parameters.py index d374c1256d..dbb3af8519 100644 --- a/tests/workflow/test_parameters.py +++ b/tests/workflow/test_parameters.py @@ -13,7 +13,7 @@ WD = os.path.dirname(__file__) PATH_WORKING_EXAMPLE = os.path.join(WD, 'example.json') -SCHEMA_URI = "https://nf-co.re/parameters.schema.json" +SCHEMA_URI = "https://nf-co.re/params/0.1.0dev/parameters.schema.json" @pytest.fixture(scope="class") def schema(): @@ -69,6 +69,6 @@ def test_validation(schema): def test_validation_with_success(schema): """Tests the parameter objects against the JSON schema.""" parameter = pms.Parameter.builder().name("width").param_type("integer") \ - .default(2).label("The 
width of a table.").render("textfield").required(False).build() + .default("2").label("The width of a table.").render("range").required(False).build() params_in_json = pms.Parameters.in_full_json([parameter]) jsonschema.validate(json.loads(pms.Parameters.in_full_json([parameter])), json.loads(schema)) From 10a2c71e25e55103ee260d7f17ab08422afde1cc Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Wed, 18 Sep 2019 19:16:04 +0200 Subject: [PATCH 067/124] Create Dev too please --- CHANGELOG.md | 2 +- nf_core/create.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 807b6ef9a8..f202c65cee 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,7 @@ ### Tools helper code -* The tools `create` command now sets up a TEMPLATE branch for syncing +* The tools `create` command now sets up a `TEMPLATE` and a `dev` branch for syncing ### Syncing diff --git a/nf_core/create.py b/nf_core/create.py index c1991bb24b..323181f072 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -104,5 +104,6 @@ def git_init_pipeline(self): repo.index.commit("initial template build from nf-core/tools, version {}".format(nf_core.__version__)) #Add TEMPLATE branch to git repository repo.git.branch('TEMPLATE') + repo.git.branch('dev') logging.info("Done. Remember to add a remote and push to GitHub:\n cd {}\n git remote add origin git@github.com:USERNAME/REPO_NAME.git\n git push --all origin".format(self.outdir)) - logging.info("This will also push your newly created TEMPLATE branch for syncing.") + logging.info("This will also push your newly created dev branch and the TEMPLATE branch for syncing.") \ No newline at end of file From 70b28a66210f16847ee685b7c7259cefe572732c Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Thu, 19 Sep 2019 14:41:27 +0200 Subject: [PATCH 068/124] Changes nf-core cache dir to user's home --- nf_core/utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 227bc903ed..ffb1397f6d 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -75,8 +75,9 @@ def setup_requests_cachedir(): """ # Only import it if we need it import requests_cache + - cachedir = os.path.join(tempfile.gettempdir(), 'nfcore_cache') + cachedir = os.path.join(os.getenv("HOME"), '.nfcore_cache') if not os.path.exists(cachedir): os.mkdir(cachedir) requests_cache.install_cache( @@ -84,6 +85,3 @@ def setup_requests_cachedir(): expire_after=datetime.timedelta(hours=1), backend='sqlite', ) - # Make world-writeable so that multi-user installations work - os.chmod(cachedir, 0o777) - os.chmod(os.path.join(cachedir, 'nfcore_cache.sqlite'), 0o777) From bd281142e3fddf275b59f2bee608282e0160813f Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Thu, 19 Sep 2019 14:55:35 +0200 Subject: [PATCH 069/124] Updates changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 704bae2040..216057fff6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Tools helper code * The tools `create` command now sets up a TEMPLATE branch for syncing +* Fixed issue [379](https://github.com/nf-core/tools/issues/379) ### Syncing From 5cc8e99a79d76b142b3a455350602e279b7d5c18 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 19 Sep 2019 17:43:34 +0200 Subject: [PATCH 070/124] Fix markdownlint --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f202c65cee..633b2500b3 100644 --- a/CHANGELOG.md +++ 
b/CHANGELOG.md @@ -8,7 +8,7 @@ ### Tools helper code -* The tools `create` command now sets up a `TEMPLATE` and a `dev` branch for syncing +* The tools `create` command now sets up a `TEMPLATE` and a `dev` branch for syncing ### Syncing From f71f037708eb4ca573abf3ecad37dab7d37aeea6 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Thu, 19 Sep 2019 18:27:20 +0200 Subject: [PATCH 071/124] Adding in more time (#378) * Adding in more time * Fix markdownlint * Fix stuff * Remove whitespace --- CHANGELOG.md | 1 + .../{{cookiecutter.name_noslash}}/conf/base.config | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f43c611d2f..a920c8f0d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,7 @@ an AWS S3 bucket as the `--outdir`. * Fix workflow.onComplete() message when finishing pipeline * Update URL for joining the nf-core slack to https://nf-co.re/join/slack +* [Increased default time limit](https://github.com/nf-core/tools/issues/370) to 4h * Add direct link to the pipeline slack channel in the contribution guidelines * Add contributions and support heading with links to contribution guidelines and link to the pipeline slack channel in the main README * Fix Parameters JSON due to new versionized structure diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config index 2487563553..47f3e530c7 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config @@ -14,7 +14,7 @@ process { // TODO nf-core: Check the defaults for all processes cpus = { check_max( 1 * task.attempt, 'cpus' ) } memory = { check_max( 8.GB * task.attempt, 'memory' ) } - time = { check_max( 2.h * task.attempt, 'time' ) } + time = { check_max( 4.h * task.attempt, 'time' ) } errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' } maxRetries = 1 From 097398a536d52c886f6d0931470ceaf983d0f3c3 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Fri, 20 Sep 2019 11:43:01 +0200 Subject: [PATCH 072/124] Updates caching method docs --- nf_core/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index ffb1397f6d..37a9d19cb5 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -70,8 +70,8 @@ def fetch_wf_config(wf_path, wf=None): def setup_requests_cachedir(): """Sets up local caching for faster remote HTTP requests. - Caching directory will be generated by tempfile.gettempdir() under - a nfcore_cache subdir. + Caching directory will be set up in the user's home directory under + a .nfcore_cache subdir. 
""" # Only import it if we need it import requests_cache From f9fffc8da9f1c86757b4d2f8bb6264e5487b9c0f Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Fri, 20 Sep 2019 11:45:24 +0200 Subject: [PATCH 073/124] Removes unused import statement --- nf_core/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 37a9d19cb5..0b1eec5bc9 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -8,7 +8,6 @@ import logging import os import subprocess -import tempfile def fetch_wf_config(wf_path, wf=None): """Uses Nextflow to retrieve the the configuration variables From 9b4a77319470902aaefe4e69bcf5bdce263beed3 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Fri, 20 Sep 2019 13:03:21 +0200 Subject: [PATCH 074/124] chore: Add Code of Conduct for Tools (#385) * chore: Add Code of Conduct for Tools This adds a code of conduct for the nf-core/tools project to address #384 Closes #384 * Add small changelog --- CHANGELOG.md | 1 + nf_core/CODE_OF_CONDUCT.md | 46 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 nf_core/CODE_OF_CONDUCT.md diff --git a/CHANGELOG.md b/CHANGELOG.md index a920c8f0d0..c783ad634d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,7 @@ ### Other * Bump `conda` to 4.6.14 in base nf-core Dockerfile +* Added a Code of Conduct to nf-core/tools ## v1.6 diff --git a/nf_core/CODE_OF_CONDUCT.md b/nf_core/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..1cda760094 --- /dev/null +++ b/nf_core/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 
+ +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team on [Slack](https://nf-co.re/join/slack/). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ From 86e0bde0dfd478317134790c6a8938cb1261fa0f Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Fri, 20 Sep 2019 14:14:53 +0200 Subject: [PATCH 075/124] Wrong directory for code of conduct (#386) * chore: Add Code of Conduct for Tools This adds a code of conduct for the nf-core/tools project to address #384 Closes #384 * Add small changelog * Shift this to the root directory --- CHANGELOG.md | 2 +- nf_core/CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.md | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename nf_core/CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.md (100%) diff --git a/CHANGELOG.md b/CHANGELOG.md index c783ad634d..e5f48e35fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,7 +44,7 @@ ### Other * Bump `conda` to 4.6.14 in base nf-core Dockerfile -* Added a Code of Conduct to nf-core/tools +* Added a Code of Conduct to nf-core/tools, as only the template had this before ## v1.6 diff --git a/nf_core/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md similarity index 100% rename from nf_core/CODE_OF_CONDUCT.md rename to CODE_OF_CONDUCT.md From 1a95fdd0402fdb54df8965ca7175fe3b3adf10de Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Fri, 20 Sep 2019 14:50:16 +0200 Subject: [PATCH 076/124] Refactors cache path --- nf_core/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 0b1eec5bc9..6993e2f927 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -76,11 +76,11 @@ def setup_requests_cachedir(): import requests_cache - cachedir = os.path.join(os.getenv("HOME"), '.nfcore_cache') + cachedir = os.path.join(os.getenv("HOME"), os.path.join('.nfcore', 'cache')) if not os.path.exists(cachedir): - os.mkdir(cachedir) + os.mkdirs(cachedir) requests_cache.install_cache( - os.path.join(cachedir, 'nfcore_cache'), + os.path.join(cachedir, 'github_info'), expire_after=datetime.timedelta(hours=1), backend='sqlite', ) From cb39811496cad52e1eea6ba4380929531f0ad9c6 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Fri, 20 Sep 2019 14:54:46 +0200 Subject: [PATCH 077/124] Refactors function call --- nf_core/utils.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/nf_core/utils.py b/nf_core/utils.py index 6993e2f927..a5ef17f7ab 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -78,7 +78,7 @@ def setup_requests_cachedir(): cachedir = os.path.join(os.getenv("HOME"), os.path.join('.nfcore', 'cache')) if not os.path.exists(cachedir): - os.mkdirs(cachedir) + os.makedirs(cachedir) requests_cache.install_cache( os.path.join(cachedir, 'github_info'), expire_after=datetime.timedelta(hours=1), From 3d90c74c15a96d265fd5b5e9fd62e68ce65f8a56 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Sat, 21 Sep 2019 19:09:10 +0200 Subject: [PATCH 078/124] Introduces stable parameter schema reference --- README.md | 2 +- nf_core/workflow/parameters.py | 2 +- tests/workflow/test_parameters.py | 2 +- tests/workflow/test_validator.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 190da47a11..4403afcce3 100644 --- a/README.md +++ b/README.md @@ -143,7 +143,7 @@ Some nextflow pipelines have a considerable number of command line flags that ca To help with this, the `nf-core launch` command uses an interactive command-line wizard tool to prompt you for values for running nextflow and the pipeline parameters. -If the pipeline in question has a `parameters.settings.json` file following the [nf-core parameter JSON schema](https://nf-co.re/parameters.schema.json), parameters will be grouped and have associated description text and variable typing. +If the pipeline in question has a `parameters.settings.json` file following the [nf-core parameter JSON schema](https://nf-co.re/parameter-schema), parameters will be grouped and have associated description text and variable typing. Nextflow `params` variables are saved in to a JSON file called `nfx-params.json` and used by nextflow with the `-params-file` flag. This makes it easier to reuse these in the future. 
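As a hedged aside (not code from any patch in this series): the schema-driven checks that the following diffs exercise come down to a few lines of Python. The sketch below assumes the `jsonschema` and `requests` packages and uses the versioned schema URL introduced in the patches that follow; the file path and function name are illustrative only.

```python
import json

import jsonschema
import requests

# Versioned nf-core parameter schema, as referenced in nf_core/workflow/parameters.py below
SCHEMA_URI = "https://nf-co.re/parameter-schema/0.1.0/parameters.schema.json"


def validate_parameter_file(params_json_path):
    """Validate a parameters.settings.json file against the nf-core parameter schema.

    Raises jsonschema.exceptions.ValidationError if the file does not conform.
    """
    with open(params_json_path) as fh:
        params = json.load(fh)
    schema = requests.get(SCHEMA_URI).json()
    jsonschema.validate(params, schema)
    return params
```

Used on `tests/workflow/example.json`, this broadly mirrors the validation that `tests/workflow/test_parameters.py` performs once the `required` flags and string-typed defaults shown below are in place.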
diff --git a/nf_core/workflow/parameters.py b/nf_core/workflow/parameters.py index 90b273306e..ee6434ba02 100644 --- a/nf_core/workflow/parameters.py +++ b/nf_core/workflow/parameters.py @@ -10,7 +10,7 @@ import nf_core.workflow.validation as vld -NFCORE_PARAMS_SCHEMA_URI = "https://nf-co.re/params/0.1.0dev/parameters.schema.json" +NFCORE_PARAMS_SCHEMA_URI = "https://nf-co.re/params/0.1.0/parameters.schema.json" class Parameters: """Contains a static factory method diff --git a/tests/workflow/test_parameters.py b/tests/workflow/test_parameters.py index dbb3af8519..8be8b35f11 100644 --- a/tests/workflow/test_parameters.py +++ b/tests/workflow/test_parameters.py @@ -13,7 +13,7 @@ WD = os.path.dirname(__file__) PATH_WORKING_EXAMPLE = os.path.join(WD, 'example.json') -SCHEMA_URI = "https://nf-co.re/params/0.1.0dev/parameters.schema.json" +SCHEMA_URI = "https://nf-co.re/params/0.1.0/parameters.schema.json" @pytest.fixture(scope="class") def schema(): diff --git a/tests/workflow/test_validator.py b/tests/workflow/test_validator.py index f7c2e6cefa..4531068aae 100644 --- a/tests/workflow/test_validator.py +++ b/tests/workflow/test_validator.py @@ -12,7 +12,7 @@ WD = os.path.dirname(__file__) PATH_WORKING_EXAMPLE = os.path.join(WD, 'example.json') -SCHEMA_URI = "https://nf-co.re/parameters.schema.json" +SCHEMA_URI = "https://nf-co.re/params/0.1.0/parameters.schema.json" @pytest.fixture(scope="class") def valid_integer_param(): From 11f15400915a5cde9a03522a1f47facb5f4c0d8a Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Sat, 21 Sep 2019 19:18:35 +0200 Subject: [PATCH 079/124] Update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f8f730c132..b74c81d810 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ * The tools `create` command now sets up a `TEMPLATE` and a `dev` branch for syncing * Fixed issue [379](https://github.com/nf-core/tools/issues/379) +* nf-core launch now uses stable parameter schema version 0.1.0 ### Syncing From 219957eff33c18ddd72f78cea764bfda07dbce62 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Sat, 21 Sep 2019 19:25:12 +0200 Subject: [PATCH 080/124] Corrects schema URL --- nf_core/workflow/parameters.py | 2 +- tests/workflow/test_parameters.py | 2 +- tests/workflow/test_validator.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/workflow/parameters.py b/nf_core/workflow/parameters.py index ee6434ba02..424983e8cc 100644 --- a/nf_core/workflow/parameters.py +++ b/nf_core/workflow/parameters.py @@ -10,7 +10,7 @@ import nf_core.workflow.validation as vld -NFCORE_PARAMS_SCHEMA_URI = "https://nf-co.re/params/0.1.0/parameters.schema.json" +NFCORE_PARAMS_SCHEMA_URI = "https://nf-co.re/parameter-schema/0.1.0/parameters.schema.json" class Parameters: """Contains a static factory method diff --git a/tests/workflow/test_parameters.py b/tests/workflow/test_parameters.py index 8be8b35f11..ef6812a9de 100644 --- a/tests/workflow/test_parameters.py +++ b/tests/workflow/test_parameters.py @@ -13,7 +13,7 @@ WD = os.path.dirname(__file__) PATH_WORKING_EXAMPLE = os.path.join(WD, 'example.json') -SCHEMA_URI = "https://nf-co.re/params/0.1.0/parameters.schema.json" +SCHEMA_URI = "https://nf-co.re/parameter-schema/0.1.0/parameters.schema.json" @pytest.fixture(scope="class") def schema(): diff --git a/tests/workflow/test_validator.py b/tests/workflow/test_validator.py index 4531068aae..07192125f8 100644 --- a/tests/workflow/test_validator.py +++ b/tests/workflow/test_validator.py @@ 
-12,7 +12,7 @@ WD = os.path.dirname(__file__) PATH_WORKING_EXAMPLE = os.path.join(WD, 'example.json') -SCHEMA_URI = "https://nf-co.re/params/0.1.0/parameters.schema.json" +SCHEMA_URI = "https://nf-co.re/parameter-schema/0.1.0/parameters.schema.json" @pytest.fixture(scope="class") def valid_integer_param(): From 78e0c75ed29e33ddaeda1d9d69d0d19008f58749 Mon Sep 17 00:00:00 2001 From: Sven Fillinger Date: Sat, 21 Sep 2019 19:29:27 +0200 Subject: [PATCH 081/124] Removes trailing space --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b74c81d810..b8a0c1d0da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ * The tools `create` command now sets up a `TEMPLATE` and a `dev` branch for syncing * Fixed issue [379](https://github.com/nf-core/tools/issues/379) -* nf-core launch now uses stable parameter schema version 0.1.0 +* nf-core launch now uses stable parameter schema version 0.1.0 ### Syncing From d09930f70cd4d6ee5788d2cf81f90744b0f65adb Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Tue, 24 Sep 2019 16:12:10 +0200 Subject: [PATCH 082/124] Add Patch to enable PRs coming from patch branch --- CHANGELOG.md | 2 ++ docs/lint_errors.md | 2 +- nf_core/lint.py | 8 ++++---- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e48b39b09c..59ab5af3e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ * The tools `create` command now sets up a `TEMPLATE` and a `dev` branch for syncing * Fixed issue [379](https://github.com/nf-core/tools/issues/379) * nf-core launch now uses stable parameter schema version 0.1.0 +* Check that PR from patch or dev branch is acceptable by linting ### Syncing @@ -25,6 +26,7 @@ ### Template +* Add new code for Travis CI to allow PRs from patch branches too * Fix small typo in central readme of tools for future releases * Small code polishing + typo fix in the template main.nf file * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) diff --git a/docs/lint_errors.md b/docs/lint_errors.md index 7139c8b3a2..1e9cdc55f3 100644 --- a/docs/lint_errors.md +++ b/docs/lint_errors.md @@ -127,7 +127,7 @@ This test fails if the following happens: ```yaml before_install: - - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' ``` ## Error #6 - Repository `README.md` tests ## {#6} diff --git a/nf_core/lint.py b/nf_core/lint.py index ba0765d9bd..51fe117683 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -454,14 +454,14 @@ def check_ci_config(self): if os.path.isfile(fn): with open(fn, 'r') as fh: ciconf = yaml.safe_load(fh) - # Check that we have the master branch protection - travisMasterCheck = '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' + # Check that we have the master branch protection, but allow patch as well + travisMasterCheck = '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ 
$TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' try: assert(travisMasterCheck in ciconf.get('before_install', {})) except AssertionError: - self.failed.append((5, "Continuous integration must check for master branch PRs: '{}'".format(fn))) + self.failed.append((5, "Continuous integration must check for master/patch branch PRs: '{}'".format(fn))) else: - self.passed.append((5, "Continuous integration checks for master branch PRs: '{}'".format(fn))) + self.passed.append((5, "Continuous integration checks for master/patch branch PRs: '{}'".format(fn))) # Check that the nf-core linting runs try: assert('nf-core lint ${TRAVIS_BUILD_DIR}' in ciconf['script']) From 1843f99a991fecb5d6011bee8032145fafe9658f Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Tue, 24 Sep 2019 16:32:46 +0200 Subject: [PATCH 083/124] Adjusting testing --- tests/lint_examples/minimal_working_example/.travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lint_examples/minimal_working_example/.travis.yml b/tests/lint_examples/minimal_working_example/.travis.yml index 7b04d4d0ad..85b43c60d8 100644 --- a/tests/lint_examples/minimal_working_example/.travis.yml +++ b/tests/lint_examples/minimal_working_example/.travis.yml @@ -11,7 +11,7 @@ matrix: before_install: # PRs to master are only ok if coming from dev branch - - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' # Pull the docker image first so the test doesn't wait for this - docker pull nfcore/tools:dev # Fake the tag locally so that the pipeline runs properly From 4271ca30aa339b89093ccd1921f6f9939b16b782 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Tue, 24 Sep 2019 16:38:02 +0200 Subject: [PATCH 084/124] Add to template to have tests pass :+1: --- .../pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml index 8eb1fdd31f..31c5205eb3 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/.travis.yml @@ -9,7 +9,7 @@ matrix: before_install: # PRs to master are only ok if coming from dev branch - - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' # Pull the docker image first so the test doesn't wait for this - docker pull {{ cookiecutter.name_docker }}:dev # Fake the tag locally so that the pipeline runs properly From aac8e8b265dd1ba3be75d3c1d113be18f491b13b Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 26 Sep 2019 15:39:53 +0200 Subject: [PATCH 085/124] Travis: Test Python 3.7 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index b3894a6c64..8cc70c7355 100644 --- a/.travis.yml +++ 
b/.travis.yml @@ -5,6 +5,7 @@ python: - '2.7' - '3.5' - '3.6' + - '3.7' before_install: # PRs to master are only ok if coming from dev branch - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' From 9229218af02413179058c448b518918458bc3b8e Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 26 Sep 2019 16:05:56 +0200 Subject: [PATCH 086/124] Don't try to push built docs if on a branch --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8cc70c7355..e3e0e50a5c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -41,7 +41,7 @@ after_success: jobs: include: - stage: docs generation - if: (branch = "master" OR branch = "dev") AND type = push + if: (branch = "master" OR branch = "dev") AND type = push AND repo = nf-core/tools script: bash ./bin/push.sh deploy: From a35d554a0d4178913da7f7a0e0ede8b951c2cc41 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 26 Sep 2019 17:00:48 +0200 Subject: [PATCH 087/124] Python 3.7 fixes - Don't use os.errno as this is gone in Python 3.7 (import errno instead) - requests_cache cache is python version specific, so include version number in cache directory - Remove single usage of nose --- bin/syncutils/utils.py | 4 ++-- nf_core/download.py | 3 ++- nf_core/launch.py | 3 ++- nf_core/list.py | 5 +++-- nf_core/utils.py | 8 +++++--- tests/test_list.py | 15 +++++++-------- 6 files changed, 21 insertions(+), 17 deletions(-) diff --git a/bin/syncutils/utils.py b/bin/syncutils/utils.py index 871c15c07f..8d3ac94e09 100644 --- a/bin/syncutils/utils.py +++ b/bin/syncutils/utils.py @@ -1,3 +1,4 @@ +import errno import os import requests import subprocess @@ -13,7 +14,7 @@ def fetch_wf_config(wf_path): with open(os.devnull, 'w') as devnull: nfconfig_raw = subprocess.check_output(['nextflow', 'config', '-flat', wf_path], stderr=devnull) except OSError as e: - if e.errno == os.errno.ENOENT: + if e.errno == errno.ENOENT: raise AssertionError("It looks like Nextflow is not installed. 
It is required for most nf-core functions.") except subprocess.CalledProcessError as e: raise AssertionError("`nextflow config` returned non-zero error code: %s,\n %s", e.returncode, e.output) @@ -61,4 +62,3 @@ def repos_without_template_branch(pipeline_names): print("WARNING: nf-core/{} had no TEMPLATE branch!".format(pipeline)) return pipelines_without_template - diff --git a/nf_core/download.py b/nf_core/download.py index 176f2ba159..da98c771a1 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -3,6 +3,7 @@ from __future__ import print_function +import errno from io import BytesIO import logging import hashlib @@ -208,7 +209,7 @@ def pull_singularity_image(self, container): try: subprocess.call(singularity_command) except OSError as e: - if e.errno == os.errno.ENOENT: + if e.errno == errno.ENOENT: # Singularity is not installed logging.error('Singularity is not installed!') else: diff --git a/nf_core/launch.py b/nf_core/launch.py index d4e05e3453..3ad9f98c9b 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -5,6 +5,7 @@ from collections import OrderedDict import click +import errno import jsonschema import logging import os @@ -98,7 +99,7 @@ def get_local_wf(self): with open(os.devnull, 'w') as devnull: subprocess.check_output(['nextflow', 'pull', self.workflow], stderr=devnull) except OSError as e: - if e.errno == os.errno.ENOENT: + if e.errno == errno.ENOENT: raise AssertionError("It looks like Nextflow is not installed. It is required for most nf-core functions.") except subprocess.CalledProcessError as e: raise AssertionError("`nextflow pull` returned non-zero error code: %s,\n %s", e.returncode, e.output) diff --git a/nf_core/list.py b/nf_core/list.py index 07d2149954..6ae30dc29c 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -6,6 +6,7 @@ import click import datetime +import errno import json import logging import os @@ -96,7 +97,7 @@ def get_local_nf_workflows(self): with open(os.devnull, 'w') as devnull: nflist_raw = subprocess.check_output(['nextflow', 'list'], stderr=devnull) except OSError as e: - if e.errno == os.errno.ENOENT: + if e.errno == errno.ENOENT: raise AssertionError("It looks like Nextflow is not installed. It is required for most nf-core functions.") except subprocess.CalledProcessError as e: raise AssertionError("`nextflow list` returned non-zero error code: %s,\n %s", e.returncode, e.output) @@ -287,7 +288,7 @@ def get_local_nf_workflow_details(self): with open(os.devnull, 'w') as devnull: nfinfo_raw = subprocess.check_output(['nextflow', 'info', '-d', self.full_name], stderr=devnull) except OSError as e: - if e.errno == os.errno.ENOENT: + if e.errno == errno.ENOENT: raise AssertionError("It looks like Nextflow is not installed. 
It is required for most nf-core functions.") except subprocess.CalledProcessError as e: raise AssertionError("`nextflow list` returned non-zero error code: %s,\n %s", e.returncode, e.output) diff --git a/nf_core/utils.py b/nf_core/utils.py index a5ef17f7ab..a1e4038c3f 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -4,10 +4,12 @@ """ import datetime +import errno import json import logging import os import subprocess +import sys def fetch_wf_config(wf_path, wf=None): """Uses Nextflow to retrieve the the configuration variables @@ -47,7 +49,7 @@ def fetch_wf_config(wf_path, wf=None): with open(os.devnull, 'w') as devnull: nfconfig_raw = subprocess.check_output(['nextflow', 'config', '-flat', wf_path], stderr=devnull) except OSError as e: - if e.errno == os.errno.ENOENT: + if e.errno == errno.ENOENT: raise AssertionError("It looks like Nextflow is not installed. It is required for most nf-core functions.") except subprocess.CalledProcessError as e: raise AssertionError("`nextflow config` returned non-zero error code: %s,\n %s", e.returncode, e.output) @@ -74,9 +76,9 @@ def setup_requests_cachedir(): """ # Only import it if we need it import requests_cache - - cachedir = os.path.join(os.getenv("HOME"), os.path.join('.nfcore', 'cache')) + pyversion = '.'.join(str(v) for v in sys.version_info[0:3]) + cachedir = os.path.join(os.getenv("HOME"), os.path.join('.nfcore', 'cache_'+pyversion)) if not os.path.exists(cachedir): os.makedirs(cachedir) requests_cache.install_cache( diff --git a/tests/test_list.py b/tests/test_list.py index 43609a506e..f54816d8e8 100644 --- a/tests/test_list.py +++ b/tests/test_list.py @@ -11,7 +11,6 @@ import time import unittest -from nose.tools import raises from datetime import datetime class TestLint(unittest.TestCase): @@ -38,7 +37,7 @@ def test_pretty_datetime(self): now_ts = time.mktime(now.timetuple()) nf_core.list.pretty_date(now_ts) - @raises(AssertionError) + @pytest.mark.xfail(raises=AssertionError) def test_local_workflows_and_fail(self): """ Test the local workflow class and try to get local Nextflow workflow information """ @@ -67,7 +66,7 @@ def test_local_workflows_compare_and_fail_silently(self): rwf_ex = nf_core.list.RemoteWorkflow(remote) rwf_ex.commit_sha = "aw3s0meh1sh" rwf_ex.releases = [{'tag_sha': "aw3s0meh1sh"}] - + wfs.local_workflows.append(lwf_ex) wfs.remote_workflows.append(rwf_ex) @@ -83,7 +82,7 @@ def test_local_workflows_compare_and_fail_silently(self): wfs.compare_remote_local() rwf_ex.releases = None - + @mock.patch('nf_core.list.LocalWorkflow') def test_parse_local_workflow_and_succeed(self, mock_local_wf): test_path = '/tmp/nxf/nf-core' @@ -92,7 +91,7 @@ def test_parse_local_workflow_and_succeed(self, mock_local_wf): if not os.environ.get('NXF_ASSETS'): os.environ['NXF_ASSETS'] = '/tmp/nxf' assert os.environ['NXF_ASSETS'] == '/tmp/nxf' - with open('/tmp/nxf/nf-core/dummy-wf', 'w') as f: + with open('/tmp/nxf/nf-core/dummy-wf', 'w') as f: f.write('dummy') workflows_obj = nf_core.list.Workflows() workflows_obj.get_local_nf_workflows() @@ -108,11 +107,11 @@ def test_parse_local_workflow_home(self, mock_subprocess, mock_local_wf, mock_en mock_env.side_effect = '/tmp/nxf' assert os.environ['NXF_ASSETS'] == '/tmp/nxf' - with open('/tmp/nxf/nf-core/dummy-wf', 'w') as f: + with open('/tmp/nxf/nf-core/dummy-wf', 'w') as f: f.write('dummy') workflows_obj = nf_core.list.Workflows() workflows_obj.get_local_nf_workflows() - + @mock.patch('os.stat') @mock.patch('git.Repo') def test_local_workflow_investigation(self, mock_repo, mock_stat): @@ 
-121,7 +120,7 @@ def test_local_workflow_investigation(self, mock_repo, mock_stat): mock_repo.head.commit.hexsha = 'h00r4y' mock_stat.st_mode = 1 local_wf.get_local_nf_workflow_details() - + def test_worflow_filter(self): workflows_obj = nf_core.list.Workflows(["rna", "myWF"]) From 9c3d8a7ad5100fbee4e87b826209d26eb47cde54 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Thu, 26 Sep 2019 17:05:57 +0200 Subject: [PATCH 088/124] Changelog --- CHANGELOG.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 59ab5af3e5..827e598aab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,9 +2,9 @@ ## v1.7dev -### Pypi package description +### PyPI package description -* The readme should now be rendered properly on Pypi. +* The readme should now be rendered properly on PyPI. ### Tools helper code @@ -12,6 +12,7 @@ * Fixed issue [379](https://github.com/nf-core/tools/issues/379) * nf-core launch now uses stable parameter schema version 0.1.0 * Check that PR from patch or dev branch is acceptable by linting +* Made code compatible with Python 3.7 ### Syncing From deb2e2c068495454b47b2802d55d99a5f0421e7a Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 4 Oct 2019 12:32:01 +0200 Subject: [PATCH 089/124] nf-core download: Get nf-core/configs Closes #240 --- nf_core/download.py | 45 +++++++++++++++++++ .../minimal_working_example/nextflow.config | 2 + tests/test_download.py | 33 ++++++++++++++ 3 files changed, 80 insertions(+) diff --git a/nf_core/download.py b/nf_core/download.py index da98c771a1..7953233719 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -64,6 +64,11 @@ def download_workflow(self): logging.info("Downloading workflow files from GitHub") self.download_wf_files() + # Download the centralised configs + logging.info("Downloading centralised configs from GitHub") + self.download_configs() + self.wf_use_local_configs() + # Download the singularity images if self.singularity: logging.debug("Fetching container names for workflow") @@ -176,6 +181,46 @@ def download_wf_files(self): for fname in filelist: os.chmod(os.path.join(dirpath, fname), 0o775) + def download_configs(self): + """Downloads the centralised config profiles from nf-core/configs to :attr:`self.outdir`. 
+ """ + configs_zip_url = "https://github.com/nf-core/configs/archive/master.zip" + configs_local_dir = "configs-master" + logging.debug("Downloading {}".format(configs_zip_url)) + + # Download GitHub zip file into memory and extract + url = requests.get(configs_zip_url) + zipfile = ZipFile(BytesIO(url.content)) + zipfile.extractall(self.outdir) + + # Rename the internal directory name to be more friendly + os.rename(os.path.join(self.outdir, configs_local_dir), os.path.join(self.outdir, 'configs')) + + # Make downloaded files executable + for dirpath, subdirs, filelist in os.walk(os.path.join(self.outdir, 'configs')): + for fname in filelist: + os.chmod(os.path.join(dirpath, fname), 0o775) + + def wf_use_local_configs(self): + """Edit the downloaded nextflow.config file to use the local config files + """ + nfconfig_fn = os.path.join(self.outdir, 'workflow', 'nextflow.config') + find_str = 'https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}' + repl_str = '../configs/' + logging.debug("Editing params.custom_config_base in {}".format(nfconfig_fn)) + + # Load the nextflow.config file into memory + with open(nfconfig_fn, 'r') as nfconfig_fh: + nfconfig = nfconfig_fh.read() + + # Replace the target string + nfconfig = nfconfig.replace(find_str, repl_str) + + # Write the file out again + with open(nfconfig_fn, 'w') as nfconfig_fh: + nfconfig_fh.write(nfconfig) + + def find_container_images(self): """ Find container image names for workflow """ diff --git a/tests/lint_examples/minimal_working_example/nextflow.config b/tests/lint_examples/minimal_working_example/nextflow.config index 2dda290924..b0f3a01f56 100644 --- a/tests/lint_examples/minimal_working_example/nextflow.config +++ b/tests/lint_examples/minimal_working_example/nextflow.config @@ -3,6 +3,8 @@ params { outdir = './results' reads = "data/*.fastq" singleEnd = false + custom_config_version = 'master' + custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" } process { diff --git a/tests/test_download.py b/tests/test_download.py index dd26dac841..eaa59d8755 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -3,6 +3,7 @@ """ import nf_core.list +import nf_core.utils from nf_core.download import DownloadWorkflow import hashlib @@ -15,6 +16,8 @@ import tempfile import unittest +PATH_WORKING_EXAMPLE = os.path.join(os.path.dirname(__file__), 'lint_examples/minimal_working_example') + class DownloadTest(unittest.TestCase): # @@ -113,6 +116,36 @@ def test_download_wf_files(self): download_obj.wf_download_url = "https://github.com/nf-core/methylseq/archive/1.0.zip" download_obj.download_wf_files() + # + # Tests for 'download_configs' + # + def test_download_configs(self): + download_obj = DownloadWorkflow( + pipeline = "dummy", + release = "1.2.0", + outdir = tempfile.mkdtemp() + ) + download_obj.download_configs() + + # + # Tests for 'wf_use_local_configs' + # + def test_wf_use_local_configs(self): + # Get a workflow and configs + test_outdir = tempfile.mkdtemp() + download_obj = DownloadWorkflow( + pipeline = "dummy", + release = "1.2.0", + outdir = test_outdir + ) + shutil.copytree(PATH_WORKING_EXAMPLE, os.path.join(test_outdir, 'workflow')) + download_obj.download_configs() + + # Test the function + download_obj.wf_use_local_configs() + wf_config = nf_core.utils.fetch_wf_config(os.path.join(test_outdir, 'workflow')) + assert wf_config['params.custom_config_base'] == "'../configs/'" + # # Tests for 'find_container_images' # From 
887df9d8dd6465a8fc43b63f1d6cbc527c8a212d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 4 Oct 2019 13:53:04 +0200 Subject: [PATCH 090/124] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 827e598aab..528c5497ea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ * nf-core launch now uses stable parameter schema version 0.1.0 * Check that PR from patch or dev branch is acceptable by linting * Made code compatible with Python 3.7 +* The `download` command now also fetches institutional configs from nf-core/configs ### Syncing From 0912d6881ef3e30c410e777329f4457583fe0483 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 4 Oct 2019 17:07:19 +0200 Subject: [PATCH 091/124] download: compress files into archive --- nf_core/download.py | 70 +++++++++++++++++++++++++++++++++++++++++---- scripts/nf-core | 10 +++++-- 2 files changed, 72 insertions(+), 8 deletions(-) diff --git a/nf_core/download.py b/nf_core/download.py index 7953233719..8f6a7c8499 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -9,8 +9,10 @@ import hashlib import os import requests +import shutil import subprocess import sys +import tarfile from zipfile import ZipFile import nf_core.list @@ -28,11 +30,15 @@ class DownloadWorkflow(object): singularity (bool): Flag, if the Singularity container should be downloaded as well. Defaults to False. outdir (str): Path to the local download directory. Defaults to None. """ - def __init__(self, pipeline, release=None, singularity=False, outdir=None): + def __init__(self, pipeline, release=None, singularity=False, outdir=None, compress_type='tar.gz'): self.pipeline = pipeline self.release = release self.singularity = singularity self.outdir = outdir + self.output_filename = None + self.compress_type = compress_type + if self.compress_type == 'none': + self.compress_type = None self.wf_name = None self.wf_sha = None @@ -48,16 +54,28 @@ def download_workflow(self): except LookupError: sys.exit(1) + output_logmsg = "Output directory: {}".format(self.outdir) + + # Set an output filename now that we have the outdir + if self.compress_type is not None: + self.output_filename = '{}.{}'.format(self.outdir, self.compress_type) + output_logmsg = "Output file: {}".format(self.output_filename) + # Check that the outdir doesn't already exist if os.path.exists(self.outdir): logging.error("Output directory '{}' already exists".format(self.outdir)) sys.exit(1) + # Check that compressed output file doesn't already exist + if self.output_filename and os.path.exists(self.output_filename): + logging.error("Output file '{}' already exists".format(self.output_filename)) + sys.exit(1) + logging.info( "Saving {}".format(self.pipeline) + "\n Pipeline release: {}".format(self.release) + "\n Pull singularity containers: {}".format('Yes' if self.singularity else 'No') + - "\n Output directory: {}".format(self.outdir) + "\n {}".format(output_logmsg) ) # Download the pipeline files @@ -87,6 +105,10 @@ def download_workflow(self): logging.error("Not able to pull image. 
Service might be down or internet connection is dead.") raise r + # Compress into an archive + if self.compress_type is not None: + logging.info("Compressing download..") + self.compress_download() def fetch_workflow_details(self, wfs): @@ -261,7 +283,40 @@ def pull_singularity_image(self, container): # Something else went wrong with singularity command raise e - def validate_md5(self, fname, expected): + def compress_download(self): + """Take the downloaded files and make a compressed .tar.gz archive. + """ + logging.debug('Creating archive: {}'.format(self.output_filename)) + + # .tar.gz and .tar.bz2 files + if self.compress_type == 'tar.gz' or self.compress_type == 'tar.bz2': + ctype = self.compress_type.split('.')[1] + with tarfile.open(self.output_filename, "w:{}".format(ctype)) as tar: + tar.add(self.outdir, arcname=os.path.basename(self.outdir)) + tar_flags = 'xzf' if ctype == 'gz' else 'xjf' + logging.info('Command to extract files: tar -{} {}'.format(tar_flags, self.output_filename)) + + # .zip files + if self.compress_type == 'zip': + with ZipFile(self.output_filename, 'w') as zipObj: + # Iterate over all the files in directory + for folderName, subfolders, filenames in os.walk(self.outdir): + for filename in filenames: + #create complete filepath of file in directory + filePath = os.path.join(folderName, filename) + # Add file to zip + zipObj.write(filePath) + logging.info('Command to extract files: unzip {}'.format(self.output_filename)) + + # Delete original files + logging.debug('Deleting uncompressed files: {}'.format(self.outdir)) + shutil.rmtree(self.outdir) + + # Caclualte md5sum for output file + self.validate_md5(self.output_filename) + + + def validate_md5(self, fname, expected=None): """Calculates the md5sum for a file on the disk and validate with expected. 
Args: @@ -280,7 +335,10 @@ def validate_md5(self, fname, expected): hash_md5.update(chunk) file_hash = hash_md5.hexdigest() - if file_hash == expected: - logging.debug('md5 sum of image matches expected: {}'.format(expected)) + if expected is None: + logging.info("MD5 checksum for {}: {}".format(fname, file_hash)) else: - raise IOError ("{} md5 does not match remote: {} - {}".format(fname, expected, file_hash)) + if file_hash == expected: + logging.debug('md5 sum of image matches expected: {}'.format(expected)) + else: + raise IOError ("{} md5 does not match remote: {} - {}".format(fname, expected, file_hash)) diff --git a/scripts/nf-core b/scripts/nf-core index 623174b0ef..9202c456d5 100755 --- a/scripts/nf-core +++ b/scripts/nf-core @@ -123,9 +123,15 @@ def launch(pipeline, params, direct): type = str, help = "Output directory" ) -def download(pipeline, release, singularity, outdir): +@click.option( + '-c', '--compress', + type = click.Choice(['tar.gz', 'tar.bz2', 'zip', 'none']), + default = 'tar.gz', + help = "Compression type" +) +def download(pipeline, release, singularity, outdir, compress): """ Download a pipeline and singularity container """ - dl = nf_core.download.DownloadWorkflow(pipeline, release, singularity, outdir) + dl = nf_core.download.DownloadWorkflow(pipeline, release, singularity, outdir, compress) dl.download_workflow() # nf-core licences From de26b2778c360a65e4e1e7187cc7dd383d140c84 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 4 Oct 2019 17:08:00 +0200 Subject: [PATCH 092/124] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 528c5497ea..96483ad2db 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ * Check that PR from patch or dev branch is acceptable by linting * Made code compatible with Python 3.7 * The `download` command now also fetches institutional configs from nf-core/configs +* The `download` command can now compress files into a single archive ### Syncing From ac82ab8f5e0e13f4646087df772ccc93a6f732b9 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Fri, 4 Oct 2019 23:03:30 +0200 Subject: [PATCH 093/124] Fix tests --- nf_core/bump_version.py | 2 +- nf_core/create.py | 2 +- nf_core/download.py | 4 ++-- nf_core/launch.py | 2 +- nf_core/lint.py | 2 +- tests/test_download.py | 4 ++-- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index d877b71bfe..c56c9f43a5 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -20,7 +20,7 @@ def bump_pipeline_version(lint_obj, new_version): # Collect the old and new version numbers current_version = lint_obj.config.get('manifest.version', '').strip(' \'"') if new_version.startswith('v'): - logging.warn("Stripping leading 'v' from new version number") + logging.warning("Stripping leading 'v' from new version number") new_version = new_version[1:] if not current_version: logging.error("Could not find config variable manifest.version") diff --git a/nf_core/create.py b/nf_core/create.py index 323181f072..ce97785a8e 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -62,7 +62,7 @@ def run_cookiecutter(self): # Check if the output directory exists if os.path.exists(self.outdir): if self.force: - logging.warn("Output directory '{}' exists - continuing as --force specified".format(self.outdir)) + logging.warning("Output directory '{}' exists - continuing as --force specified".format(self.outdir)) else: logging.error("Output directory '{}' exists!".format(self.outdir)) 
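The archive step added to `nf-core download` in the patches above boils down to the standard-library `tarfile` pattern. The sketch below is illustrative only (hypothetical directory and file names); it omits the `tar.bz2`/`zip` branches and the checksumming that `compress_download()` also handles.

```python
import os
import tarfile


def compress_to_tar_gz(source_dir, output_filename):
    """Pack source_dir into a .tar.gz archive, keeping the directory name as the archive root."""
    with tarfile.open(output_filename, "w:gz") as tar:
        tar.add(source_dir, arcname=os.path.basename(source_dir))


# Hypothetical usage; extract again with `tar -xzf nf-core-methylseq-1.0.tar.gz`
# compress_to_tar_gz("nf-core-methylseq-1.0", "nf-core-methylseq-1.0.tar.gz")
```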
logging.info("Use -f / --force to overwrite existing files") diff --git a/nf_core/download.py b/nf_core/download.py index 8f6a7c8499..64992109d4 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -151,7 +151,7 @@ def fetch_workflow_details(self, wfs): elif not self.release: self.release = 'dev' self.wf_sha = 'master' # Cheating a little, but GitHub download link works - logging.warn("Pipeline is in development - downloading current code on master branch.\n" + + logging.warning("Pipeline is in development - downloading current code on master branch.\n" + "This is likely to change soon should not be considered fully reproducible.") # Set outdir name if not defined @@ -167,7 +167,7 @@ def fetch_workflow_details(self, wfs): # If we got this far, must not be a nf-core pipeline if self.pipeline.count('/') == 1: # Looks like a GitHub address - try working with this repo - logging.warn("Pipeline name doesn't match any nf-core workflows") + logging.warning("Pipeline name doesn't match any nf-core workflows") logging.info("Pipeline name looks like a GitHub address - attempting to download anyway") self.wf_name = self.pipeline if not self.release: diff --git a/nf_core/launch.py b/nf_core/launch.py index 3ad9f98c9b..37b91b25a1 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -361,7 +361,7 @@ def build_command(self): if val: self.nextflow_cmd = "{} {}".format(self.nextflow_cmd, flag) else: - logging.warn("TODO: Can't set false boolean flags currently.") + logging.warning("TODO: Can't set false boolean flags currently.") # String values else: self.nextflow_cmd = '{} {} "{}"'.format(self.nextflow_cmd, flag, val.replace('"', '\\"')) diff --git a/nf_core/lint.py b/nf_core/lint.py index 51fe117683..ed8a5e66b3 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -803,6 +803,6 @@ def print_results(self): if len(self.passed) > 0: logging.debug("{}\n {}".format(click.style("Test Passed:", fg='green'), "\n ".join(["http://nf-co.re/errors#{}: {}".format(eid, msg) for eid, msg in self.passed]))) if len(self.warned) > 0: - logging.warn("{}\n {}".format(click.style("Test Warnings:", fg='yellow'), "\n ".join(["http://nf-co.re/errors#{}: {}".format(eid, msg) for eid, msg in self.warned]))) + logging.warning("{}\n {}".format(click.style("Test Warnings:", fg='yellow'), "\n ".join(["http://nf-co.re/errors#{}: {}".format(eid, msg) for eid, msg in self.warned]))) if len(self.failed) > 0: logging.error("{}\n {}".format(click.style("Test Failures:", fg='red'), "\n ".join(["http://nf-co.re/errors#{}: {}".format(eid, msg) for eid, msg in self.failed]))) diff --git a/tests/test_download.py b/tests/test_download.py index eaa59d8755..75b891cef3 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -215,11 +215,11 @@ def test_pull_singularity_image(self): def test_download_workflow_with_success(self, mock_download_image): - tmp_dir = os.path.join(tempfile.mkdtemp(), 'new') + tmp_dir = tempfile.mkdtemp() download_obj = DownloadWorkflow( pipeline = "nf-core/methylseq", - outdir = tmp_dir, + outdir = os.path.join(tmp_dir, 'new'), singularity = True) download_obj.download_workflow() From 217479d5bc4696ca287d246295e45b842b41f3e2 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 5 Oct 2019 21:15:24 +0200 Subject: [PATCH 094/124] Template: ascii artwork for plaintext email Fixes nf-core/tools#233 --- CHANGELOG.md | 1 + .../assets/email_template.txt | 12 +++++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 96483ad2db..152a4ef1b2 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,6 +47,7 @@ * Add contributions and support heading with links to contribution guidelines and link to the pipeline slack channel in the main README * Fix Parameters JSON due to new versionized structure * Added conda-forge::r-markdown=1.1 and conda-forge::r-base=3.6.1 to environment +* Plain-text email template now has nf-core ASCII artwork ### Other diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt index 2277f36786..3368a5f145 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt @@ -1,6 +1,12 @@ -======================================== - {{ cookiecutter.name }} v${version} -======================================== +---------------------------------------------------- + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' + {{ cookiecutter.name }} v${version} +---------------------------------------------------- + Run Name: $runName <% if (success){ From 5c062b19fbca8b04df6601acb6c24c3496d4cbc9 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 5 Oct 2019 22:29:05 +0200 Subject: [PATCH 095/124] Create: Fetch pipeline logo Closes nf-core/tools#316 --- CHANGELOG.md | 2 + nf_core/create.py | 51 ++++++++++++++----- .../{{cookiecutter.name_noslash}}/README.md | 2 +- .../assets/email_template.html | 2 + .../assets/sendmail_template.txt | 17 +++++++ 5 files changed, 60 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 152a4ef1b2..55a43c8bf1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ * Made code compatible with Python 3.7 * The `download` command now also fetches institutional configs from nf-core/configs * The `download` command can now compress files into a single archive +* `nf-core create` now fetches a logo for the pipeline from the nf-core website ### Syncing @@ -48,6 +49,7 @@ * Fix Parameters JSON due to new versionized structure * Added conda-forge::r-markdown=1.1 and conda-forge::r-base=3.6.1 to environment * Plain-text email template now has nf-core ASCII artwork +* Template configured to use logo fetched from website ### Other diff --git a/nf_core/create.py b/nf_core/create.py index ce97785a8e..67a6610652 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -6,6 +6,7 @@ import git import logging import os +import requests import shutil import sys import tempfile @@ -27,9 +28,8 @@ class PipelineCreate(object): outdir (str): Path to the local output directory. 
""" def __init__(self, name, description, author, new_version='1.0dev', no_git=False, force=False, outdir=None): - self.name = 'nf-core/{}'.format( - name.lower().replace(r'/\s+/', '-').replace('nf-core/', '').replace('/', '-') - ) + self.short_name = name.lower().replace(r'/\s+/', '-').replace('nf-core/', '').replace('/', '-') + self.name = 'nf-core/{}'.format(self.short_name) self.name_noslash = self.name.replace('/', '-') self.name_docker = self.name.replace('nf-core', 'nfcore') self.description = description @@ -71,7 +71,7 @@ def run_cookiecutter(self): os.makedirs(self.outdir) # Build the template in a temporary directory - tmpdir = tempfile.mkdtemp() + self.tmpdir = tempfile.mkdtemp() template = os.path.join(os.path.dirname(os.path.realpath(nf_core.__file__)), 'pipeline-template/') cookiecutter.main.cookiecutter( template, @@ -83,27 +83,52 @@ def run_cookiecutter(self): 'name_docker': self.name_docker, 'version': self.new_version }, - no_input=True, - overwrite_if_exists=self.force, - output_dir=tmpdir + no_input = True, + overwrite_if_exists = self.force, + output_dir = self.tmpdir ) + # Make a logo and save it + self.make_pipeline_logo() + # Move the template to the output directory - for f in os.listdir(os.path.join(tmpdir, self.name_noslash)): - shutil.move(os.path.join(tmpdir, self.name_noslash, f), self.outdir) + for f in os.listdir(os.path.join(self.tmpdir, self.name_noslash)): + shutil.move(os.path.join(self.tmpdir, self.name_noslash, f), self.outdir) # Delete the temporary directory - shutil.rmtree(tmpdir) + shutil.rmtree(self.tmpdir) + + def make_pipeline_logo(self): + """Fetch a logo for the new pipeline from the nf-core website + """ + + logo_url = "https://nf-co.re/logo/{}".format(self.short_name) + logging.debug("Fetching logo from {}".format(logo_url)) + + email_logo_path = "{}/{}/assets/{}_logo.png".format(self.tmpdir, self.name_noslash, self.name_noslash) + logging.debug("Writing logo to {}".format(email_logo_path)) + r = requests.get("{}?w=400".format(logo_url)) + with open(email_logo_path, 'wb') as fh: + fh.write(r.content) + + readme_logo_path = "{}/{}/docs/images/{}_logo.png".format(self.tmpdir, self.name_noslash, self.name_noslash) + + logging.debug("Writing logo to {}".format(readme_logo_path)) + if not os.path.exists(os.path.dirname(readme_logo_path)): + os.makedirs(os.path.dirname(readme_logo_path)) + r = requests.get("{}?w=600".format(logo_url)) + with open(readme_logo_path, 'wb') as fh: + fh.write(r.content) def git_init_pipeline(self): """Initialises the new pipeline as a Git repository and submits first commit. """ logging.info("Initialising pipeline git repository") repo = git.Repo.init(self.outdir) - repo.git.add(A=True) + repo.git.add(A=True) repo.index.commit("initial template build from nf-core/tools, version {}".format(nf_core.__version__)) #Add TEMPLATE branch to git repository repo.git.branch('TEMPLATE') - repo.git.branch('dev') + repo.git.branch('dev') logging.info("Done. 
Remember to add a remote and push to GitHub:\n cd {}\n git remote add origin git@github.com:USERNAME/REPO_NAME.git\n git push --all origin".format(self.outdir)) - logging.info("This will also push your newly created dev branch and the TEMPLATE branch for syncing.") \ No newline at end of file + logging.info("This will also push your newly created dev branch and the TEMPLATE branch for syncing.") diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md index dad0a49d7e..b219a0c3a5 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/README.md @@ -1,4 +1,4 @@ -# {{ cookiecutter.name }} +# ![{{ cookiecutter.name }}](docs/images/{{ cookiecutter.name_noslash }}_logo.png) **{{ cookiecutter.description }}**. diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.html b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.html index 96bcdf29ac..e4cb1c7800 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.html +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.html @@ -11,6 +11,8 @@
+<img src="cid:nfcorepipelinelogo">
+

<h1>{{ cookiecutter.name }} v${version}</h1>

<h2>Run Name: $runName</h2>

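As an illustrative aside, the logo endpoint used by `make_pipeline_logo()` above can also be exercised by hand; `examplepipe` below is a placeholder short name, the output filename follows the `name_noslash` convention, and the `?w=` width parameter mirrors the values used in the code.

```bash
# Sketch: fetch a pipeline logo the same way `nf-core create` does (placeholder pipeline name)
curl -o nf-core-examplepipe_logo.png "https://nf-co.re/logo/examplepipe?w=600"
```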
diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/sendmail_template.txt b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/sendmail_template.txt index 2d67122006..db7637e280 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/sendmail_template.txt +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/sendmail_template.txt @@ -8,6 +8,23 @@ Content-Type: text/html; charset=utf-8 $email_html +--nfcoremimeboundary +Content-Type: image/png;name="{{ cookiecutter.name_noslash }}_logo.png" +Content-Transfer-Encoding: base64 +Content-ID: +Content-Disposition: inline; filename="{{ cookiecutter.name_noslash }}_logo.png" + +<% out << new File("$baseDir/assets/{{ cookiecutter.name_noslash }}_logo.png"). + bytes. + encodeBase64(). + toString(). + tokenize( '\n' )*. + toList()*. + collate( 76 )*. + collect { it.join() }. + flatten(). + join( '\n' ) %> + <% if (mqcFile){ def mqcFileObj = new File("$mqcFile") From 7dd1aae51c206d9e82daf7eb6b414cab15bd0382 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 5 Oct 2019 23:18:54 +0200 Subject: [PATCH 096/124] Template: --email_on_fail option New option is the same as --email, except emails are only sent when the workflow is not successful. Closes nf-core/tools#364 --- CHANGELOG.md | 1 + .../docs/usage.md | 4 ++++ .../{{cookiecutter.name_noslash}}/main.nf | 20 +++++++++++++------ .../nextflow.config | 3 ++- 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 96483ad2db..8e39251ed4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,6 +47,7 @@ * Add contributions and support heading with links to contribution guidelines and link to the pipeline slack channel in the main README * Fix Parameters JSON due to new versionized structure * Added conda-forge::r-markdown=1.1 and conda-forge::r-base=3.6.1 to environment +* New option `--email_on_fail` which only sends emails if the workflow is not successful ### Other diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md index abf63de0f4..cade6b7cea 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/docs/usage.md @@ -26,6 +26,7 @@ * [Other command line parameters](#other-command-line-parameters) * [`--outdir`](#--outdir) * [`--email`](#--email) + * [`--email_on_fail`](#--email_on_fail) * [`-name`](#-name) * [`-resume`](#-resume) * [`-c`](#-c) @@ -211,6 +212,9 @@ The output directory where the results will be saved. ### `--email` Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run. +### `--email_on_fail` +This works exactly as with `--email`, except emails are only sent if the workflow is not successful. + ### `-name` Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic. 
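As an illustrative aside, the new `--email_on_fail` option documented above is passed like any other pipeline parameter; the pipeline name and e-mail address below are placeholders.

```bash
# Sketch: send a summary e-mail only if the run is not successful (placeholder pipeline and address)
nextflow run nf-core/examplepipe -profile docker --email_on_fail "you@example.com"
```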
diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index ea63cc8e61..8c741436b2 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -36,6 +36,7 @@ def helpMessage() { Other options: --outdir The output directory where the results will be saved --email Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits + --email_on_fail Same as --email, except only send mail if the workflow is not successful --maxMultiqcEmailFileSize Theshold size for MultiQC report to be attached in notification email. If file generated by pipeline exceeds the threshold, it will not be attached (Default: 25MB) -name Name for the pipeline run. If not specified, Nextflow will automatically generate a random mnemonic. @@ -146,8 +147,9 @@ summary['Config Profile'] = workflow.profile if(params.config_profile_description) summary['Config Description'] = params.config_profile_description if(params.config_profile_contact) summary['Config Contact'] = params.config_profile_contact if(params.config_profile_url) summary['Config URL'] = params.config_profile_url -if(params.email) { +if(params.email || params.email_on_fail) { summary['E-mail Address'] = params.email + summary['E-mail on failure'] = params.email_on_fail summary['MultiQC maxsize'] = params.maxMultiqcEmailFileSize } log.info summary.collect { k,v -> "${k.padRight(18)}: $v" }.join("\n") @@ -321,6 +323,12 @@ workflow.onComplete { log.warn "[{{ cookiecutter.name }}] Could not attach MultiQC report to summary email" } + // Check if we are only sending emails on failure + email_address = params.email + if(!params.email && params.email_on_fail && !workflow.success){ + email_address = params.email_on_fail + } + // Render the TXT template def engine = new groovy.text.GStringTemplateEngine() def tf = new File("$baseDir/assets/email_template.txt") @@ -333,22 +341,22 @@ workflow.onComplete { def email_html = html_template.toString() // Render the sendmail template - def smail_fields = [ email: params.email, subject: subject, email_txt: email_txt, email_html: email_html, baseDir: "$baseDir", mqcFile: mqc_report, mqcMaxSize: params.maxMultiqcEmailFileSize.toBytes() ] + def smail_fields = [ email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, baseDir: "$baseDir", mqcFile: mqc_report, mqcMaxSize: params.maxMultiqcEmailFileSize.toBytes() ] def sf = new File("$baseDir/assets/sendmail_template.txt") def sendmail_template = engine.createTemplate(sf).make(smail_fields) def sendmail_html = sendmail_template.toString() // Send the HTML e-mail - if (params.email) { + if (email_address) { try { if( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail [ 'sendmail', '-t' ].execute() << sendmail_html - log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $params.email (sendmail)" + log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $email_address (sendmail)" } catch (all) { // Catch failures and try with plaintext - [ 'mail', '-s', subject, params.email ].execute() << email_txt - log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $params.email (mail)" + [ 'mail', '-s', subject, email_address ].execute() << email_txt + log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $email_address (mail)" } } diff --git 
a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config index 75b9229fac..d45c9f64f4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config @@ -18,6 +18,7 @@ params { name = false multiqc_config = "$baseDir/assets/multiqc_config.yaml" email = false + email_on_fail = false maxMultiqcEmailFileSize = 25.MB plaintext_email = false monochrome_logs = false @@ -60,7 +61,7 @@ profiles { // Avoid this error: // WARNING: Your kernel does not support swap limit capabilities or the cgroup is not mounted. Memory limited without swap. -// Testing this in nf-core after discussion here https://github.com/nf-core/tools/pull/351, once this is established and works well, nextflow might implement this behavior as new default. +// Testing this in nf-core after discussion here https://github.com/nf-core/tools/pull/351, once this is established and works well, nextflow might implement this behavior as new default. docker.runOptions = '-u \$(id -u):\$(id -g)' // Load igenomes.config if required From 343944eb5915811079e9390e2c6328d843bfafd0 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 5 Oct 2019 23:47:38 +0200 Subject: [PATCH 097/124] List: capture git errors for local workflows Handle exceptions due to detached HEAD refs in the local pulls of remote workflows. Prints a helpful message about how to fix. Fixes nf-core/tools#297 --- nf_core/list.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/nf_core/list.py b/nf_core/list.py index 6ae30dc29c..f77e159c53 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -304,13 +304,22 @@ def get_local_nf_workflow_details(self): # Pull information from the local git repository if self.local_path is not None: - repo = git.Repo(self.local_path) - self.commit_sha = str(repo.head.commit.hexsha) - self.remote_url = str(repo.remotes.origin.url) - self.branch = str(repo.active_branch) - self.last_pull = os.stat(os.path.join(self.local_path, '.git', 'FETCH_HEAD')).st_mtime - self.last_pull_date = datetime.datetime.fromtimestamp(self.last_pull).strftime("%Y-%m-%d %H:%M:%S") - self.last_pull_pretty = pretty_date(self.last_pull) + try: + repo = git.Repo(self.local_path) + self.commit_sha = str(repo.head.commit.hexsha) + self.remote_url = str(repo.remotes.origin.url) + self.branch = str(repo.active_branch) + self.last_pull = os.stat(os.path.join(self.local_path, '.git', 'FETCH_HEAD')).st_mtime + self.last_pull_date = datetime.datetime.fromtimestamp(self.last_pull).strftime("%Y-%m-%d %H:%M:%S") + self.last_pull_pretty = pretty_date(self.last_pull) + except TypeError as e: + logging.error( + "Could not fetch status of local Nextflow copy of {}:".format(self.full_name) + + "\n {}".format(str(e)) + + "\n\nIt's probably a good idea to delete this local copy and pull again:".format(self.local_path) + + "\n rm -rf {}".format(self.local_path) + + "\n nextflow pull {}".format(self.full_name) + ) def pretty_date(time): From 0b3a8ed033dc8d4d44fcc212c7006ec56fd2adb9 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sat, 5 Oct 2019 23:48:50 +0200 Subject: [PATCH 098/124] Changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 96483ad2db..c564f6bed1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ * Made code compatible with Python 3.7 * The `download` command now also fetches 
institutional configs from nf-core/configs * The `download` command can now compress files into a single archive +* When listing pipelines, a nicer message is given for the rare case of a detached `HEAD` ref in a locally pulled pipeline. [#297](https://github.com/nf-core/tools/issues/297) ### Syncing From 06211d0645926ea9296a7380351565e0b7863298 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sun, 6 Oct 2019 00:04:42 +0200 Subject: [PATCH 099/124] .gitignore API docs build Fixes nf-core/tools#315 --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 27c031f40c..0d1179b934 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ .coverage .pytest_cache +docs/api/_build # Byte-compiled / optimized / DLL files __pycache__/ From b61ee34aa204ea54d599a8b4f1a0ccc361dd4c24 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Sun, 6 Oct 2019 00:07:56 +0200 Subject: [PATCH 100/124] Changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e39251ed4..fd47031a83 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,7 +14,7 @@ * Check that PR from patch or dev branch is acceptable by linting * Made code compatible with Python 3.7 * The `download` command now also fetches institutional configs from nf-core/configs -* The `download` command can now compress files into a single archive +* The `download` command can now compress files into a single archive. ### Syncing From 406e6fd57bf5771c9b06ff77454ba9a65eaff2e8 Mon Sep 17 00:00:00 2001 From: Alexander Peltzer Date: Sun, 6 Oct 2019 14:00:29 +0200 Subject: [PATCH 101/124] Add Patch testing to main travis.yml --- .travis.yml | 2 +- CHANGELOG.md | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e3e0e50a5c..70d4a9547d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,7 +8,7 @@ python: - '3.7' before_install: # PRs to master are only ok if coming from dev branch - - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && [ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ])' + - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $TRAVIS_BRANCH != "master" ] || ([ $TRAVIS_PULL_REQUEST_SLUG = $TRAVIS_REPO_SLUG ] && ([ $TRAVIS_PULL_REQUEST_BRANCH = "dev" ] || [ $TRAVIS_PULL_REQUEST_BRANCH = "patch" ]))' # Check that the changelog has been updated if this is a PR - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" && git fetch - '[ $TRAVIS_PULL_REQUEST = "false" ] || [ $(git diff --name-only HEAD origin/${TRAVIS_BRANCH} | grep CHANGELOG.md | wc -l) -ge 1 ]' diff --git a/CHANGELOG.md b/CHANGELOG.md index 0b7f3bde92..01807c9fcb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,6 +57,7 @@ * Bump `conda` to 4.6.14 in base nf-core Dockerfile * Added a Code of Conduct to nf-core/tools, as only the template had this before +* TravisCI tests will now also start for PRs from `patch` branches, [to allow fixing critical issues](https://github.com/nf-core/tools/pull/392) without making a new major release ## v1.6 From 466797e9095e10a88a53aa07b8adf05e027e6d88 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:24:13 +0100 Subject: [PATCH 102/124] Update fasta logic --- .../{{cookiecutter.name_noslash}}/main.nf | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 
8c741436b2..ed568954bd 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -63,17 +63,14 @@ if (params.genomes && params.genome && !params.genomes.containsKey(params.genome // TODO nf-core: Add any reference files that are needed // Configurable reference genomes -fasta = params.genome ? params.genomes[ params.genome ].fasta ?: false : false -if ( params.fasta ){ - fasta = file(params.fasta) - if( !fasta.exists() ) exit 1, "Fasta file not found: ${params.fasta}" -} // // NOTE - THIS IS NOT USED IN THIS PIPELINE, EXAMPLE ONLY -// If you want to use the above in a process, define the following: +// If you want to use the channel below in a process, define the following: // input: -// file fasta from fasta +// file fasta from ch_fasta // +params.fasta = params.genome ? params.genomes[ params.genome ].fasta ?: false : false +if (params.fasta) { ch_fasta = file(params.fasta, checkIfExists: true) } // Has the run name been specified by the user? From 6fa014a9a082ee7972aa5cb78942296a109d4445 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:24:38 +0100 Subject: [PATCH 103/124] Fix tpyo --- nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index ed568954bd..721168115b 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -50,7 +50,7 @@ def helpMessage() { * SET UP CONFIGURATION VARIABLES */ -// Show help emssage +// Show help message if (params.help){ helpMessage() exit 0 From 7b2efd3d3f7bf86e1a99fefb763e8385207d9cab Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:25:26 +0100 Subject: [PATCH 104/124] Initialise genome --- .../{{cookiecutter.name_noslash}}/nextflow.config | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config index d45c9f64f4..47f3ff7360 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config @@ -10,6 +10,7 @@ params { // Workflow flags // TODO nf-core: Specify your pipeline's command line flags + genome = false reads = "data/*{1,2}.fastq.gz" singleEnd = false outdir = './results' From 78ebc1f1660939f9d43c8efaf5660c4eb3072759 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:27:20 +0100 Subject: [PATCH 105/124] Add checkIfExists --- .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 721168115b..f2e09f94ca 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -92,8 +92,8 @@ if( workflow.profile == 'awsbatch') { } // Stage config files -ch_multiqc_config = Channel.fromPath(params.multiqc_config) -ch_output_docs = Channel.fromPath("$baseDir/docs/output.md") +ch_multiqc_config = file(params.multiqc_config, checkIfExists: true) +ch_output_docs = file("$baseDir/docs/output.md", checkIfExists: 
true) /* * Create a channel for input read files From df4507bd13bfc74d68bf6d271d673decbefa3c09 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:40:14 +0100 Subject: [PATCH 106/124] Adjust spacing and if statements --- .../{{cookiecutter.name_noslash}}/main.nf | 83 ++++++++----------- 1 file changed, 35 insertions(+), 48 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index f2e09f94ca..6548d805fc 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -9,7 +9,6 @@ ---------------------------------------------------------------------------------------- */ - def helpMessage() { // TODO nf-core: Add to this help message with new command line parameters log.info nfcoreHeader() @@ -46,16 +45,16 @@ def helpMessage() { """.stripIndent() } -/* - * SET UP CONFIGURATION VARIABLES - */ - // Show help message -if (params.help){ +if (params.help) { helpMessage() exit 0 } +/* + * SET UP CONFIGURATION VARIABLES + */ + // Check if genome exists in the config file if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) { exit 1, "The provided genome '${params.genome}' is not available in the iGenomes file. Currently the available genomes are ${params.genomes.keySet().join(", ")}" @@ -72,16 +71,14 @@ if (params.genomes && params.genome && !params.genomes.containsKey(params.genome params.fasta = params.genome ? params.genomes[ params.genome ].fasta ?: false : false if (params.fasta) { ch_fasta = file(params.fasta, checkIfExists: true) } - // Has the run name been specified by the user? // this has the bonus effect of catching both -name and --name custom_runName = params.name -if( !(workflow.runName ==~ /[a-z]+_[a-z]+/) ){ +if (!(workflow.runName ==~ /[a-z]+_[a-z]+/)) { custom_runName = workflow.runName } - -if( workflow.profile == 'awsbatch') { +if ( workflow.profile == 'awsbatch') { // AWSBatch sanity checking if (!params.awsqueue || !params.awsregion) exit 1, "Specify correct --awsqueue and --awsregion parameters on AWSBatch!" 
// Check outdir paths to be S3 buckets if running on AWSBatch @@ -98,17 +95,17 @@ ch_output_docs = file("$baseDir/docs/output.md", checkIfExists: true) /* * Create a channel for input read files */ -if(params.readPaths){ - if(params.singleEnd){ +if (params.readPaths) { + if (params.singleEnd) { Channel .from(params.readPaths) - .map { row -> [ row[0], [file(row[1][0])]] } + .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true) ] ] } .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" } .into { read_files_fastqc; read_files_trimming } } else { Channel .from(params.readPaths) - .map { row -> [ row[0], [file(row[1][0]), file(row[1][1])]] } + .map { row -> [ row[0], [ file(row[1][0], checkIfExists: true), file(row[1][1], checkIfExists: true) ] ] } .ifEmpty { exit 1, "params.readPaths was empty - no input files supplied" } .into { read_files_fastqc; read_files_trimming } } @@ -119,32 +116,31 @@ if(params.readPaths){ .into { read_files_fastqc; read_files_trimming } } - // Header log info log.info nfcoreHeader() def summary = [:] -if(workflow.revision) summary['Pipeline Release'] = workflow.revision +if (workflow.revision) summary['Pipeline Release'] = workflow.revision summary['Run Name'] = custom_runName ?: workflow.runName // TODO nf-core: Report custom parameters here summary['Reads'] = params.reads summary['Fasta Ref'] = params.fasta summary['Data Type'] = params.singleEnd ? 'Single-End' : 'Paired-End' summary['Max Resources'] = "$params.max_memory memory, $params.max_cpus cpus, $params.max_time time per job" -if(workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container" +if (workflow.containerEngine) summary['Container'] = "$workflow.containerEngine - $workflow.container" summary['Output dir'] = params.outdir summary['Launch dir'] = workflow.launchDir summary['Working dir'] = workflow.workDir summary['Script dir'] = workflow.projectDir summary['User'] = workflow.userName -if(workflow.profile == 'awsbatch'){ +if (workflow.profile == 'awsbatch') { summary['AWS Region'] = params.awsregion summary['AWS Queue'] = params.awsqueue } summary['Config Profile'] = workflow.profile -if(params.config_profile_description) summary['Config Description'] = params.config_profile_description -if(params.config_profile_contact) summary['Config Contact'] = params.config_profile_contact -if(params.config_profile_url) summary['Config URL'] = params.config_profile_url -if(params.email || params.email_on_fail) { +if (params.config_profile_description) summary['Config Description'] = params.config_profile_description +if (params.config_profile_contact) summary['Config Contact'] = params.config_profile_contact +if (params.config_profile_url) summary['Config URL'] = params.config_profile_url +if (params.email || params.email_on_fail) { summary['E-mail Address'] = params.email summary['E-mail on failure'] = params.email_on_fail summary['MultiQC maxsize'] = params.maxMultiqcEmailFileSize @@ -172,16 +168,15 @@ ${summary.collect { k,v -> "
$k
${v ?: ' - if (filename.indexOf(".csv") > 0) filename - else null - } + saveAs: { filename -> + if (filename.indexOf(".csv") > 0) filename + else null + } output: file 'software_versions_mqc.yaml' into software_versions_yaml @@ -198,15 +193,13 @@ process get_software_versions { """ } - - /* * STEP 1 - FastQC */ process fastqc { tag "$name" publishDir "${params.outdir}/fastqc", mode: 'copy', - saveAs: {filename -> filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename"} + saveAs: { filename -> filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename" } input: set val(name), file(reads) from read_files_fastqc @@ -220,8 +213,6 @@ process fastqc { """ } - - /* * STEP 2 - MultiQC */ @@ -249,8 +240,6 @@ process multiqc { """ } - - /* * STEP 3 - Output Description HTML */ @@ -269,8 +258,6 @@ process output_documentation { """ } - - /* * Completion e-mail notification */ @@ -278,7 +265,7 @@ workflow.onComplete { // Set up the e-mail variables def subject = "[{{ cookiecutter.name }}] Successful: $workflow.runName" - if(!workflow.success){ + if (!workflow.success) { subject = "[{{ cookiecutter.name }}] FAILED: $workflow.runName" } def email_fields = [:] @@ -297,10 +284,10 @@ workflow.onComplete { email_fields['summary']['Date Completed'] = workflow.complete email_fields['summary']['Pipeline script file path'] = workflow.scriptFile email_fields['summary']['Pipeline script hash ID'] = workflow.scriptId - if(workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository - if(workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId - if(workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision - if(workflow.container) email_fields['summary']['Docker image'] = workflow.container + if (workflow.repository) email_fields['summary']['Pipeline repository Git URL'] = workflow.repository + if (workflow.commitId) email_fields['summary']['Pipeline repository Git Commit'] = workflow.commitId + if (workflow.revision) email_fields['summary']['Pipeline Git branch/tag'] = workflow.revision + if (workflow.container) email_fields['summary']['Docker image'] = workflow.container email_fields['summary']['Nextflow Version'] = workflow.nextflow.version email_fields['summary']['Nextflow Build'] = workflow.nextflow.build email_fields['summary']['Nextflow Compile Timestamp'] = workflow.nextflow.timestamp @@ -311,7 +298,7 @@ workflow.onComplete { try { if (workflow.success) { mqc_report = multiqc_report.getVal() - if (mqc_report.getClass() == ArrayList){ + if (mqc_report.getClass() == ArrayList) { log.warn "[{{ cookiecutter.name }}] Found multiple reports from process 'multiqc', will use only one" mqc_report = mqc_report[0] } @@ -322,7 +309,7 @@ workflow.onComplete { // Check if we are only sending emails on failure email_address = params.email - if(!params.email && params.email_on_fail && !workflow.success){ + if (!params.email && params.email_on_fail && !workflow.success) { email_address = params.email_on_fail } @@ -346,7 +333,7 @@ workflow.onComplete { // Send the HTML e-mail if (email_address) { try { - if( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } + if ( params.plaintext_email ){ throw GroovyException('Send plaintext e-mail, not HTML') } // Try to send HTML e-mail using sendmail [ 'sendmail', '-t' ].execute() << sendmail_html log.info "[{{ cookiecutter.name }}] Sent summary e-mail to $email_address (sendmail)" @@ -359,7 +346,7 @@ workflow.onComplete { // Write 
summary e-mail HTML to a file def output_d = file( "${params.outdir}/pipeline_info/" ) - if( !output_d.exists() ) { + if (!output_d.exists()) { output_d.mkdirs() } def output_hf = file( output_d, "pipeline_report.html" ) @@ -378,7 +365,7 @@ workflow.onComplete { log.info "${c_green}Number of successfully ran process(es) : ${workflow.stats.succeedCount} ${c_reset}" } - if(workflow.success){ + if (workflow.success) { log.info "${c_purple}[{{ cookiecutter.name }}]${c_green} Pipeline completed successfully${c_reset}" } else { checkHostname() @@ -416,11 +403,11 @@ def checkHostname(){ def c_white = params.monochrome_logs ? '' : "\033[0;37m" def c_red = params.monochrome_logs ? '' : "\033[1;91m" def c_yellow_bold = params.monochrome_logs ? '' : "\033[1;93m" - if(params.hostnames){ + if (params.hostnames) { def hostname = "hostname".execute().text.trim() params.hostnames.each { prof, hnames -> hnames.each { hname -> - if(hostname.contains(hname) && !workflow.profile.contains(prof)){ + if (hostname.contains(hname) && !workflow.profile.contains(prof)) { log.error "====================================================\n" + " ${c_red}WARNING!${c_reset} You are running with `-profile $workflow.profile`\n" + " but your machine hostname is ${c_white}'$hostname'${c_reset}\n" + From 89967f522cdfd37ccd67bcd43858bccb3810be58 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:51:17 +0100 Subject: [PATCH 107/124] Add spaces for if statements --- .../{{cookiecutter.name_noslash}}/nextflow.config | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config index 47f3ff7360..6ce293dab4 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/nextflow.config @@ -66,7 +66,7 @@ profiles { docker.runOptions = '-u \$(id -u):\$(id -g)' // Load igenomes.config if required -if(!params.igenomesIgnore){ +if (!params.igenomesIgnore) { includeConfig 'conf/igenomes.config' } @@ -103,9 +103,9 @@ manifest { // Function to ensure that resource requirements don't go beyond // a maximum limit def check_max(obj, type) { - if(type == 'memory'){ + if (type == 'memory') { try { - if(obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) + if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) return params.max_memory as nextflow.util.MemoryUnit else return obj @@ -113,9 +113,9 @@ def check_max(obj, type) { println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" return obj } - } else if(type == 'time'){ + } else if (type == 'time') { try { - if(obj.compareTo(params.max_time as nextflow.util.Duration) == 1) + if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) return params.max_time as nextflow.util.Duration else return obj @@ -123,7 +123,7 @@ def check_max(obj, type) { println " ### ERROR ### Max time '${params.max_time}' is not valid! 
Using default value: $obj" return obj } - } else if(type == 'cpus'){ + } else if (type == 'cpus') { try { return Math.min( obj, params.max_cpus as int ) } catch (all) { From 5a375f0b7a4a75411806f0ddf731b5d9ac577472 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:52:26 +0100 Subject: [PATCH 108/124] Add space --- .../{{cookiecutter.name_noslash}}/conf/test.config | 1 + 1 file changed, 1 insertion(+) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config index 3b221bb358..9ce6312ae8 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/test.config @@ -14,6 +14,7 @@ params { max_cpus = 2 max_memory = 6.GB max_time = 48.h + // Input data // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets // TODO nf-core: Give any required params for the test so that command line flags are not needed From 2b6ce74296bfd6f27a31b30ab0cabfdb23596a99 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 22:55:56 +0100 Subject: [PATCH 109/124] Add file existence check --- .../bin/scrape_software_versions.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py index b5dd4d62f5..a50d40267b 100755 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py @@ -1,7 +1,8 @@ #!/usr/bin/env python +import os +import re from __future__ import print_function from collections import OrderedDict -import re # TODO nf-core: Add additional regexes for new tools in process get_software_versions regexes = { @@ -18,11 +19,14 @@ # Search each file using its regex for k, v in regexes.items(): - with open(v[0]) as x: - versions = x.read() - match = re.search(v[1], versions) - if match: - results[k] = "v{}".format(match.group(1)) + if os.path.exists(v[0]): + with open(v[0]) as x: + versions = x.read() + match = re.search(v[1], versions) + if match: + results[k] = "v{}".format(match.group(1)) + else: + results[k] = False # Remove software set to false in results for k in results: From 4efe858e4d44c23614a451d5f1288c5f5ed9f01a Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 23:01:42 +0100 Subject: [PATCH 110/124] Update CHANGELOG --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01807c9fcb..7f8234361e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -52,6 +52,9 @@ * Plain-text email template now has nf-core ASCII artwork * Template configured to use logo fetched from website * New option `--email_on_fail` which only sends emails if the workflow is not successful +* Add file existence check when checking software versions +* Use `checkIfExists` when initialising `file` objects +* Consistent spacing for `if` statements ### Other From 97b84d70b9a3611aeedc04d272c60afa2da3707e Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 23:13:21 +0100 Subject: [PATCH 111/124] Reorder imports --- .../bin/scrape_software_versions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py 
b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py index a50d40267b..573e47d934 100755 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py @@ -1,8 +1,8 @@ #!/usr/bin/env python -import os -import re from __future__ import print_function from collections import OrderedDict +import os +import re # TODO nf-core: Add additional regexes for new tools in process get_software_versions regexes = { From 7635cd32ce6646884d36d587addd9bf415cdf8ed Mon Sep 17 00:00:00 2001 From: drpatelh Date: Sun, 6 Oct 2019 23:40:46 +0100 Subject: [PATCH 112/124] Change file to new File --- .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 6548d805fc..f531e751aa 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -345,13 +345,13 @@ workflow.onComplete { } // Write summary e-mail HTML to a file - def output_d = file( "${params.outdir}/pipeline_info/" ) + def output_d = new File( "${params.outdir}/pipeline_info/" ) if (!output_d.exists()) { output_d.mkdirs() } - def output_hf = file( output_d, "pipeline_report.html" ) + def output_hf = new File( output_d, "pipeline_report.html" ) output_hf.withWriter { w -> w << email_html } - def output_tf = file( output_d, "pipeline_report.txt" ) + def output_tf = new File( output_d, "pipeline_report.txt" ) output_tf.withWriter { w -> w << email_txt } c_reset = params.monochrome_logs ? '' : "\033[0m"; From 92096d823c3a2edc09f3b0bafd01ded9966ae706 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 7 Oct 2019 00:02:50 +0100 Subject: [PATCH 113/124] Escape the spacing --- .../assets/email_template.txt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt index 3368a5f145..a2190e361a 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/assets/email_template.txt @@ -1,9 +1,9 @@ ---------------------------------------------------- - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' + ,--./,-. 
+ ___ __ __ __ ___ /,-._.--~\\ + |\\ | |__ __ / ` / \\ |__) |__ } { + | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, + `._,._,' {{ cookiecutter.name }} v${version} ---------------------------------------------------- From 769a145f32b4d69705a6600c32ad435983b4f911 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 7 Oct 2019 00:11:56 +0100 Subject: [PATCH 114/124] Adjust spacing --- .../{{cookiecutter.name_noslash}}/main.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index f531e751aa..b8d29b8e8b 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -133,17 +133,17 @@ summary['Working dir'] = workflow.workDir summary['Script dir'] = workflow.projectDir summary['User'] = workflow.userName if (workflow.profile == 'awsbatch') { - summary['AWS Region'] = params.awsregion - summary['AWS Queue'] = params.awsqueue + summary['AWS Region'] = params.awsregion + summary['AWS Queue'] = params.awsqueue } summary['Config Profile'] = workflow.profile if (params.config_profile_description) summary['Config Description'] = params.config_profile_description if (params.config_profile_contact) summary['Config Contact'] = params.config_profile_contact if (params.config_profile_url) summary['Config URL'] = params.config_profile_url if (params.email || params.email_on_fail) { - summary['E-mail Address'] = params.email - summary['E-mail on failure'] = params.email_on_fail - summary['MultiQC maxsize'] = params.maxMultiqcEmailFileSize + summary['E-mail Address'] = params.email + summary['E-mail on failure'] = params.email_on_fail + summary['MultiQC maxsize'] = params.maxMultiqcEmailFileSize } log.info summary.collect { k,v -> "${k.padRight(18)}: $v" }.join("\n") log.info "\033[2m----------------------------------------------------\033[0m" From bb711db5aefaf471c5f5dc58bda4070a6c4e7038 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 7 Oct 2019 07:46:11 +0200 Subject: [PATCH 115/124] Template Dockerfile: use tagged nfcore/base Closes nf-core/tools#335 --- CHANGELOG.md | 1 + nf_core/create.py | 3 ++- nf_core/pipeline-template/cookiecutter.json | 3 ++- .../pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01807c9fcb..5da4ed397b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -39,6 +39,7 @@ * Add proper `nf-core` logo for tools * Add `Quick Start` section to main README of template * Fix [Docker RunOptions](https://github.com/nf-core/tools/pull/351) to get UID and GID set in the template +* `Dockerfile` now specifically uses the proper release tag of the nfcore/base image * Use [`file`](https://github.com/nf-core/tools/pull/354) instead of `new File` to avoid weird behavior such as making an `s3:/` directory locally when using an AWS S3 bucket as the `--outdir`. 
diff --git a/nf_core/create.py b/nf_core/create.py index 67a6610652..5b0802eb85 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -81,7 +81,8 @@ def run_cookiecutter(self): 'author': self.author, 'name_noslash': self.name_noslash, 'name_docker': self.name_docker, - 'version': self.new_version + 'version': self.new_version, + 'nf_core_version': nf_core.__version__ }, no_input = True, overwrite_if_exists = self.force, diff --git a/nf_core/pipeline-template/cookiecutter.json b/nf_core/pipeline-template/cookiecutter.json index d9428c8819..dfc2f668a0 100644 --- a/nf_core/pipeline-template/cookiecutter.json +++ b/nf_core/pipeline-template/cookiecutter.json @@ -4,5 +4,6 @@ "author": "Rocky Balboa", "name_noslash": "{{ cookiecutter.name.replace('/', '-') }}", "name_docker": "{{ cookiecutter.name_docker }}", - "version": "1.0dev" + "version": "1.0dev", + "nf_core_version": "{{ cookiecutter.nf_core_version }}" } diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile index 9f64ab4c2f..f57f757dae 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/Dockerfile @@ -1,4 +1,4 @@ -FROM nfcore/base +FROM nfcore/base:{{ 'dev' if 'dev' in cookiecutter.nf_core_version else cookiecutter.nf_core_version }} LABEL authors="{{ cookiecutter.author }}" \ description="Docker image containing all requirements for {{ cookiecutter.name }} pipeline" From e6d78367a9adde14685e9923512b51f7ab1a8d80 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 7 Oct 2019 08:00:58 +0200 Subject: [PATCH 116/124] Template: Fix ANSI codes in header Closes nf-core/tools#329 --- CHANGELOG.md | 1 + .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01807c9fcb..16b7eb2296 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -34,6 +34,7 @@ * Add new code for Travis CI to allow PRs from patch branches too * Fix small typo in central readme of tools for future releases * Small code polishing + typo fix in the template main.nf file +* Header ANSI codes no longer print `[2m` to console when using `-with-ansi` * Switched to yaml.safe_load() to fix PyYAML warning that was thrown because of a possible [exploit](https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation) * Add `nf-core` citation * Add proper `nf-core` logo for tools diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index 8c741436b2..8f6fce61a3 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -153,7 +153,7 @@ if(params.email || params.email_on_fail) { summary['MultiQC maxsize'] = params.maxMultiqcEmailFileSize } log.info summary.collect { k,v -> "${k.padRight(18)}: $v" }.join("\n") -log.info "\033[2m----------------------------------------------------\033[0m" +log.info "-\033[2m--------------------------------------------------\033[0m-" // Check the hostnames against configured profiles checkHostname() @@ -403,14 +403,14 @@ def nfcoreHeader(){ c_cyan = params.monochrome_logs ? '' : "\033[0;36m"; c_white = params.monochrome_logs ? 
'' : "\033[0;37m"; - return """ ${c_dim}----------------------------------------------------${c_reset} + return """ -${c_dim}--------------------------------------------------${c_reset}- ${c_green},--.${c_black}/${c_green},-.${c_reset} ${c_blue} ___ __ __ __ ___ ${c_green}/,-._.--~\'${c_reset} ${c_blue} |\\ | |__ __ / ` / \\ |__) |__ ${c_yellow}} {${c_reset} ${c_blue} | \\| | \\__, \\__/ | \\ |___ ${c_green}\\`-._,-`-,${c_reset} ${c_green}`._,._,\'${c_reset} ${c_purple} {{ cookiecutter.name }} v${workflow.manifest.version}${c_reset} - ${c_dim}----------------------------------------------------${c_reset} + -${c_dim}--------------------------------------------------${c_reset}- """.stripIndent() } From 25348c3be204af52ea3c0740b0df2384c6b2f8f5 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 7 Oct 2019 08:15:22 +0200 Subject: [PATCH 117/124] Linting: Dockerfile FROM should include tag --- docs/lint_errors.md | 5 ++++- nf_core/lint.py | 2 +- tests/lint_examples/minimal_working_example/Dockerfile | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/lint_errors.md b/docs/lint_errors.md index 1e9cdc55f3..9d80f54b15 100644 --- a/docs/lint_errors.md +++ b/docs/lint_errors.md @@ -195,7 +195,7 @@ If a workflow has a conda `environment.yml` file (see above), the `Dockerfile` s to create the container. Such `Dockerfile`s can usually be very short, eg: ```Dockerfile -FROM nfcore/base +FROM nfcore/base:1.7 LABEL authors="your@email.com" \ description="Container image containing all requirements for nf-core/EXAMPLE pipeline" @@ -208,6 +208,9 @@ that the above template is used. Failures are generated if the `FROM`, `COPY` and `RUN` statements above are not present. These lines must be an exact copy of the above example. +Note that the base `nfcore/base` image should be tagged to the most recent release. +The linting tool compares the tag against the currently installed version. + Additional lines and different metadata can be added without causing the test to fail. 
## Error #10 - Template TODO statement found ## {#10} diff --git a/nf_core/lint.py b/nf_core/lint.py index ed8a5e66b3..5b65e5688c 100755 --- a/nf_core/lint.py +++ b/nf_core/lint.py @@ -756,7 +756,7 @@ def check_conda_dockerfile(self): return expected_strings = [ - 'FROM nfcore/base', + "FROM nfcore/base:{}".format('dev' if 'dev' in nf_core.__version__ else nf_core.__version__), 'COPY environment.yml /', 'RUN conda env create -f /environment.yml && conda clean -a', 'ENV PATH /opt/conda/envs/{}/bin:$PATH'.format(self.conda_config['name']) diff --git a/tests/lint_examples/minimal_working_example/Dockerfile b/tests/lint_examples/minimal_working_example/Dockerfile index 8029a1f8ae..fc562dc1d7 100644 --- a/tests/lint_examples/minimal_working_example/Dockerfile +++ b/tests/lint_examples/minimal_working_example/Dockerfile @@ -1,4 +1,4 @@ -FROM nfcore/base +FROM nfcore/base:dev MAINTAINER Phil Ewels LABEL authors="phil.ewels@scilifelab.se" \ description="Docker image containing all requirements for the nf-core tools pipeline" From fad147413f3291a8859b28fb78fd44fa116d47ad Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 7 Oct 2019 09:42:49 +0100 Subject: [PATCH 118/124] Add default labels --- .../conf/base.config | 26 ++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config index 47f3e530c7..3c5aac4633 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/conf/base.config @@ -13,7 +13,7 @@ process { // TODO nf-core: Check the defaults for all processes cpus = { check_max( 1 * task.attempt, 'cpus' ) } - memory = { check_max( 8.GB * task.attempt, 'memory' ) } + memory = { check_max( 7.GB * task.attempt, 'memory' ) } time = { check_max( 4.h * task.attempt, 'time' ) } errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' } @@ -21,8 +21,32 @@ process { maxErrors = '-1' // Process-specific resource requirements + // NOTE - Only one of the labels below are used in the fastqc process in the main script. + // If possible, it would be nice to keep the same label naming convention when + // adding in your processes. // TODO nf-core: Customise requirements for specific processes. 
// See https://www.nextflow.io/docs/latest/config.html#config-process-selectors + withLabel:process_low { + cpus = { check_max( 2 * task.attempt, 'cpus' ) } + memory = { check_max( 14.GB * task.attempt, 'memory' ) } + time = { check_max( 6.h * task.attempt, 'time' ) } + } + withLabel:process_medium { + cpus = { check_max( 6 * task.attempt, 'cpus' ) } + memory = { check_max( 42.GB * task.attempt, 'memory' ) } + time = { check_max( 8.h * task.attempt, 'time' ) } + } + withLabel:process_high { + cpus = { check_max( 12 * task.attempt, 'cpus' ) } + memory = { check_max( 84.GB * task.attempt, 'memory' ) } + time = { check_max( 10.h * task.attempt, 'time' ) } + } + withLabel:process_long { + time = { check_max( 20.h * task.attempt, 'time' ) } + } + withName:get_software_versions { + cache = false + } } params { From 32c563719b9b1dbd565531a7c8639753555c3080 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 7 Oct 2019 09:43:15 +0100 Subject: [PATCH 119/124] Add label and threads to fastqc --- .../pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index b8d29b8e8b..d0b44304f2 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -198,6 +198,7 @@ process get_software_versions { */ process fastqc { tag "$name" + label 'process_medium' publishDir "${params.outdir}/fastqc", mode: 'copy', saveAs: { filename -> filename.indexOf(".zip") > 0 ? "zips/$filename" : "$filename" } @@ -209,7 +210,7 @@ process fastqc { script: """ - fastqc -q $reads + fastqc --quiet --threads $task.cpus $reads """ } From 3f74ff612d3e5c6b073c377581360cc35e617687 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 7 Oct 2019 10:10:50 +0100 Subject: [PATCH 120/124] Add label to fastqc --- nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf index d0b44304f2..5864999455 100644 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/main.nf @@ -210,7 +210,7 @@ process fastqc { script: """ - fastqc --quiet --threads $task.cpus $reads + fastqc --quiet --threads $task.cpus $reads """ } From 4f590d98faec030ccdd71bf9881f070ec33097ac Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 7 Oct 2019 10:21:50 +0100 Subject: [PATCH 121/124] Use try..except instead --- .../bin/scrape_software_versions.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py index 573e47d934..c9d2361223 100755 --- a/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py +++ b/nf_core/pipeline-template/{{cookiecutter.name_noslash}}/bin/scrape_software_versions.py @@ -1,7 +1,6 @@ #!/usr/bin/env python from __future__ import print_function from collections import OrderedDict -import os import re # TODO nf-core: Add additional regexes for new tools in process get_software_versions @@ -19,13 +18,13 @@ # Search each file using its regex for k, v in regexes.items(): - if 
os.path.exists(v[0]): + try: with open(v[0]) as x: versions = x.read() match = re.search(v[1], versions) if match: results[k] = "v{}".format(match.group(1)) - else: + except IOError: results[k] = False # Remove software set to false in results From 33df368852f62774f0294a4f43c2fed87fdc75b8 Mon Sep 17 00:00:00 2001 From: drpatelh Date: Mon, 7 Oct 2019 10:25:43 +0100 Subject: [PATCH 122/124] Update CHANGELOG --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f8234361e..45879b129e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -53,8 +53,9 @@ * Template configured to use logo fetched from website * New option `--email_on_fail` which only sends emails if the workflow is not successful * Add file existence check when checking software versions -* Use `checkIfExists` when initialising `file` objects +* Fixed issue [https://github.com/nf-core/tools/issues/165] - Use `checkIfExists` * Consistent spacing for `if` statements +* Add sensible resource labels to `base.config` ### Other From fb33947d2e4851d63e03dfbec1f2600a5ac8c9c3 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 7 Oct 2019 15:14:05 +0200 Subject: [PATCH 123/124] 1.7 release --- CHANGELOG.md | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0bd8cda70b..cf38320a15 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # nf-core/tools: Changelog -## v1.7dev +## v1.7 ### PyPI package description diff --git a/setup.py b/setup.py index e9490e3179..f67bfab2ad 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup, find_packages import sys -version = '1.7dev' +version = '1.7' with open('README.md') as f: readme = f.read() From 2bc77032ab4c1aa6fdfaa01d3301c489a8a515b8 Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Mon, 7 Oct 2019 15:34:29 +0200 Subject: [PATCH 124/124] Bump version number in minimal example for tests --- tests/lint_examples/minimal_working_example/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lint_examples/minimal_working_example/Dockerfile b/tests/lint_examples/minimal_working_example/Dockerfile index fc562dc1d7..bcb5d79cd2 100644 --- a/tests/lint_examples/minimal_working_example/Dockerfile +++ b/tests/lint_examples/minimal_working_example/Dockerfile @@ -1,4 +1,4 @@ -FROM nfcore/base:dev +FROM nfcore/base:1.7 MAINTAINER Phil Ewels LABEL authors="phil.ewels@scilifelab.se" \ description="Docker image containing all requirements for the nf-core tools pipeline"
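As an illustrative aside, the series ends by tagging the 1.7 release; assuming the package is published on PyPI under the name `nf-core`, that release would be installed with:

```bash
# Sketch: install the tagged release of the tools package (assumes the PyPI package name `nf-core`)
pip install nf-core==1.7
```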