├── .devcontainer └── devcontainer.json ├── .editorconfig ├── .gitattributes ├── .github ├── .dockstore.yml ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ └── feature_request.yml ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── awsfulltest.yml │ ├── awstest.yml │ ├── branch.yml │ ├── ci.yml │ ├── clean-up.yml │ ├── download_pipeline.yml │ ├── fix-linting.yml │ ├── linting.yml │ ├── linting_comment.yml │ └── release-announcements.yml ├── .gitignore ├── .gitpod.yml ├── .nf-core.yml ├── .pre-commit-config.yaml ├── .prettierignore ├── .prettierrc.yml ├── CHANGELOG.md ├── CITATIONS.md ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── assets ├── adaptivecard.json ├── bamtools_filter_pe.json ├── bamtools_filter_se.json ├── blacklists │ ├── v1.0 │ │ ├── GRCh37-blacklist.v1.bed │ │ └── hg19-blacklist.v1.bed │ ├── v2.0 │ │ ├── GRCm38-blacklist.v2.bed │ │ ├── ce10-blacklist.v2.bed │ │ ├── ce11-blacklist.v2.bed │ │ ├── dm3-blacklist.v2.bed │ │ ├── dm6-blacklist.v2.bed │ │ ├── hg19-blacklist.v2.bed │ │ ├── hg38-blacklist.v2.bed │ │ └── mm10-blacklist.v2.bed │ └── v3.0 │ │ ├── GRCh38-blacklist.v3.bed │ │ └── hg38-blacklist.v3.bed ├── email_template.html ├── email_template.txt ├── methods_description_template.yml ├── multiqc │ ├── deseq2_clustering_header.txt │ ├── deseq2_pca_header.txt │ ├── frip_score_header.txt │ ├── peak_annotation_header.txt │ ├── peak_count_header.txt │ ├── spp_correlation_header.txt │ ├── spp_nsc_header.txt │ └── spp_rsc_header.txt ├── multiqc_config.yml ├── nf-core-chipseq_logo_light.png ├── samplesheet_pe.csv ├── samplesheet_se.csv ├── schema_input.json ├── sendmail_template.txt └── slackreport.json ├── bin ├── bampe_rm_orphan.py ├── check_samplesheet.py ├── deseq2_qc.r ├── gtf2bed ├── igv_files_to_session.py ├── macs3_merged_expand.py ├── plot_homer_annotatepeaks.r ├── plot_macs3_qc.r └── plot_peak_intersect.r ├── conf ├── base.config ├── igenomes.config ├── modules.config ├── test.config └── test_full.config ├── docs 
├── README.md ├── images │ ├── igv_screenshot.png │ ├── mqc_annotatePeaks_feature_percentage_plot.png │ ├── mqc_cutadapt_plot.png │ ├── mqc_deeptools_plotFingerprint_plot.png │ ├── mqc_deeptools_plotProfile_plot.png │ ├── mqc_deseq2_pca_plot.png │ ├── mqc_deseq2_sample_similarity_plot.png │ ├── mqc_fastqc_adapter.png │ ├── mqc_fastqc_counts.png │ ├── mqc_fastqc_quality.png │ ├── mqc_featureCounts_assignment_plot.png │ ├── mqc_frip_score_plot.png │ ├── mqc_macs3_peak_count_plot.png │ ├── mqc_picard_deduplication_plot.png │ ├── mqc_picard_insert_size_plot.png │ ├── mqc_preseq_plot.png │ ├── mqc_samtools_stats_plot.png │ ├── mqc_spp_nsc_plot.png │ ├── mqc_spp_rsc_plot.png │ ├── mqc_spp_strand_correlation_plot.png │ ├── nf-core-chipseq_logo_dark.png │ ├── nf-core-chipseq_logo_light.png │ ├── nf-core-chipseq_metro_map_grey.png │ ├── nf-core-chipseq_metro_map_grey.svg │ └── r_upsetr_intersect_plot.png ├── output.md └── usage.md ├── main.nf ├── modules.json ├── modules ├── local │ ├── annotate_boolean_peaks.nf │ ├── bam_remove_orphans.nf │ ├── bamtools_filter.nf │ ├── bedtools_genomecov.nf │ ├── deseq2_qc.nf │ ├── frip_score.nf │ ├── genome_blacklist_regions.nf │ ├── gtf2bed.nf │ ├── igv.nf │ ├── macs3_consensus.nf │ ├── multiqc.nf │ ├── multiqc_custom_peaks.nf │ ├── multiqc_custom_phantompeakqualtools.nf │ ├── plot_homer_annotatepeaks.nf │ ├── plot_macs3_qc.nf │ ├── samplesheet_check.nf │ ├── star_align.nf │ └── star_genomegenerate.nf └── nf-core │ ├── bowtie2 │ ├── align │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── cram_crai.config │ │ │ ├── large_index.config │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ ├── sam.config │ │ │ ├── sam2.config │ │ │ └── tags.yml │ └── build │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── bwa │ ├── index │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ 
├── main.nf.test.snap │ │ │ └── tags.yml │ └── mem │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── chromap │ ├── chromap │ │ ├── chromap-chromap.diff │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ ├── nextflow.config │ │ │ └── tags.yml │ └── index │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── custom │ └── getchromsizes │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── deeptools │ ├── computematrix │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ ├── nextflow.config │ │ │ └── tags.yml │ ├── plotfingerprint │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ ├── plotheatmap │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ └── plotprofile │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── fastqc │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── gffread │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow-fasta.config │ │ ├── nextflow-gff3.config │ │ ├── nextflow.config │ │ └── tags.yml │ ├── gunzip │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── homer │ └── annotatepeaks │ │ ├── environment.yml │ │ ├── 
main.nf │ │ └── meta.yml │ ├── khmer │ └── uniquekmers │ │ ├── environment.yml │ │ ├── main.nf │ │ └── meta.yml │ ├── macs3 │ └── callpeak │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── bam.config │ │ ├── bed.config │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── multiqc │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── phantompeakqualtools │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── picard │ ├── collectmultiplemetrics │ │ ├── environment.yml │ │ ├── main.nf │ │ └── meta.yml │ ├── markduplicates │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ ├── nextflow.config │ │ │ └── tags.yml │ └── mergesamfiles │ │ ├── environment.yml │ │ ├── main.nf │ │ └── meta.yml │ ├── preseq │ └── lcextrap │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── samtools │ ├── flagstat │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ ├── idxstats │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ ├── index │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── csi.nextflow.config │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ ├── sort │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ ├── nextflow.config │ │ │ ├── nextflow_cram.config │ │ │ └── tags.yml │ └── stats │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── subread │ 
└── featurecounts │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── trimgalore │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── ucsc │ └── bedgraphtobigwig │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── umitools │ └── extract │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── untar │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ └── untarfiles │ ├── environment.yml │ ├── main.nf │ └── meta.yml ├── nextflow.config ├── nextflow_schema.json ├── subworkflows ├── local │ ├── align_star.nf │ ├── bam_bedgraph_bigwig_bedtools_ucsc.nf │ ├── bam_filter_bamtools.nf │ ├── bam_peaks_call_qc_annotate_macs3_homer.nf │ ├── bed_consensus_quantify_qc_bedtools_featurecounts_deseq2.nf │ ├── input_check.nf │ ├── prepare_genome.nf │ └── utils_nfcore_chipseq_pipeline │ │ └── main.nf └── nf-core │ ├── bam_markduplicates_picard │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── bam_sort_stats_samtools │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── bam_stats_samtools │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── fastq_align_bowtie2 │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── fastq_align_bwa │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── 
fastq_align_chromap │ ├── main.nf │ └── meta.yml │ ├── fastq_fastqc_umitools_trimgalore │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── utils_nextflow_pipeline │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.function.nf.test │ │ ├── main.function.nf.test.snap │ │ ├── main.workflow.nf.test │ │ ├── nextflow.config │ │ └── tags.yml │ ├── utils_nfcore_pipeline │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.function.nf.test │ │ ├── main.function.nf.test.snap │ │ ├── main.workflow.nf.test │ │ ├── main.workflow.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ └── utils_nfvalidation_plugin │ ├── main.nf │ ├── meta.yml │ └── tests │ ├── main.nf.test │ ├── nextflow_schema.json │ └── tags.yml ├── tower.yml └── workflows └── chipseq.nf /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nfcore", 3 | "image": "nfcore/gitpod:latest", 4 | "remoteUser": "gitpod", 5 | "runArgs": ["--privileged"], 6 | 7 | // Configure tool-specific properties. 8 | "customizations": { 9 | // Configure properties specific to VS Code. 10 | "vscode": { 11 | // Set *default* container specific settings.json values on container create. 12 | "settings": { 13 | "python.defaultInterpreterPath": "/opt/conda/bin/python" 14 | }, 15 | 16 | // Add the IDs of extensions you want installed when the container is created. 
17 |             "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] 18 |         } 19 |     } 20 | } 21 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | insert_final_newline = true 7 | trim_trailing_whitespace = true 8 | indent_size = 4 9 | indent_style = space 10 | 11 | [*.{md,yml,yaml,html,css,scss,js}] 12 | indent_size = 2 13 | 14 | # These files are edited and tested upstream in nf-core/modules 15 | [/modules/nf-core/**] 16 | charset = unset 17 | end_of_line = unset 18 | insert_final_newline = unset 19 | trim_trailing_whitespace = unset 20 | indent_style = unset 21 | [/subworkflows/nf-core/**] 22 | charset = unset 23 | end_of_line = unset 24 | insert_final_newline = unset 25 | trim_trailing_whitespace = unset 26 | indent_style = unset 27 | 28 | [/assets/email*] 29 | indent_size = unset 30 | 31 | [/assets/blacklists/**] 32 | trim_trailing_whitespace = unset 33 | 34 | # ignore python and markdown 35 | [*.{py,md}] 36 | indent_style = unset 37 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.config linguist-language=nextflow 2 | *.nf.test linguist-language=nextflow 3 | modules/nf-core/** linguist-generated 4 | subworkflows/nf-core/** linguist-generated 5 | -------------------------------------------------------------------------------- /.github/.dockstore.yml: -------------------------------------------------------------------------------- 1 | # Dockstore config version, not pipeline version 2 | version: 1.2 3 | workflows: 4 |   - subclass: nfl 5 |     primaryDescriptorPath: /nextflow.config 6 |     publish: True 7 | --------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Report something that is broken or incorrect 3 | labels: bug 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Before you post this issue, please check the documentation: 9 | 10 | - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) 11 | - [nf-core/chipseq pipeline documentation](https://nf-co.re/chipseq/usage) 12 | 13 | - type: textarea 14 | id: description 15 | attributes: 16 | label: Description of the bug 17 | description: A clear and concise description of what the bug is. 18 | validations: 19 | required: true 20 | 21 | - type: textarea 22 | id: command_used 23 | attributes: 24 | label: Command used and terminal output 25 | description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal. 26 | render: console 27 | placeholder: | 28 | $ nextflow run ... 29 | 30 | Some output where something broke 31 | 32 | - type: textarea 33 | id: files 34 | attributes: 35 | label: Relevant files 36 | description: | 37 | Please drag and drop the relevant files here. Create a `.zip` archive if the extension is not allowed. 38 | Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files. 39 | 40 | - type: textarea 41 | id: system 42 | attributes: 43 | label: System information 44 | description: | 45 | * Nextflow version _(eg. 23.04.0)_ 46 | * Hardware _(eg. HPC, Desktop, Cloud)_ 47 | * Executor _(eg. slurm, local, awsbatch)_ 48 | * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_ 49 | * OS _(eg. CentOS Linux, macOS, Linux Mint)_ 50 | * Version of nf-core/chipseq _(eg. 
1.1, 1.5, 1.8.2)_ 51 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Join nf-core 3 | url: https://nf-co.re/join 4 | about: Please join the nf-core community here 5 | - name: "Slack #chipseq channel" 6 | url: https://nfcore.slack.com/channels/chipseq 7 | about: Discussion about the nf-core/chipseq pipeline 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for the nf-core/chipseq pipeline 3 | labels: enhancement 4 | body: 5 | - type: textarea 6 | id: description 7 | attributes: 8 | label: Description of feature 9 | description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered. 10 | validations: 11 | required: true 12 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 13 | 14 | ## PR checklist 15 | 16 | - [ ] This comment contains a description of changes (with reason). 17 | - [ ] If you've fixed a bug or added code that should be tested, add tests! 18 | - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/chipseq/tree/master/.github/CONTRIBUTING.md) 19 | - [ ] If necessary, also make a PR on the nf-core/chipseq _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. 20 | - [ ] Make sure your code lints (`nf-core lint`). 21 | - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). 
22 | - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). 23 | - [ ] Usage Documentation in `docs/usage.md` is updated. 24 | - [ ] Output Documentation in `docs/output.md` is updated. 25 | - [ ] `CHANGELOG.md` is updated. 26 | - [ ] `README.md` is updated (including new tool citations and authors/contributors). 27 | -------------------------------------------------------------------------------- /.github/workflows/awsfulltest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS full size tests 2 | # This workflow is triggered on published releases. 3 | # It can be additionally triggered manually with GitHub actions workflow dispatch button. 4 | # It runs the -profile 'test_full' on AWS batch 5 | 6 | on: 7 | release: 8 | types: [published] 9 | workflow_dispatch: 10 | jobs: 11 | run-platform: 12 | name: Run AWS full tests 13 | if: github.repository == 'nf-core/chipseq' 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | aligner: ["bwa", "bowtie2", "chromap", "star"] 18 | steps: 19 | - name: Launch workflow via Seqera Platform 20 | uses: seqeralabs/action-tower-launch@v2 21 | with: 22 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 23 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 24 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 25 | revision: ${{ github.sha }} 26 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/chipseq/work-${{ github.sha }} 27 | parameters: | 28 | { 29 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 30 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/chipseq/results-${{ github.sha }}", 31 | "aligner": "${{ matrix.aligner }}" 32 | } 33 | profiles: test_full 34 | 35 | - uses: actions/upload-artifact@v4 36 | if: success() || failure() 37 | with: 38 | name: Seqera Platform debug log file 39 | path: | 40 | seqera_platform_action_*.log 41 | seqera_platform_action_*.json 42 | 
-------------------------------------------------------------------------------- /.github/workflows/awstest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS test 2 | # This workflow can be triggered manually with the GitHub actions workflow dispatch button. 3 | # It runs the -profile 'test' on AWS batch 4 | 5 | on: 6 | workflow_dispatch: 7 | jobs: 8 | run-platform: 9 | name: Run AWS tests 10 | if: github.repository == 'nf-core/chipseq' 11 | runs-on: ubuntu-latest 12 | steps: 13 | # Launch workflow using Seqera Platform CLI tool action 14 | - name: Launch workflow via Seqera Platform 15 | uses: seqeralabs/action-tower-launch@v2 16 | with: 17 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 18 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 19 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 20 | revision: ${{ github.sha }} 21 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/chipseq/work-${{ github.sha }} 22 | parameters: | 23 | { 24 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/chipseq/results-test-${{ github.sha }}" 25 | } 26 | profiles: test 27 | 28 | - uses: actions/upload-artifact@v4 29 | with: 30 | name: Seqera Platform debug log file 31 | path: | 32 | seqera_platform_action_*.log 33 | seqera_platform_action_*.json 34 | -------------------------------------------------------------------------------- /.github/workflows/clean-up.yml: -------------------------------------------------------------------------------- 1 | name: "Close user-tagged issues and PRs" 2 | on: 3 | schedule: 4 | - cron: "0 0 * * 0" # Once a week 5 | 6 | jobs: 7 | clean-up: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 14 | with: 15 | stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. 
Remove stale label or add a comment otherwise this issue will be closed in 20 days." 16 | stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 17 | close-issue-message: "This issue was closed because it has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then staled for 20 days with no activity." 18 | days-before-stale: 30 19 | days-before-close: 20 20 | days-before-pr-close: -1 21 | any-of-labels: "awaiting-changes,awaiting-feedback" 22 | exempt-issue-labels: "WIP" 23 | exempt-pr-labels: "WIP" 24 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 25 | -------------------------------------------------------------------------------- /.github/workflows/linting_comment.yml: -------------------------------------------------------------------------------- 1 | name: nf-core linting comment 2 | # This workflow is triggered after the linting action is complete 3 | # It posts an automated comment to the PR, even if the PR is coming from a fork 4 | 5 | on: 6 | workflow_run: 7 | workflows: ["nf-core linting"] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Download lint results 14 | uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3 15 | with: 16 | workflow: linting.yml 17 | workflow_conclusion: completed 18 | 19 | - name: Get PR number 20 | id: pr_number 21 | run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT 22 | 23 | - name: Post PR comment 24 | uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31 # v2 25 | with: 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | number: ${{ steps.pr_number.outputs.pr_number }} 28 | path: linting-logs/lint_results.md 29 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | .nextflow* 2 | work/ 3 | data/ 4 | results/ 5 | .DS_Store 6 | testing/ 7 | testing* 8 | *.pyc 9 | -------------------------------------------------------------------------------- /.gitpod.yml: -------------------------------------------------------------------------------- 1 | image: nfcore/gitpod:latest 2 | tasks: 3 | - name: Update Nextflow and setup pre-commit 4 | command: | 5 | pre-commit install --install-hooks 6 | nextflow self-update 7 | - name: unset JAVA_TOOL_OPTIONS 8 | command: | 9 | unset JAVA_TOOL_OPTIONS 10 | 11 | vscode: 12 | extensions: # based on nf-core.nf-core-extensionpack 13 | - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code 14 | - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files 15 | - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar 16 | - mechatroner.rainbow-csv # Highlight columns in csv files in different colors 17 | # - nextflow.nextflow # Nextflow syntax highlighting 18 | - oderwat.indent-rainbow # Highlight indentation level 19 | - streetsidesoftware.code-spell-checker # Spelling checker for source code 20 | - charliermarsh.ruff # Code linter Ruff 21 | -------------------------------------------------------------------------------- /.nf-core.yml: -------------------------------------------------------------------------------- 1 | repository_type: pipeline 2 | nf_core_version: "2.14.1" 3 | lint: 4 | nextflow_config: 5 | - config_defaults: 6 | - params.bamtools_filter_se_config 7 | - params.bamtools_filter_pe_config 8 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/mirrors-prettier 3 | rev: "v3.1.0" 4 | hooks: 5 | - id: prettier 6 
| additional_dependencies: 7 | - prettier@3.2.5 8 | 9 | - repo: https://github.com/editorconfig-checker/editorconfig-checker.python 10 | rev: "2.7.3" 11 | hooks: 12 | - id: editorconfig-checker 13 | alias: ec 14 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | email_template.html 2 | adaptivecard.json 3 | slackreport.json 4 | .nextflow* 5 | work/ 6 | data/ 7 | results/ 8 | .DS_Store 9 | testing/ 10 | testing* 11 | *.pyc 12 | bin/ 13 | -------------------------------------------------------------------------------- /.prettierrc.yml: -------------------------------------------------------------------------------- 1 | printWidth: 120 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Espinosa-Carrasco J, Patel H, Wang C, Ewels P 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /assets/bamtools_filter_pe.json: -------------------------------------------------------------------------------- 1 | { 2 | "filters": [ 3 | { "id": "insert_min", "insertSize": ">=-2000" }, 4 | 5 | { "id": "insert_max", "insertSize": "<=2000" }, 6 | 7 | { "id": "mismatch", "tag": "NM:<=4" } 8 | ], 9 | 10 | "rule": " insert_min & insert_max & mismatch " 11 | } 12 | -------------------------------------------------------------------------------- /assets/bamtools_filter_se.json: -------------------------------------------------------------------------------- 1 | { 2 | "filters": [{ "id": "mismatch", "tag": "NM:<=4" }], 3 | 4 | "rule": " mismatch " 5 | } 6 | -------------------------------------------------------------------------------- /assets/email_template.txt: -------------------------------------------------------------------------------- 1 | ---------------------------------------------------- 2 | ,--./,-. 3 | ___ __ __ __ ___ /,-._.--~\\ 4 | |\\ | |__ __ / ` / \\ |__) |__ } { 5 | | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, 6 | `._,._,' 7 | nf-core/chipseq ${version} 8 | ---------------------------------------------------- 9 | Run Name: $runName 10 | 11 | <% if (success){ 12 | out << "## nf-core/chipseq execution completed successfully! ##" 13 | } else { 14 | out << """#################################################### 15 | ## nf-core/chipseq execution completed unsuccessfully! ## 16 | #################################################### 17 | The exit status of the task that caused the workflow execution to fail was: $exitStatus. 
18 | The full error message was: 19 | 20 | ${errorReport} 21 | """ 22 | } %> 23 | 24 | 25 | The workflow was completed at $dateComplete (duration: $duration) 26 | 27 | The command used to launch the workflow was as follows: 28 | 29 | $commandLine 30 | 31 | 32 | 33 | Pipeline Configuration: 34 | ----------------------- 35 | <% out << summary.collect{ k,v -> " - $k: $v" }.join("\n") %> 36 | 37 | -- 38 | nf-core/chipseq 39 | https://github.com/nf-core/chipseq 40 | -------------------------------------------------------------------------------- /assets/multiqc/deseq2_clustering_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'deseq2_clustering' 2 | #section_name: 'MERGED LIB: DESeq2 sample similarity' 3 | #description: "Matrix is generated from clustering with Euclidean distances between 4 | # DESeq2 5 | # rlog values for each sample 6 | # in the deseq2_qc.r script." 7 | #plot_type: 'heatmap' 8 | #anchor: 'deseq2_clustering' 9 | #pconfig: 10 | # title: 'DESeq2: Heatmap of the sample-to-sample distances' 11 | # xlab: True 12 | # reverseColors: True 13 | -------------------------------------------------------------------------------- /assets/multiqc/deseq2_pca_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'deseq2_pca' 2 | #section_name: 'MERGED LIB: DESeq2 PCA plot' 3 | #description: "PCA plot of the samples in the experiment. 4 | # These values are calculated using DESeq2 5 | # in the deseq2_qc.r script." 
6 | #plot_type: 'scatter' 7 | #anchor: 'deseq2_pca' 8 | #pconfig: 9 | # title: 'DESeq2: Principal component plot' 10 | # xlab: PC1 11 | # ylab: PC2 12 | -------------------------------------------------------------------------------- /assets/multiqc/frip_score_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'frip_score' 2 | #section_name: 'MERGED LIB: MACS3 FRiP score' 3 | #description: "is generated by calculating the fraction of all mapped reads that fall 4 | # into the MACS3 called peak regions. A read must overlap a peak by at least 20% to be counted. 5 | # See FRiP score." 6 | #plot_type: 'bargraph' 7 | #anchor: 'frip_score' 8 | #pconfig: 9 | # title: 'FRiP score' 10 | # ylab: 'FRiP score' 11 | # ymax: 1 12 | # ymin: 0 13 | # tt_decimals: 2 14 | -------------------------------------------------------------------------------- /assets/multiqc/peak_annotation_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'peak_annotation' 2 | #section_name: 'MERGED LIB: HOMER peak annotation' 3 | #description: "is generated by calculating the proportion of peaks assigned to genomic features by 4 | # HOMER annotatePeaks.pl." 
5 | #plot_type: 'bargraph' 6 | #anchor: 'peak_annotation' 7 | #pconfig: 8 | # title: 'Peak to feature proportion' 9 | # ylab: 'Peak count' 10 | -------------------------------------------------------------------------------- /assets/multiqc/peak_count_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'peak_count' 2 | #section_name: 'MERGED LIB: MACS3 peak count' 3 | #description: "is calculated from total number of peaks called by 4 | # MACS3" 5 | #plot_type: 'bargraph' 6 | #anchor: 'peak_count' 7 | #pconfig: 8 | # title: 'Total peak count' 9 | # ylab: 'Peak count' 10 | -------------------------------------------------------------------------------- /assets/multiqc/spp_correlation_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'strand_shift_correlation' 2 | #section_name: 'MERGED LIB: spp strand-shift correlation' 3 | #description: "generated using run_spp.R script from 4 | # phantompeakqualtools." 5 | #plot_type: 'linegraph' 6 | #anchor: 'strand_shift_correlation' 7 | #pconfig: 8 | # title: 'Strand-shift correlation plot' 9 | # ylab: 'Cross-correlation' 10 | # xlab: 'Strand-shift (bp)' 11 | # xDecimals: False 12 | # tt_label: 'Strand-shift (bp) {point.x}: {point.y:.2f} Cross-correlation' 13 | -------------------------------------------------------------------------------- /assets/multiqc/spp_nsc_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'nsc_coefficient' 2 | #section_name: 'MERGED LIB: spp NSC coefficient' 3 | #description: "generated using run_spp.R script from 4 | # phantompeakqualtools." 
5 | #plot_type: 'bargraph' 6 | #anchor: 'nsc_coefficient' 7 | #pconfig: 8 | # title: 'Normalized strand cross-correlation coefficient' 9 | # ylab: 'NSC coefficient' 10 | # ymin: 1 11 | # tt_decimals: 1 12 | -------------------------------------------------------------------------------- /assets/multiqc/spp_rsc_header.txt: -------------------------------------------------------------------------------- 1 | #id: 'rsc_coefficient' 2 | #section_name: 'MERGED LIB: spp RSC coefficient' 3 | #description: "generated using run_spp.R script from 4 | # phantompeakqualtools." 5 | #plot_type: 'bargraph' 6 | #anchor: 'rsc_coefficient' 7 | #pconfig: 8 | # title: 'Relative strand cross-correlation coefficient' 9 | # ylab: 'RSC coefficient' 10 | # ymin: 0 11 | # tt_decimals: 1 12 | -------------------------------------------------------------------------------- /assets/nf-core-chipseq_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/assets/nf-core-chipseq_logo_light.png -------------------------------------------------------------------------------- /assets/samplesheet_pe.csv: -------------------------------------------------------------------------------- 1 | sample,fastq_1,fastq_2,replicate,antibody,control,control_replicate 2 | WT_BCATENIN_IP,BLA203A1_S27_L006_R1_001.fastq.gz,BLA203A1_S27_L006_R2_001.fastq.gz,1,BCATENIN,WT_INPUT,1 3 | WT_BCATENIN_IP,BLA203A25_S16_L001_R1_001.fastq.gz,BLA203A25_S16_L001_R2_001.fastq.gz,2,BCATENIN,WT_INPUT,2 4 | WT_BCATENIN_IP,BLA203A25_S16_L002_R1_001.fastq.gz,BLA203A25_S16_L002_R2_001.fastq.gz,2,BCATENIN,WT_INPUT,2 5 | WT_BCATENIN_IP,BLA203A49_S40_L001_R1_001.fastq.gz,BLA203A49_S40_L001_R2_001.fastq.gz,3,BCATENIN,WT_INPUT,3 6 | NAIVE_BCATENIN_IP,BLA203A7_S60_L001_R1_001.fastq.gz,BLA203A7_S60_L001_R2_001.fastq.gz,1,BCATENIN,NAIVE_INPUT,1 7 | 
NAIVE_BCATENIN_IP,BLA203A43_S34_L001_R1_001.fastq.gz,BLA203A43_S34_L001_R2_001.fastq.gz,2,BCATENIN,NAIVE_INPUT,2 8 | NAIVE_BCATENIN_IP,BLA203A43_S34_L002_R1_001.fastq.gz,BLA203A43_S34_L002_R2_001.fastq.gz,2,BCATENIN,NAIVE_INPUT,2 9 | NAIVE_BCATENIN_IP,BLA203A64_S55_L001_R1_001.fastq.gz,BLA203A64_S55_L001_R2_001.fastq.gz,3,BCATENIN,NAIVE_INPUT,3 10 | WT_TCF4_IP,BLA203A3_S29_L006_R1_001.fastq.gz,BLA203A3_S29_L006_R2_001.fastq.gz,1,TCF4,WT_INPUT,1 11 | WT_TCF4_IP,BLA203A27_S18_L001_R1_001.fastq.gz,BLA203A27_S18_L001_R2_001.fastq.gz,2,TCF4,WT_INPUT,2 12 | WT_TCF4_IP,BLA203A51_S42_L001_R1_001.fastq.gz,BLA203A51_S42_L001_R2_001.fastq.gz,2,TCF4,WT_INPUT,2 13 | NAIVE_TCF4_IP,BLA203A9_S62_L001_R1_001.fastq.gz,BLA203A9_S62_L001_R2_001.fastq.gz,1,TCF4,NAIVE_INPUT,1 14 | NAIVE_TCF4_IP,BLA203A45_S36_L001_R1_001.fastq.gz,BLA203A45_S36_L001_R2_001.fastq.gz,2,TCF4,NAIVE_INPUT,2 15 | NAIVE_TCF4_IP,BLA203A66_S57_L001_R1_001.fastq.gz,BLA203A66_S57_L001_R2_001.fastq.gz,3,TCF4,NAIVE_INPUT,3 16 | WT_INPUT,BLA203A6_S32_L006_R1_001.fastq.gz,BLA203A6_S32_L006_R2_001.fastq.gz,1,,, 17 | WT_INPUT,BLA203A30_S21_L001_R1_001.fastq.gz,BLA203A30_S21_L001_R2_001.fastq.gz,2,,, 18 | WT_INPUT,BLA203A31_S21_L003_R1_001.fastq.gz,BLA203A31_S21_L003_R2_001.fastq.gz,3,,, 19 | NAIVE_INPUT,BLA203A12_S3_L001_R1_001.fastq.gz,BLA203A12_S3_L001_R2_001.fastq.gz,1,,, 20 | NAIVE_INPUT,BLA203A48_S39_L001_R1_001.fastq.gz,BLA203A48_S39_L001_R2_001.fastq.gz,2,,, 21 | NAIVE_INPUT,BLA203A49_S1_L006_R1_001.fastq.gz,BLA203A49_S1_L006_R2_001.fastq.gz,3,,, 22 | -------------------------------------------------------------------------------- /assets/samplesheet_se.csv: -------------------------------------------------------------------------------- 1 | sample,fastq_1,fastq_2,replicate,antibody,control,control_replicate 2 | WT_BCATENIN_IP,BLA203A1_S27_L006_R1_001.fastq.gz,,1,BCATENIN,WT_INPUT,1 3 | WT_BCATENIN_IP,BLA203A25_S16_L001_R1_001.fastq.gz,,2,BCATENIN,WT_INPUT,2 4 | 
WT_BCATENIN_IP,BLA203A25_S16_L002_R1_001.fastq.gz,,2,BCATENIN,WT_INPUT,2 5 | WT_BCATENIN_IP,BLA203A49_S40_L001_R1_001.fastq.gz,,3,BCATENIN,WT_INPUT,3 6 | NAIVE_BCATENIN_IP,BLA203A7_S60_L001_R1_001.fastq.gz,,1,BCATENIN,NAIVE_INPUT,1 7 | NAIVE_BCATENIN_IP,BLA203A43_S34_L001_R1_001.fastq.gz,,2,BCATENIN,NAIVE_INPUT,2 8 | NAIVE_BCATENIN_IP,BLA203A43_S34_L002_R1_001.fastq.gz,,2,BCATENIN,NAIVE_INPUT,2 9 | NAIVE_BCATENIN_IP,BLA203A64_S55_L001_R1_001.fastq.gz,,3,BCATENIN,NAIVE_INPUT,3 10 | WT_TCF4_IP,BLA203A3_S29_L006_R1_001.fastq.gz,,1,TCF4,WT_INPUT,1 11 | WT_TCF4_IP,BLA203A27_S18_L001_R1_001.fastq.gz,,2,TCF4,WT_INPUT,2 12 | WT_TCF4_IP,BLA203A51_S42_L001_R1_001.fastq.gz,,3,TCF4,WT_INPUT,3 13 | NAIVE_TCF4_IP,BLA203A9_S62_L001_R1_001.fastq.gz,,1,TCF4,NAIVE_INPUT,1 14 | NAIVE_TCF4_IP,BLA203A45_S36_L001_R1_001.fastq.gz,,2,TCF4,NAIVE_INPUT,2 15 | NAIVE_TCF4_IP,BLA203A66_S57_L001_R1_001.fastq.gz,,3,TCF4,NAIVE_INPUT,3 16 | WT_INPUT,BLA203A6_S32_L006_R1_001.fastq.gz,,1,,, 17 | WT_INPUT,BLA203A30_S21_L001_R1_001.fastq.gz,,2,,, 18 | WT_INPUT,BLA203A31_S21_L003_R1_001.fastq.gz,,3,,, 19 | NAIVE_INPUT,BLA203A12_S3_L001_R1_001.fastq.gz,,1,,, 20 | NAIVE_INPUT,BLA203A48_S39_L001_R1_001.fastq.gz,,2,,, 21 | NAIVE_INPUT,BLA203A49_S1_L006_R1_001.fastq.gz,,3,,, 22 | -------------------------------------------------------------------------------- /assets/sendmail_template.txt: -------------------------------------------------------------------------------- 1 | To: $email 2 | Subject: $subject 3 | Mime-Version: 1.0 4 | Content-Type: multipart/related;boundary="nfcoremimeboundary" 5 | 6 | --nfcoremimeboundary 7 | Content-Type: text/html; charset=utf-8 8 | 9 | $email_html 10 | 11 | --nfcoremimeboundary 12 | Content-Type: image/png;name="nf-core-chipseq_logo.png" 13 | Content-Transfer-Encoding: base64 14 | Content-ID: 15 | Content-Disposition: inline; filename="nf-core-chipseq_logo_light.png" 16 | 17 | <% out << new File("$projectDir/assets/nf-core-chipseq_logo_light.png"). 18 | bytes. 
19 | encodeBase64(). 20 | toString(). 21 | tokenize( '\n' )*. 22 | toList()*. 23 | collate( 76 )*. 24 | collect { it.join() }. 25 | flatten(). 26 | join( '\n' ) %> 27 | 28 | <% 29 | if (mqcFile){ 30 | def mqcFileObj = new File("$mqcFile") 31 | if (mqcFileObj.length() < mqcMaxSize){ 32 | out << """ 33 | --nfcoremimeboundary 34 | Content-Type: text/html; name=\"multiqc_report\" 35 | Content-Transfer-Encoding: base64 36 | Content-ID: 37 | Content-Disposition: attachment; filename=\"${mqcFileObj.getName()}\" 38 | 39 | ${mqcFileObj. 40 | bytes. 41 | encodeBase64(). 42 | toString(). 43 | tokenize( '\n' )*. 44 | toList()*. 45 | collate( 76 )*. 46 | collect { it.join() }. 47 | flatten(). 48 | join( '\n' )} 49 | """ 50 | }} 51 | %> 52 | 53 | --nfcoremimeboundary-- 54 | -------------------------------------------------------------------------------- /assets/slackreport.json: -------------------------------------------------------------------------------- 1 | { 2 | "attachments": [ 3 | { 4 | "fallback": "Plain-text summary of the attachment.", 5 | "color": "<% if (success) { %>good<% } else { %>danger<%} %>", 6 | "author_name": "nf-core/chipseq ${version} - ${runName}", 7 | "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", 8 | "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", 9 | "fields": [ 10 | { 11 | "title": "Command used to launch the workflow", 12 | "value": "```${commandLine}```", 13 | "short": false 14 | } 15 | <% 16 | if (!success) { %> 17 | , 18 | { 19 | "title": "Full error message", 20 | "value": "```${errorReport}```", 21 | "short": false 22 | }, 23 | { 24 | "title": "Pipeline configuration", 25 | "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? 
("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", 26 | "short": false 27 | } 28 | <% } 29 | %> 30 | ], 31 | "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /conf/test.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/chipseq -profile test, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | params { 14 | config_profile_name = 'Test profile' 15 | config_profile_description = 'Minimal test dataset to check pipeline function' 16 | 17 | // Limit resources so that this can run on GitHub Actions 18 | max_cpus = 2 19 | max_memory = '6.GB' 20 | max_time = '6.h' 21 | 22 | // Input data 23 | input = params.pipelines_testdata_base_path + 'chipseq/samplesheet/v2.1/samplesheet_test.csv' 24 | read_length = 50 25 | 26 | // Genome references 27 | fasta = params.pipelines_testdata_base_path + 'atacseq/reference/genome.fa' 28 | gtf = params.pipelines_testdata_base_path + 'atacseq/reference/genes.gtf' 29 | 30 | // For speed to avoid CI time-out 31 | fingerprint_bins = 100 32 | 33 | // Avoid preseq errors with test data 34 | skip_preseq = true 35 | } 36 | -------------------------------------------------------------------------------- /conf/test_full.config: 
-------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running full-size tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a full size pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/chipseq -profile test_full, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | params { 14 | config_profile_name = 'Full test profile' 15 | config_profile_description = 'Full test dataset to check pipeline function' 16 | 17 | // Input data for full size test 18 | input = params.pipelines_testdata_base_path + 'chipseq/samplesheet/v2.1/samplesheet_full.csv' 19 | 20 | // Used to calculate --macs_gsize 21 | read_length = 50 22 | 23 | // Genome references 24 | genome = 'hg19' 25 | } 26 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # nf-core/chipseq: Documentation 2 | 3 | The nf-core/chipseq documentation is split into the following pages: 4 | 5 | - [Usage](usage.md) 6 | - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. 7 | - [Output](output.md) 8 | - An overview of the different results produced by the pipeline and how to interpret them. 
9 | 10 | You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) 11 | -------------------------------------------------------------------------------- /docs/images/igv_screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/igv_screenshot.png -------------------------------------------------------------------------------- /docs/images/mqc_annotatePeaks_feature_percentage_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_annotatePeaks_feature_percentage_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_cutadapt_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_cutadapt_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_deeptools_plotFingerprint_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_deeptools_plotFingerprint_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_deeptools_plotProfile_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_deeptools_plotProfile_plot.png -------------------------------------------------------------------------------- 
/docs/images/mqc_deseq2_pca_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_deseq2_pca_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_deseq2_sample_similarity_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_deseq2_sample_similarity_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_fastqc_adapter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_fastqc_adapter.png -------------------------------------------------------------------------------- /docs/images/mqc_fastqc_counts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_fastqc_counts.png -------------------------------------------------------------------------------- /docs/images/mqc_fastqc_quality.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_fastqc_quality.png -------------------------------------------------------------------------------- /docs/images/mqc_featureCounts_assignment_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_featureCounts_assignment_plot.png 
-------------------------------------------------------------------------------- /docs/images/mqc_frip_score_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_frip_score_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_macs3_peak_count_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_macs3_peak_count_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_picard_deduplication_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_picard_deduplication_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_picard_insert_size_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_picard_insert_size_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_preseq_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_preseq_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_samtools_stats_plot.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_samtools_stats_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_spp_nsc_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_spp_nsc_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_spp_rsc_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_spp_rsc_plot.png -------------------------------------------------------------------------------- /docs/images/mqc_spp_strand_correlation_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/mqc_spp_strand_correlation_plot.png -------------------------------------------------------------------------------- /docs/images/nf-core-chipseq_logo_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/nf-core-chipseq_logo_dark.png -------------------------------------------------------------------------------- /docs/images/nf-core-chipseq_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/nf-core-chipseq_logo_light.png -------------------------------------------------------------------------------- /docs/images/nf-core-chipseq_metro_map_grey.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/nf-core-chipseq_metro_map_grey.png -------------------------------------------------------------------------------- /docs/images/r_upsetr_intersect_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/chipseq/76e2382b6d443db4dc2396e6831d1243256d80b0/docs/images/r_upsetr_intersect_plot.png -------------------------------------------------------------------------------- /modules/local/annotate_boolean_peaks.nf: -------------------------------------------------------------------------------- 1 | process ANNOTATE_BOOLEAN_PEAKS { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "conda-forge::sed=4.7" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : 8 | 'docker.io/library/ubuntu:20.04' }" 9 | 10 | input: 11 | tuple val(meta), path(boolean_txt), path(homer_peaks) 12 | 13 | output: 14 | path '*.boolean.annotatePeaks.txt', emit: annotate_peaks_txt 15 | path "versions.yml" , emit: versions 16 | 17 | script: 18 | def prefix = task.ext.prefix ?: "${meta.id}" 19 | """ 20 | cut -f2- ${homer_peaks} | awk 'NR==1; NR > 1 {print \$0 | "sort -T '.' 
-k1,1 -k2,2n"}' | cut -f6- > tmp.txt 21 | paste $boolean_txt tmp.txt > ${prefix}.boolean.annotatePeaks.txt 22 | 23 | cat <<-END_VERSIONS > versions.yml 24 | "${task.process}": 25 | sed: \$(echo \$(sed --version 2>&1) | sed 's/^.*GNU sed) //; s/ .*\$//') 26 | END_VERSIONS 27 | """ 28 | } 29 | -------------------------------------------------------------------------------- /modules/local/bam_remove_orphans.nf: -------------------------------------------------------------------------------- 1 | /* 2 | * Remove orphan reads from paired-end BAM file 3 | */ 4 | process BAM_REMOVE_ORPHANS { 5 | tag "$meta.id" 6 | label 'process_medium' 7 | 8 | conda "bioconda::pysam=0.19.0 bioconda::samtools=1.15.1" 9 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 10 | 'https://depot.galaxyproject.org/singularity/mulled-v2-57736af1eb98c01010848572c9fec9fff6ffaafd:402e865b8f6af2f3e58c6fc8d57127ff0144b2c7-0' : 11 | 'biocontainers/mulled-v2-57736af1eb98c01010848572c9fec9fff6ffaafd:402e865b8f6af2f3e58c6fc8d57127ff0144b2c7-0' }" 12 | 13 | input: 14 | tuple val(meta), path(bam) 15 | 16 | output: 17 | tuple val(meta), path("${prefix}.bam"), emit: bam 18 | path "versions.yml" , emit: versions 19 | 20 | when: 21 | task.ext.when == null || task.ext.when 22 | 23 | script: // This script is bundled with the pipeline, in nf-core/chipseq/bin/ 24 | def args = task.ext.args ?: '' 25 | prefix = task.ext.prefix ?: "${meta.id}" 26 | if (!meta.single_end) { 27 | """ 28 | samtools sort -n -@ $task.cpus -o ${prefix}.name.sorted.bam -T ${prefix}.name.sorted $bam 29 | bampe_rm_orphan.py ${prefix}.name.sorted.bam ${prefix}.bam $args 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 34 | END_VERSIONS 35 | """ 36 | } else { 37 | """ 38 | ln -s $bam ${prefix}.bam 39 | 40 | cat <<-END_VERSIONS > versions.yml 41 | "${task.process}": 42 | samtools: 
\$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 43 | END_VERSIONS 44 | """ 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /modules/local/bamtools_filter.nf: -------------------------------------------------------------------------------- 1 | process BAMTOOLS_FILTER { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "bioconda::bamtools=2.5.2 bioconda::samtools=1.15.1" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/mulled-v2-0560a8046fc82aa4338588eca29ff18edab2c5aa:5687a7da26983502d0a8a9a6b05ed727c740ddc4-0' : 8 | 'biocontainers/mulled-v2-0560a8046fc82aa4338588eca29ff18edab2c5aa:5687a7da26983502d0a8a9a6b05ed727c740ddc4-0' }" 9 | 10 | input: 11 | tuple val(meta), path(bam), path(bai) 12 | path bed 13 | path bamtools_filter_se_config 14 | path bamtools_filter_pe_config 15 | 16 | output: 17 | tuple val(meta), path("*.bam"), emit: bam 18 | path "versions.yml" , emit: versions 19 | 20 | when: 21 | task.ext.when == null || task.ext.when 22 | 23 | script: 24 | def args = task.ext.args ?: '' 25 | def prefix = task.ext.prefix ?: "${meta.id}" 26 | def blacklist = bed ? "-L $bed" : '' 27 | def config = meta.single_end ? 
bamtools_filter_se_config : bamtools_filter_pe_config 28 | """ 29 | samtools view \\ 30 | $args \\ 31 | $blacklist \\ 32 | -b $bam \\ 33 | | bamtools filter \\ 34 | -out ${prefix}.bam \\ 35 | -script $config 36 | 37 | cat <<-END_VERSIONS > versions.yml 38 | "${task.process}": 39 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 40 | bamtools: \$(echo \$(bamtools --version 2>&1) | sed 's/^.*bamtools //; s/Part .*\$//') 41 | END_VERSIONS 42 | """ 43 | } 44 | -------------------------------------------------------------------------------- /modules/local/bedtools_genomecov.nf: -------------------------------------------------------------------------------- 1 | process BEDTOOLS_GENOMECOV { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "bioconda::bedtools=2.30.0" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0': 8 | 'biocontainers/bedtools:2.30.0--hc088bd4_0' }" 9 | 10 | input: 11 | tuple val(meta), path(bam), path(flagstat) 12 | 13 | output: 14 | tuple val(meta), path("*.bedGraph"), emit: bedgraph 15 | tuple val(meta), path("*.txt") , emit: scale_factor 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | def pe = meta.single_end ? '' : '-pc' 25 | """ 26 | SCALE_FACTOR=\$(grep '[0-9] mapped (' $flagstat | awk '{print 1000000/\$1}') 27 | echo \$SCALE_FACTOR > ${prefix}.scale_factor.txt 28 | 29 | bedtools \\ 30 | genomecov \\ 31 | -ibam $bam \\ 32 | -bg \\ 33 | -scale \$SCALE_FACTOR \\ 34 | $pe \\ 35 | $args \\ 36 | | sort -T '.' 
-k1,1 -k2,2n > ${prefix}.bedGraph 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") 41 | END_VERSIONS 42 | """ 43 | } 44 | -------------------------------------------------------------------------------- /modules/local/frip_score.nf: -------------------------------------------------------------------------------- 1 | process FRIP_SCORE { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "bioconda::bedtools=2.30.0 bioconda::samtools=1.15.1" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/mulled-v2-8186960447c5cb2faa697666dc1e6d919ad23f3e:3127fcae6b6bdaf8181e21a26ae61231030a9fcb-0': 8 | 'biocontainers/mulled-v2-8186960447c5cb2faa697666dc1e6d919ad23f3e:3127fcae6b6bdaf8181e21a26ae61231030a9fcb-0' }" 9 | 10 | input: 11 | tuple val(meta), path(bam), path(peak) 12 | 13 | output: 14 | tuple val(meta), path("*.txt"), emit: txt 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def prefix = task.ext.prefix ?: "${meta.id}" 23 | """ 24 | READS_IN_PEAKS=\$(intersectBed -a $bam -b $peak $args | awk -F '\t' '{sum += \$NF} END {print sum}') 25 | samtools flagstat $bam > ${bam}.flagstat 26 | grep 'mapped (' ${bam}.flagstat | grep -v "primary" | awk -v a="\$READS_IN_PEAKS" -v OFS='\t' '{print "${prefix}", a/\$1}' > ${prefix}.FRiP.txt 27 | 28 | cat <<-END_VERSIONS > versions.yml 29 | "${task.process}": 30 | bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") 31 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 32 | END_VERSIONS 33 | """ 34 | } 35 | -------------------------------------------------------------------------------- /modules/local/genome_blacklist_regions.nf: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Prepare genome intervals for filtering by removing regions in blacklist file 3 | */ 4 | process GENOME_BLACKLIST_REGIONS { 5 | tag "$sizes" 6 | 7 | conda "bioconda::bedtools=2.30.0" 8 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 9 | 'https://depot.galaxyproject.org/singularity/bedtools:2.30.0--hc088bd4_0': 10 | 'biocontainers/bedtools:2.30.0--hc088bd4_0' }" 11 | 12 | input: 13 | path sizes 14 | path blacklist 15 | 16 | output: 17 | path '*.bed' , emit: bed 18 | path "versions.yml", emit: versions 19 | 20 | when: 21 | task.ext.when == null || task.ext.when 22 | 23 | script: 24 | def file_out = "${sizes.simpleName}.include_regions.bed" 25 | if (blacklist) { 26 | """ 27 | sortBed -i $blacklist -g $sizes | complementBed -i stdin -g $sizes > $file_out 28 | 29 | cat <<-END_VERSIONS > versions.yml 30 | "${task.process}": 31 | bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") 32 | END_VERSIONS 33 | """ 34 | } else { 35 | """ 36 | awk '{print \$1, '0' , \$2}' OFS='\t' $sizes > $file_out 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | bedtools: \$(bedtools --version | sed -e "s/bedtools v//g") 41 | END_VERSIONS 42 | """ 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /modules/local/gtf2bed.nf: -------------------------------------------------------------------------------- 1 | process GTF2BED { 2 | tag "$gtf" 3 | label 'process_low' 4 | 5 | conda "conda-forge::perl=5.26.2" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/perl:5.26.2': 8 | 'biocontainers/perl:5.26.2' }" 9 | 10 | input: 11 | path gtf 12 | 13 | output: 14 | path '*.bed' , emit: bed 15 | path "versions.yml", emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: // This script is bundled with the pipeline, in nf-core/chipseq/bin/ 21 | """ 22 | gtf2bed \\ 23 | $gtf \\ 24 | > ${gtf.baseName}.bed 25 | 26 | cat <<-END_VERSIONS > versions.yml 27 | "${task.process}": 28 | perl: \$(echo \$(perl --version 2>&1) | sed 's/.*v\\(.*\\)) built.*/\\1/') 29 | END_VERSIONS 30 | """ 31 | } 32 | -------------------------------------------------------------------------------- /modules/local/igv.nf: -------------------------------------------------------------------------------- 1 | /* 2 | * Create IGV session file 3 | */ 4 | process IGV { 5 | 6 | conda "conda-forge::python=3.8.3" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
8 | 'https://depot.galaxyproject.org/singularity/python:3.8.3': 9 | 'biocontainers/python:3.8.3' }" 10 | 11 | input: 12 | val aligner_dir 13 | val peak_dir 14 | path fasta 15 | path ("${aligner_dir}/merged_library/bigwig/*") 16 | path ("${aligner_dir}/merged_library/macs3/${peak_dir}/*") 17 | path ("${aligner_dir}/merged_library/macs3/${peak_dir}/consensus/*") 18 | path ("mappings/*") 19 | 20 | output: 21 | path "*files.txt" , emit: txt 22 | path "*.xml" , emit: xml 23 | path fasta , emit: fasta 24 | path "versions.yml", emit: versions 25 | 26 | when: 27 | task.ext.when == null || task.ext.when 28 | 29 | script: // scripts are bundled with the pipeline in nf-core/chipseq/bin/ 30 | def consensus_dir = "${aligner_dir}/merged_library/macs3/${peak_dir}/consensus/*" 31 | """ 32 | find * -type l -name "*.bigWig" -exec echo -e ""{}"\\t0,0,178" \\; > bigwig.igv.txt 33 | find * -type l -name "*Peak" -exec echo -e ""{}"\\t0,0,178" \\; > peaks.igv.txt 34 | # Avoid error when consensus not produced 35 | find * -type l -name "*.bed" -exec echo -e ""{}"\\t0,0,178" \\; | { grep "^$consensus_dir" || test \$? = 1; } > consensus.igv.txt 36 | 37 | touch replace_paths.txt 38 | if [ -d "mappings" ]; then 39 | cat mappings/* > replace_paths.txt 40 | fi 41 | 42 | cat *.igv.txt > igv_files_orig.txt 43 | igv_files_to_session.py igv_session.xml igv_files_orig.txt replace_paths.txt ../../genome/${fasta.getName()} --path_prefix '../../' 44 | 45 | cat <<-END_VERSIONS > versions.yml 46 | "${task.process}": 47 | python: \$(python --version | sed 's/Python //g') 48 | END_VERSIONS 49 | """ 50 | } 51 | -------------------------------------------------------------------------------- /modules/local/multiqc_custom_peaks.nf: -------------------------------------------------------------------------------- 1 | process MULTIQC_CUSTOM_PEAKS { 2 | tag "$meta.id" 3 | 4 | conda "conda-forge::sed=4.7" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
6 | 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : 7 | 'nf-core/ubuntu:20.04' }" 8 | 9 | input: 10 | tuple val(meta), path(peak), path(frip) 11 | path peak_count_header 12 | path frip_score_header 13 | 14 | output: 15 | tuple val(meta), path("*.peak_count_mqc.tsv"), emit: count 16 | tuple val(meta), path("*.FRiP_mqc.tsv") , emit: frip 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | """ 25 | cat $peak | wc -l | awk -v OFS='\t' '{ print "${prefix}", \$1 }' | cat $peak_count_header - > ${prefix}.peak_count_mqc.tsv 26 | cat $frip_score_header $frip > ${prefix}.FRiP_mqc.tsv 27 | 28 | cat <<-END_VERSIONS > versions.yml 29 | "${task.process}": 30 | sed: \$(echo \$(sed --version 2>&1) | sed 's/^.*GNU sed) //; s/ .*\$//') 31 | END_VERSIONS 32 | """ 33 | } 34 | -------------------------------------------------------------------------------- /modules/local/multiqc_custom_phantompeakqualtools.nf: -------------------------------------------------------------------------------- 1 | process MULTIQC_CUSTOM_PHANTOMPEAKQUALTOOLS { 2 | tag "$meta.id" 3 | conda "conda-forge::r-base=4.3.3" 4 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
5 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/45/4569ff9993578b8402d00230ab9dd75ce6e63529731eb24f21579845e6bd5cdb/data': 6 | 'community.wave.seqera.io/library/r-base:4.3.3--14bb33ac537aea22' }" 7 | 8 | input: 9 | tuple val(meta), path(spp), path(rdata) 10 | path nsc_header 11 | path rsc_header 12 | path correlation_header 13 | 14 | output: 15 | tuple val(meta), path("*.spp_nsc_mqc.tsv") , emit: nsc 16 | tuple val(meta), path("*.spp_rsc_mqc.tsv") , emit: rsc 17 | tuple val(meta), path("*.spp_correlation_mqc.tsv"), emit: correlation 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | """ 25 | cp $correlation_header ${prefix}.spp_correlation_mqc.tsv 26 | Rscript --max-ppsize=500000 -e "load('$rdata'); write.table(crosscorr\\\$cross.correlation, file=\\"${prefix}.spp_correlation_mqc.tsv\\", sep=",", quote=FALSE, row.names=FALSE, col.names=FALSE,append=TRUE)" 27 | 28 | awk -v OFS='\t' '{print "${meta.id}", \$9}' $spp | cat $nsc_header - > ${prefix}.spp_nsc_mqc.tsv 29 | awk -v OFS='\t' '{print "${meta.id}", \$10}' $spp | cat $rsc_header - > ${prefix}.spp_rsc_mqc.tsv 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | r-base: \$(echo \$(R --version 2>&1) | sed 's/^.*R version //; s/ .*\$//') 34 | END_VERSIONS 35 | """ 36 | } 37 | -------------------------------------------------------------------------------- /modules/local/plot_homer_annotatepeaks.nf: -------------------------------------------------------------------------------- 1 | process PLOT_HOMER_ANNOTATEPEAKS { 2 | label 'process_medium' 3 | 4 | conda "conda-forge::r-base=4.0.3 conda-forge::r-reshape2=1.4.4 conda-forge::r-optparse=1.6.6 conda-forge::r-ggplot2=3.3.3 conda-forge::r-scales=1.1.1 conda-forge::r-viridis=0.5.1 conda-forge::r-tidyverse=1.3.0 bioconda::bioconductor-biostrings=2.58.0 bioconda::bioconductor-complexheatmap=2.6.2" 5 | container "${ workflow.containerEngine == 
'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/mulled-v2-ad9dd5f398966bf899ae05f8e7c54d0fb10cdfa7:05678da05b8e5a7a5130e90a9f9a6c585b965afa-0': 7 | 'biocontainers/mulled-v2-ad9dd5f398966bf899ae05f8e7c54d0fb10cdfa7:05678da05b8e5a7a5130e90a9f9a6c585b965afa-0' }" 8 | 9 | input: 10 | path annos 11 | path mqc_header 12 | val suffix 13 | 14 | output: 15 | path '*.txt' , emit: txt 16 | path '*.pdf' , emit: pdf 17 | path '*.tsv' , emit: tsv 18 | path "versions.yml", emit: versions 19 | 20 | when: 21 | task.ext.when == null || task.ext.when 22 | 23 | script: // This script is bundled with the pipeline, in nf-core/chipseq/bin/ 24 | def args = task.ext.args ?: '' 25 | def prefix = task.ext.prefix ?: "annotatepeaks" 26 | """ 27 | plot_homer_annotatepeaks.r \\ 28 | -i ${annos.join(',')} \\ 29 | -s ${annos.join(',').replaceAll("${suffix}","")} \\ 30 | -p $prefix \\ 31 | $args 32 | 33 | find ./ -type f -name "*summary.txt" -exec cat {} \\; | cat $mqc_header - > ${prefix}.summary_mqc.tsv 34 | 35 | cat <<-END_VERSIONS > versions.yml 36 | "${task.process}": 37 | r-base: \$(echo \$(R --version 2>&1) | sed 's/^.*R version //; s/ .*\$//') 38 | END_VERSIONS 39 | """ 40 | } 41 | -------------------------------------------------------------------------------- /modules/local/plot_macs3_qc.nf: -------------------------------------------------------------------------------- 1 | process PLOT_MACS3_QC { 2 | label 'process_medium' 3 | 4 | conda "conda-forge::r-base=4.0.3 conda-forge::r-reshape2=1.4.4 conda-forge::r-optparse=1.6.6 conda-forge::r-ggplot2=3.3.3 conda-forge::r-scales=1.1.1 conda-forge::r-viridis=0.5.1 conda-forge::r-tidyverse=1.3.0 bioconda::bioconductor-biostrings=2.58.0 bioconda::bioconductor-complexheatmap=2.6.2" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
6 | 'https://depot.galaxyproject.org/singularity/mulled-v2-ad9dd5f398966bf899ae05f8e7c54d0fb10cdfa7:05678da05b8e5a7a5130e90a9f9a6c585b965afa-0': 7 | 'biocontainers/mulled-v2-ad9dd5f398966bf899ae05f8e7c54d0fb10cdfa7:05678da05b8e5a7a5130e90a9f9a6c585b965afa-0' }" 8 | 9 | input: 10 | path peaks 11 | val is_narrow_peak 12 | 13 | output: 14 | path '*.txt' , emit: txt 15 | path '*.pdf' , emit: pdf 16 | path "versions.yml", emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: // This script is bundled with the pipeline, in nf-core/chipseq/bin/ 22 | def args = task.ext.args ?: '' 23 | def peak_type = is_narrow_peak ? 'narrowPeak' : 'broadPeak' 24 | """ 25 | plot_macs3_qc.r \\ 26 | -i ${peaks.join(',')} \\ 27 | -s ${peaks.join(',').replaceAll("_peaks.${peak_type}","")} \\ 28 | $args 29 | 30 | cat <<-END_VERSIONS > versions.yml 31 | "${task.process}": 32 | r-base: \$(echo \$(R --version 2>&1) | sed 's/^.*R version //; s/ .*\$//') 33 | END_VERSIONS 34 | """ 35 | } 36 | -------------------------------------------------------------------------------- /modules/local/samplesheet_check.nf: -------------------------------------------------------------------------------- 1 | process SAMPLESHEET_CHECK { 2 | tag "$samplesheet" 3 | label 'process_single' 4 | 5 | conda "conda-forge::python=3.8.3" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/python:3.8.3' : 8 | 'biocontainers/python:3.8.3' }" 9 | 10 | input: 11 | path samplesheet 12 | 13 | output: 14 | path '*.csv' , emit: csv 15 | path "versions.yml", emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: // This script is bundled with the pipeline, in nf-core/chipseq/bin/ 21 | def args = task.ext.args ?: '' 22 | """ 23 | check_samplesheet.py \\ 24 | $samplesheet \\ 25 | $args 26 | 27 | cat <<-END_VERSIONS > versions.yml 28 | "${task.process}": 29 | python: \$(python --version | sed 's/Python //g') 30 | END_VERSIONS 31 | """ 32 | } 33 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/align/environment.yml: -------------------------------------------------------------------------------- 1 | name: bowtie2_align 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::bowtie2=2.5.2 8 | - bioconda::samtools=1.18 9 | - conda-forge::pigz=2.6 10 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/align/tests/cram_crai.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: BOWTIE2_ALIGN { 3 | ext.args2 = '--output-fmt cram --write-index' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/align/tests/large_index.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: BOWTIE2_BUILD { 3 | ext.args = '--large-index' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/align/tests/sam.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: BOWTIE2_ALIGN { 3 | ext.args2 = '--output-fmt SAM' 4 | } 5 | } 
-------------------------------------------------------------------------------- /modules/nf-core/bowtie2/align/tests/sam2.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: BOWTIE2_ALIGN { 3 | ext.args2 = '-O SAM' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/align/tests/tags.yml: -------------------------------------------------------------------------------- 1 | bowtie2/align: 2 | - modules/nf-core/bowtie2/align/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/build/environment.yml: -------------------------------------------------------------------------------- 1 | name: bowtie2_build 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::bowtie2=2.5.2 8 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/build/main.nf: -------------------------------------------------------------------------------- 1 | process BOWTIE2_BUILD { 2 | tag "$fasta" 3 | label 'process_high' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/bowtie2:2.5.2--py39h6fed5c7_0' : 8 | 'biocontainers/bowtie2:2.5.2--py39h6fed5c7_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path('bowtie2') , emit: index 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | """ 23 | mkdir bowtie2 24 | bowtie2-build $args --threads $task.cpus $fasta bowtie2/${fasta.baseName} 25 | cat <<-END_VERSIONS > versions.yml 26 | "${task.process}": 27 | bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') 28 | END_VERSIONS 29 | """ 30 | 31 | stub: 32 | """ 33 | mkdir bowtie2 34 | touch bowtie2/${fasta.baseName}.{1..4}.bt2 35 | touch bowtie2/${fasta.baseName}.rev.{1,2}.bt2 36 | 37 | cat <<-END_VERSIONS > versions.yml 38 | "${task.process}": 39 | bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') 40 | END_VERSIONS 41 | """ 42 | } 43 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/build/meta.yml: -------------------------------------------------------------------------------- 1 | name: bowtie2_build 2 | description: Builds bowtie index for reference genome 3 | keywords: 4 | - build 5 | - index 6 | - fasta 7 | - genome 8 | - reference 9 | tools: 10 | - bowtie2: 11 | description: | 12 | Bowtie 2 is an ultrafast and memory-efficient tool for aligning 13 | sequencing reads to long reference sequences. 14 | homepage: http://bowtie-bio.sourceforge.net/bowtie2/index.shtml 15 | documentation: http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml 16 | doi: 10.1038/nmeth.1923 17 | licence: ["GPL-3.0-or-later"] 18 | input: 19 | - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing reference information 23 | e.g. 
[ id:'test', single_end:false ] 24 | - fasta: 25 | type: file 26 | description: Input genome fasta file 27 | output: 28 | - meta: 29 | type: map 30 | description: | 31 | Groovy Map containing reference information 32 | e.g. [ id:'test', single_end:false ] 33 | - index: 34 | type: file 35 | description: Bowtie2 genome index files 36 | pattern: "*.bt2" 37 | - versions: 38 | type: file 39 | description: File containing software versions 40 | pattern: "versions.yml" 41 | authors: 42 | - "@joseespinosa" 43 | - "@drpatelh" 44 | maintainers: 45 | - "@joseespinosa" 46 | - "@drpatelh" 47 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/build/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process BOWTIE2_BUILD" 4 | script "modules/nf-core/bowtie2/build/main.nf" 5 | process "BOWTIE2_BUILD" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "bowtie2" 9 | tag "bowtie2/build" 10 | 11 | test("Should run without failures") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [ id:'test' ], 18 | file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assert process.success 26 | assert snapshot(process.out).match() 27 | } 28 | 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/build/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | [ 11 | "genome.1.bt2:md5,cbe3d0bbea55bc57c99b4bfa25b5fbdf", 12 | "genome.2.bt2:md5,47b153cd1319abc88dda532462651fcf", 13 | "genome.3.bt2:md5,4ed93abba181d8dfab2e303e33114777", 14 | "genome.4.bt2:md5,c25be5f8b0378abf7a58c8a880b87626", 15 | 
"genome.rev.1.bt2:md5,52be6950579598a990570fbcf5372184", 16 | "genome.rev.2.bt2:md5,e3b4ef343dea4dd571642010a7d09597" 17 | ] 18 | ] 19 | ], 20 | "1": [ 21 | "versions.yml:md5,1df11e9b82891527271c889c880d3974" 22 | ], 23 | "index": [ 24 | [ 25 | { 26 | "id": "test" 27 | }, 28 | [ 29 | "genome.1.bt2:md5,cbe3d0bbea55bc57c99b4bfa25b5fbdf", 30 | "genome.2.bt2:md5,47b153cd1319abc88dda532462651fcf", 31 | "genome.3.bt2:md5,4ed93abba181d8dfab2e303e33114777", 32 | "genome.4.bt2:md5,c25be5f8b0378abf7a58c8a880b87626", 33 | "genome.rev.1.bt2:md5,52be6950579598a990570fbcf5372184", 34 | "genome.rev.2.bt2:md5,e3b4ef343dea4dd571642010a7d09597" 35 | ] 36 | ] 37 | ], 38 | "versions": [ 39 | "versions.yml:md5,1df11e9b82891527271c889c880d3974" 40 | ] 41 | } 42 | ], 43 | "timestamp": "2023-11-23T11:51:01.107681997" 44 | } 45 | } -------------------------------------------------------------------------------- /modules/nf-core/bowtie2/build/tests/tags.yml: -------------------------------------------------------------------------------- 1 | bowtie2/build: 2 | - modules/nf-core/bowtie2/build/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/bwa/index/environment.yml: -------------------------------------------------------------------------------- 1 | name: bwa_index 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::bwa=0.7.18 8 | -------------------------------------------------------------------------------- /modules/nf-core/bwa/index/main.nf: -------------------------------------------------------------------------------- 1 | process BWA_INDEX { 2 | tag "$fasta" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/bwa:0.7.18--he4a0461_0' : 8 | 'biocontainers/bwa:0.7.18--he4a0461_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path(bwa) , emit: index 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def prefix = task.ext.prefix ?: "${fasta.baseName}" 22 | def args = task.ext.args ?: '' 23 | """ 24 | mkdir bwa 25 | bwa \\ 26 | index \\ 27 | $args \\ 28 | -p bwa/${prefix} \\ 29 | $fasta 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') 34 | END_VERSIONS 35 | """ 36 | 37 | stub: 38 | def prefix = task.ext.prefix ?: "${fasta.baseName}" 39 | """ 40 | mkdir bwa 41 | 42 | touch bwa/${prefix}.amb 43 | touch bwa/${prefix}.ann 44 | touch bwa/${prefix}.bwt 45 | touch bwa/${prefix}.pac 46 | touch bwa/${prefix}.sa 47 | 48 | cat <<-END_VERSIONS > versions.yml 49 | "${task.process}": 50 | bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') 51 | END_VERSIONS 52 | """ 53 | } 54 | -------------------------------------------------------------------------------- /modules/nf-core/bwa/index/meta.yml: -------------------------------------------------------------------------------- 1 | name: bwa_index 2 | description: Create BWA index for reference genome 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - bwa: 10 | description: | 11 | BWA is a software package for mapping DNA sequences against 12 | a large reference genome, such as the human genome. 13 | homepage: http://bio-bwa.sourceforge.net/ 14 | documentation: https://bio-bwa.sourceforge.net/bwa.shtml 15 | arxiv: arXiv:1303.3997 16 | licence: ["GPL-3.0-or-later"] 17 | input: 18 | - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing reference information. 22 | e.g. 
[ id:'test', single_end:false ] 23 | - fasta: 24 | type: file 25 | description: Input genome fasta file 26 | output: 27 | - meta: 28 | type: map 29 | description: | 30 | Groovy Map containing reference information. 31 | e.g. [ id:'test', single_end:false ] 32 | - index: 33 | type: file 34 | description: BWA genome index files 35 | pattern: "*.{amb,ann,bwt,pac,sa}" 36 | - versions: 37 | type: file 38 | description: File containing software versions 39 | pattern: "versions.yml" 40 | authors: 41 | - "@drpatelh" 42 | - "@maxulysse" 43 | maintainers: 44 | - "@drpatelh" 45 | - "@maxulysse" 46 | - "@gallvp" 47 | -------------------------------------------------------------------------------- /modules/nf-core/bwa/index/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process BWA_INDEX" 4 | tag "modules_nfcore" 5 | tag "modules" 6 | tag "bwa" 7 | tag "bwa/index" 8 | script "../main.nf" 9 | process "BWA_INDEX" 10 | 11 | test("BWA index") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [id: 'test'], 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot(process.out).match() } 28 | ) 29 | } 30 | 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /modules/nf-core/bwa/index/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "BWA index": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | [ 11 | "genome.amb:md5,3a68b8b2287e07dd3f5f95f4344ba76e", 12 | "genome.ann:md5,c32e11f6c859f166c7525a9c1d583567", 13 | "genome.bwt:md5,0469c30a1e239dd08f68afe66fde99da", 14 | "genome.pac:md5,983e3d2cd6f36e2546e6d25a0da78d66", 15 | 
"genome.sa:md5,ab3952cabf026b48cd3eb5bccbb636d1" 16 | ] 17 | ] 18 | ], 19 | "1": [ 20 | "versions.yml:md5,a64462ac7dfb21f4ade9b02e7f65c5bb" 21 | ], 22 | "index": [ 23 | [ 24 | { 25 | "id": "test" 26 | }, 27 | [ 28 | "genome.amb:md5,3a68b8b2287e07dd3f5f95f4344ba76e", 29 | "genome.ann:md5,c32e11f6c859f166c7525a9c1d583567", 30 | "genome.bwt:md5,0469c30a1e239dd08f68afe66fde99da", 31 | "genome.pac:md5,983e3d2cd6f36e2546e6d25a0da78d66", 32 | "genome.sa:md5,ab3952cabf026b48cd3eb5bccbb636d1" 33 | ] 34 | ] 35 | ], 36 | "versions": [ 37 | "versions.yml:md5,a64462ac7dfb21f4ade9b02e7f65c5bb" 38 | ] 39 | } 40 | ], 41 | "meta": { 42 | "nf-test": "0.8.4", 43 | "nextflow": "23.10.1" 44 | }, 45 | "timestamp": "2024-05-16T11:40:09.925307" 46 | } 47 | } -------------------------------------------------------------------------------- /modules/nf-core/bwa/index/tests/tags.yml: -------------------------------------------------------------------------------- 1 | bwa/index: 2 | - modules/nf-core/bwa/index/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/bwa/mem/environment.yml: -------------------------------------------------------------------------------- 1 | name: bwa_mem 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bwa=0.7.18 8 | # renovate: datasource=conda depName=bioconda/samtools 9 | - samtools=1.20 10 | - htslib=1.20.0 11 | -------------------------------------------------------------------------------- /modules/nf-core/bwa/mem/tests/tags.yml: -------------------------------------------------------------------------------- 1 | bwa/mem: 2 | - modules/nf-core/bwa/index/** 3 | - modules/nf-core/bwa/mem/** 4 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/chromap/chromap-chromap.diff: -------------------------------------------------------------------------------- 1 | Changes in module 'nf-core/chromap/chromap' 2 | 
'modules/nf-core/chromap/chromap/meta.yml' is unchanged 3 | Changes in 'chromap/chromap/main.nf': 4 | --- modules/nf-core/chromap/chromap/main.nf 5 | +++ modules/nf-core/chromap/chromap/main.nf 6 | @@ -29,6 +29,7 @@ 7 | script: 8 | def args = task.ext.args ?: '' 9 | def args2 = task.ext.args2 ?: '' 10 | + def args3 = task.ext.args3 ?: '' 11 | def prefix = task.ext.prefix ?: "${meta.id}" 12 | def args_list = args.tokenize() 13 | 14 | @@ -49,8 +50,10 @@ 15 | def compression_cmds = "gzip -n ${prefix}.${file_extension}" 16 | if (args.contains("--SAM")) { 17 | compression_cmds = """ 18 | + samtools addreplacerg $args3 -o ${prefix}.rg.${file_extension} ${prefix}.${file_extension} 19 | samtools view $args2 -@ $task.cpus -bh \\ 20 | - -o ${prefix}.bam ${prefix}.${file_extension} 21 | + -o ${prefix}.bam ${prefix}.rg.${file_extension} 22 | + rm ${prefix}.rg.${file_extension} 23 | rm ${prefix}.${file_extension} 24 | """ 25 | } 26 | @@ -63,7 +66,7 @@ 27 | -r $fasta \\ 28 | -1 ${reads.join(',')} \\ 29 | -o ${prefix}.${file_extension} 30 | - 31 | + 32 | $compression_cmds 33 | 34 | cat <<-END_VERSIONS > versions.yml 35 | 36 | 'modules/nf-core/chromap/chromap/environment.yml' is unchanged 37 | 'modules/nf-core/chromap/chromap/tests/main.nf.test.snap' is unchanged 38 | 'modules/nf-core/chromap/chromap/tests/tags.yml' is unchanged 39 | 'modules/nf-core/chromap/chromap/tests/nextflow.config' is unchanged 40 | 'modules/nf-core/chromap/chromap/tests/main.nf.test' is unchanged 41 | ************************************************************ 42 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/chromap/environment.yml: -------------------------------------------------------------------------------- 1 | name: chromap_chromap 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::chromap=0.2.6 8 | - bioconda::samtools=1.20 9 | 
-------------------------------------------------------------------------------- /modules/nf-core/chromap/chromap/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: CHROMAP_CHROMAP { 4 | ext.args = '--SAM' 5 | } 6 | 7 | } 8 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/chromap/tests/tags.yml: -------------------------------------------------------------------------------- 1 | chromap/chromap: 2 | - modules/nf-core/chromap/chromap/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/index/environment.yml: -------------------------------------------------------------------------------- 1 | name: chromap_index 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::chromap=0.2.6 8 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/index/main.nf: -------------------------------------------------------------------------------- 1 | process CHROMAP_INDEX { 2 | tag "$fasta" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/chromap:0.2.6--hdcf5f25_0' : 8 | 'biocontainers/chromap:0.2.6--hdcf5f25_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path ("*.index"), emit: index 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def prefix = task.ext.prefix ?: "${fasta.baseName}" 23 | """ 24 | chromap \\ 25 | -i \\ 26 | $args \\ 27 | -t $task.cpus \\ 28 | -r $fasta \\ 29 | -o ${prefix}.index 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | chromap: \$(echo \$(chromap --version 2>&1)) 34 | END_VERSIONS 35 | """ 36 | 37 | stub: 38 | def prefix = task.ext.prefix ?: "${fasta.baseName}" 39 | """ 40 | touch ${prefix}.index 41 | 42 | cat <<-END_VERSIONS > versions.yml 43 | "${task.process}": 44 | chromap: \$(echo \$(chromap --version 2>&1)) 45 | END_VERSIONS 46 | """ 47 | } 48 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/index/meta.yml: -------------------------------------------------------------------------------- 1 | name: chromap_index 2 | description: Indexes a fasta reference genome ready for chromatin profiling. 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - chromap: 10 | description: Fast alignment and preprocessing of chromatin profiles 11 | homepage: https://github.com/haowenz/chromap 12 | documentation: https://github.com/haowenz/chromap 13 | tool_dev_url: https://github.com/haowenz/chromap 14 | licence: ["GPL v3"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing reference information 20 | e.g. [ id:'test' ] 21 | - fasta: 22 | type: file 23 | description: Fasta reference file. 
24 | output: 25 | - versions: 26 | type: file 27 | description: File containing software versions 28 | pattern: "versions.yml" 29 | - meta: 30 | type: map 31 | description: | 32 | Groovy Map containing reference information 33 | e.g. [ id:'test' ] 34 | - index: 35 | type: file 36 | description: Index file of the reference genome 37 | pattern: "*.{index}" 38 | authors: 39 | - "@mahesh-panchal" 40 | - "@joseespinosa" 41 | maintainers: 42 | - "@mahesh-panchal" 43 | - "@joseespinosa" 44 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/index/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process CHROMAP_INDEX" 4 | script "../main.nf" 5 | process "CHROMAP_INDEX" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "chromap" 10 | tag "chromap/index" 11 | 12 | test("sarscov2 - fasta") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = [ 18 | [ id:'test' ], // meta map 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 20 | ] 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll( 27 | { assert process.success }, 28 | { assert snapshot(file(process.out.index[0][1]).name, 29 | process.out.versions) 30 | .match() 31 | } 32 | ) 33 | } 34 | } 35 | 36 | test("sarscov2 - fasta - stub") { 37 | 38 | options "-stub" 39 | 40 | when { 41 | process { 42 | """ 43 | input[0] = [ 44 | [ id:'test' ], // meta map 45 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 46 | ] 47 | """ 48 | } 49 | } 50 | 51 | then { 52 | assertAll( 53 | { assert process.success }, 54 | { assert snapshot(process.out).match() } 55 | ) 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /modules/nf-core/chromap/index/tests/main.nf.test.snap: 
-------------------------------------------------------------------------------- 1 | { 2 | "sarscov2 - fasta - stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | "genome.index:md5,d41d8cd98f00b204e9800998ecf8427e" 11 | ] 12 | ], 13 | "1": [ 14 | "versions.yml:md5,d558de01f835d2a4e3c09262bf40d2d5" 15 | ], 16 | "index": [ 17 | [ 18 | { 19 | "id": "test" 20 | }, 21 | "genome.index:md5,d41d8cd98f00b204e9800998ecf8427e" 22 | ] 23 | ], 24 | "versions": [ 25 | "versions.yml:md5,d558de01f835d2a4e3c09262bf40d2d5" 26 | ] 27 | } 28 | ], 29 | "meta": { 30 | "nf-test": "0.9.0", 31 | "nextflow": "24.04.3" 32 | }, 33 | "timestamp": "2024-08-05T17:51:53.156411807" 34 | }, 35 | "sarscov2 - fasta": { 36 | "content": [ 37 | "genome.index", 38 | [ 39 | "versions.yml:md5,d558de01f835d2a4e3c09262bf40d2d5" 40 | ] 41 | ], 42 | "meta": { 43 | "nf-test": "0.9.0", 44 | "nextflow": "24.04.3" 45 | }, 46 | "timestamp": "2024-08-05T17:51:48.457842431" 47 | } 48 | } -------------------------------------------------------------------------------- /modules/nf-core/chromap/index/tests/tags.yml: -------------------------------------------------------------------------------- 1 | chromap/index: 2 | - modules/nf-core/chromap/index/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/custom/getchromsizes/environment.yml: -------------------------------------------------------------------------------- 1 | name: custom_getchromsizes 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::samtools=1.20 8 | - bioconda::htslib=1.20 9 | -------------------------------------------------------------------------------- /modules/nf-core/custom/getchromsizes/main.nf: -------------------------------------------------------------------------------- 1 | process CUSTOM_GETCHROMSIZES { 2 | tag "$fasta" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ 
workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/samtools:1.20--h50ea8bc_0' : 8 | 'biocontainers/samtools:1.20--h50ea8bc_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path ("*.sizes"), emit: sizes 15 | tuple val(meta), path ("*.fai") , emit: fai 16 | tuple val(meta), path ("*.gzi") , emit: gzi, optional: true 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | """ 25 | samtools faidx $fasta 26 | cut -f 1,2 ${fasta}.fai > ${fasta}.sizes 27 | 28 | cat <<-END_VERSIONS > versions.yml 29 | "${task.process}": 30 | getchromsizes: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 31 | END_VERSIONS 32 | """ 33 | 34 | stub: 35 | """ 36 | touch ${fasta}.fai 37 | touch ${fasta}.sizes 38 | if [[ "${fasta.extension}" == "gz" ]]; then 39 | touch ${fasta}.gzi 40 | fi 41 | 42 | cat <<-END_VERSIONS > versions.yml 43 | "${task.process}": 44 | getchromsizes: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 45 | END_VERSIONS 46 | """ 47 | } 48 | -------------------------------------------------------------------------------- /modules/nf-core/custom/getchromsizes/meta.yml: -------------------------------------------------------------------------------- 1 | name: custom_getchromsizes 2 | description: Generates a FASTA file of chromosome sizes and a fasta index file 3 | keywords: 4 | - fasta 5 | - chromosome 6 | - indexing 7 | tools: 8 | - samtools: 9 | description: Tools for dealing with SAM, BAM and CRAM files 10 | homepage: http://www.htslib.org/ 11 | documentation: http://www.htslib.org/doc/samtools.html 12 | tool_dev_url: https://github.com/samtools/samtools 13 | doi: 10.1093/bioinformatics/btp352 14 | licence: ["MIT"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | 
Groovy Map containing sample information 20 | e.g. [ id:'test', single_end:false ] 21 | - fasta: 22 | type: file 23 | description: FASTA file 24 | pattern: "*.{fa,fasta,fna,fas}" 25 | output: 26 | - meta: 27 | type: map 28 | description: | 29 | Groovy Map containing sample information 30 | e.g. [ id:'test', single_end:false ] 31 | - sizes: 32 | type: file 33 | description: File containing chromosome lengths 34 | pattern: "*.{sizes}" 35 | - fai: 36 | type: file 37 | description: FASTA index file 38 | pattern: "*.{fai}" 39 | - gzi: 40 | type: file 41 | description: Optional gzip index file for compressed inputs 42 | pattern: "*.gzi" 43 | - versions: 44 | type: file 45 | description: File containing software versions 46 | pattern: "versions.yml" 47 | authors: 48 | - "@tamara-hodgetts" 49 | - "@chris-cheshire" 50 | - "@muffato" 51 | maintainers: 52 | - "@tamara-hodgetts" 53 | - "@chris-cheshire" 54 | - "@muffato" 55 | -------------------------------------------------------------------------------- /modules/nf-core/custom/getchromsizes/tests/tags.yml: -------------------------------------------------------------------------------- 1 | custom/getchromsizes: 2 | - modules/nf-core/custom/getchromsizes/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/computematrix/environment.yml: -------------------------------------------------------------------------------- 1 | name: deeptools_computematrix 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - "bioconda::deeptools=3.5.5" 8 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/computematrix/main.nf: -------------------------------------------------------------------------------- 1 | process DEEPTOOLS_COMPUTEMATRIX { 2 | tag "$meta.id" 3 | label 'process_high' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && 
!task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/deeptools:3.5.5--pyhdfd78af_0': 8 | 'biocontainers/deeptools:3.5.5--pyhdfd78af_0' }" 9 | 10 | input: 11 | tuple val(meta), path(bigwig) 12 | path bed 13 | 14 | output: 15 | tuple val(meta), path("*.mat.gz") , emit: matrix 16 | tuple val(meta), path("*.mat.tab"), emit: table 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | def prefix = task.ext.prefix ?: "${meta.id}" 25 | """ 26 | computeMatrix \\ 27 | $args \\ 28 | --regionsFileName $bed \\ 29 | --scoreFileName $bigwig \\ 30 | --outFileName ${prefix}.computeMatrix.mat.gz \\ 31 | --outFileNameMatrix ${prefix}.computeMatrix.vals.mat.tab \\ 32 | --numberOfProcessors $task.cpus 33 | 34 | cat <<-END_VERSIONS > versions.yml 35 | "${task.process}": 36 | deeptools: \$(computeMatrix --version | sed -e "s/computeMatrix //g") 37 | END_VERSIONS 38 | """ 39 | 40 | stub: 41 | def prefix = task.ext.prefix ?: "${meta.id}" 42 | """ 43 | echo "" | gzip > ${prefix}.computeMatrix.mat.gz 44 | touch ${prefix}.computeMatrix.vals.mat.tab 45 | 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | deeptools: \$(computeMatrix --version | sed -e "s/computeMatrix //g") 49 | END_VERSIONS 50 | """ 51 | } 52 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/computematrix/meta.yml: -------------------------------------------------------------------------------- 1 | name: deeptools_computematrix 2 | description: calculates scores per genome regions for other deeptools plotting utilities 3 | keywords: 4 | - genome 5 | - regions 6 | - scores 7 | - matrix 8 | tools: 9 | - deeptools: 10 | description: A set of user-friendly tools for normalization and visualization of deep-sequencing data 11 | documentation: https://deeptools.readthedocs.io/en/develop/index.html 12 | 
tool_dev_url: https://github.com/deeptools/deepTools 13 | doi: "10.1093/nar/gku365" 14 | licence: ["GPL v3"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | e.g. [ id:'test' ] 21 | - bigwig: 22 | type: file 23 | description: bigwig file containing genomic scores 24 | pattern: "*.{bw,bigwig}" 25 | - bed: 26 | type: file 27 | description: bed file containing genomic regions 28 | pattern: "*.{bed}" 29 | output: 30 | - meta: 31 | type: map 32 | description: | 33 | Groovy Map containing sample information 34 | e.g. [ id:'test', single_end:false ] 35 | - matrix: 36 | type: file 37 | description: | 38 | gzipped matrix file needed by the plotHeatmap and plotProfile 39 | deeptools utilities 40 | pattern: "*.{computeMatrix.mat.gz}" 41 | - table: 42 | type: file 43 | description: | 44 | tabular file containing the scores of the generated matrix 45 | pattern: "*.{computeMatrix.vals.mat.tab}" 46 | - versions: 47 | type: file 48 | description: File containing software versions 49 | pattern: "versions.yml" 50 | authors: 51 | - "@jeremy1805" 52 | - "@edmundmiller" 53 | - "@drpatelh" 54 | - "@joseespinosa" 55 | maintainers: 56 | - "@jeremy1805" 57 | - "@edmundmiller" 58 | - "@drpatelh" 59 | - "@joseespinosa" 60 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/computematrix/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: DEEPTOOLS_COMPUTEMATRIX { 4 | ext.args = 'scale-regions -b 1000' 5 | } 6 | 7 | } -------------------------------------------------------------------------------- /modules/nf-core/deeptools/computematrix/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deeptools/computematrix: 2 | - "modules/nf-core/deeptools/computematrix/**" 3 | 
-------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotfingerprint/environment.yml: -------------------------------------------------------------------------------- 1 | name: deeptools_plotfingerprint 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - "bioconda::deeptools=3.5.5" 8 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotfingerprint/main.nf: -------------------------------------------------------------------------------- 1 | process DEEPTOOLS_PLOTFINGERPRINT { 2 | tag "$meta.id" 3 | label 'process_high' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/deeptools:3.5.5--pyhdfd78af_0': 8 | 'biocontainers/deeptools:3.5.5--pyhdfd78af_0' }" 9 | 10 | input: 11 | tuple val(meta), path(bams), path(bais) 12 | 13 | output: 14 | tuple val(meta), path("*.pdf") , emit: pdf 15 | tuple val(meta), path("*.raw.txt") , emit: matrix 16 | tuple val(meta), path("*.qcmetrics.txt"), emit: metrics 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | def prefix = task.ext.prefix ?: "${meta.id}" 25 | def extend = (meta.single_end && params.fragment_size > 0) ? 
"--extendReads ${params.fragment_size}" : '' 26 | """ 27 | plotFingerprint \\ 28 | $args \\ 29 | $extend \\ 30 | --bamfiles ${bams.join(' ')} \\ 31 | --plotFile ${prefix}.plotFingerprint.pdf \\ 32 | --outRawCounts ${prefix}.plotFingerprint.raw.txt \\ 33 | --outQualityMetrics ${prefix}.plotFingerprint.qcmetrics.txt \\ 34 | --numberOfProcessors $task.cpus 35 | 36 | cat <<-END_VERSIONS > versions.yml 37 | "${task.process}": 38 | deeptools: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") 39 | END_VERSIONS 40 | """ 41 | 42 | stub: 43 | def prefix = task.ext.prefix ?: "${meta.id}" 44 | """ 45 | touch ${prefix}.plotFingerprint.pdf 46 | touch ${prefix}.plotFingerprint.raw.txt 47 | touch ${prefix}.plotFingerprint.qcmetrics.txt 48 | 49 | cat <<-END_VERSIONS > versions.yml 50 | "${task.process}": 51 | deeptools: \$(plotFingerprint --version | sed -e "s/plotFingerprint //g") 52 | END_VERSIONS 53 | """ 54 | } 55 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotfingerprint/meta.yml: -------------------------------------------------------------------------------- 1 | name: deeptools_plotfingerprint 2 | description: plots cumulative reads coverages by BAM file 3 | keywords: 4 | - plot 5 | - fingerprint 6 | - cumulative coverage 7 | - bam 8 | tools: 9 | - deeptools: 10 | description: A set of user-friendly tools for normalization and visualization of deep-sequencing data 11 | documentation: https://deeptools.readthedocs.io/en/develop/index.html 12 | tool_dev_url: https://github.com/deeptools/deepTools 13 | doi: "10.1093/nar/gku365" 14 | licence: ["GPL v3"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | e.g. 
[ id:'test' ] 21 | - bam: 22 | type: file 23 | description: One or more BAM files 24 | pattern: "*.{bam}" 25 | - bais: 26 | type: file 27 | description: Corresponding BAM file indexes 28 | pattern: "*.bam.bai" 29 | output: 30 | - meta: 31 | type: map 32 | description: | 33 | Groovy Map containing sample information 34 | e.g. [ id:'test', single_end:false ] 35 | - pdf: 36 | type: file 37 | description: | 38 | Output figure containing resulting plot 39 | pattern: "*.{plotFingerprint.pdf}" 40 | - matrix: 41 | type: file 42 | description: | 43 | Output file summarizing the read counts per bin 44 | pattern: "*.{plotFingerprint.raw.txt}" 45 | - metrics: 46 | type: file 47 | description: | 48 | file containing BAM file quality metrics 49 | pattern: "*.{qcmetrics.txt}" 50 | - versions: 51 | type: file 52 | description: File containing software versions 53 | pattern: "versions.yml" 54 | authors: 55 | - "@edmundmiller" 56 | - "@drpatelh" 57 | - "@joseespinosa" 58 | maintainers: 59 | - "@edmundmiller" 60 | - "@drpatelh" 61 | - "@joseespinosa" 62 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotfingerprint/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deeptools/plotfingerprint: 2 | - "modules/nf-core/deeptools/plotfingerprint/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotheatmap/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | name: "deeptools_plotheatmap" 4 | channels: 5 | - conda-forge 6 | - bioconda 7 | - defaults 8 | dependencies: 9 | - "bioconda::deeptools=3.5.5" 10 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotheatmap/main.nf: 
-------------------------------------------------------------------------------- 1 | process DEEPTOOLS_PLOTHEATMAP { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/deeptools:3.5.5--pyhdfd78af_0': 8 | 'biocontainers/deeptools:3.5.5--pyhdfd78af_0' }" 9 | 10 | input: 11 | tuple val(meta), path(matrix) 12 | 13 | output: 14 | tuple val(meta), path("*.pdf"), emit: pdf 15 | tuple val(meta), path("*.tab"), emit: table 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | """ 25 | plotHeatmap \\ 26 | $args \\ 27 | --matrixFile $matrix \\ 28 | --outFileName ${prefix}.plotHeatmap.pdf \\ 29 | --outFileNameMatrix ${prefix}.plotHeatmap.mat.tab 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | deeptools: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") 34 | END_VERSIONS 35 | """ 36 | 37 | stub: 38 | def prefix = task.ext.prefix ?: "${meta.id}" 39 | """ 40 | touch ${prefix}.plotHeatmap.pdf 41 | touch ${prefix}.plotHeatmap.mat.tab 42 | 43 | cat <<-END_VERSIONS > versions.yml 44 | "${task.process}": 45 | deeptools: \$(plotHeatmap --version | sed -e "s/plotHeatmap //g") 46 | END_VERSIONS 47 | """ 48 | } 49 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotheatmap/meta.yml: -------------------------------------------------------------------------------- 1 | name: deeptools_plotheatmap 2 | description: plots values produced by deeptools_computematrix as a heatmap 3 | keywords: 4 | - plot 5 | - heatmap 6 | - scores 7 | - matrix 8 | tools: 9 | - deeptools: 10 | description: A set of user-friendly tools for normalization and visualization 
of deep-sequencing data 11 | documentation: https://deeptools.readthedocs.io/en/develop/index.html 12 | tool_dev_url: https://github.com/deeptools/deepTools 13 | doi: "10.1093/nar/gku365" 14 | licence: ["GPL v3"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | e.g. [ id:'test' ] 21 | - matrix: 22 | type: file 23 | description: | 24 | gzipped matrix file produced by deeptools_ 25 | computematrix deeptools utility 26 | pattern: "*.{mat.gz}" 27 | output: 28 | - meta: 29 | type: map 30 | description: | 31 | Groovy Map containing sample information 32 | e.g. [ id:'test', single_end:false ] 33 | - pdf: 34 | type: file 35 | description: | 36 | Output figure containing resulting plot 37 | pattern: "*.{plotHeatmap.pdf}" 38 | - matrix: 39 | type: file 40 | description: | 41 | File containing the matrix of values 42 | used to generate the heatmap 43 | pattern: "*.{plotHeatmap.mat.tab}" 44 | - versions: 45 | type: file 46 | description: File containing software versions 47 | pattern: "versions.yml" 48 | authors: 49 | - "@edmundmiller" 50 | - "@drpatelh" 51 | - "@joseespinosa" 52 | maintainers: 53 | - "@edmundmiller" 54 | - "@drpatelh" 55 | - "@joseespinosa" 56 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotheatmap/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process DEEPTOOLS_PLOTHEATMAP" 4 | script "../main.nf" 5 | process "DEEPTOOLS_PLOTHEATMAP" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "deeptools" 10 | tag "deeptools/plotheatmap" 11 | 12 | test("sarscov2 - mat") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = [ 18 | [ id:'test', single_end:false ], // meta map 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/deeptools/test.computeMatrix.mat.gz', checkIfExists: true), 20 | ] 21 | """ 22 | } 23 | } 24 | 
25 | then { 26 | assertAll( 27 | { assert process.success }, 28 | { assert snapshot(file(process.out.pdf.get(0).get(1)).name, 29 | file(process.out.table.get(0).get(1)).name, 30 | process.out.versions) 31 | .match() 32 | } 33 | ) 34 | } 35 | 36 | } 37 | 38 | test("sarscov2 - mat - stub") { 39 | 40 | options "-stub" 41 | 42 | when { 43 | process { 44 | """ 45 | input[0] = [ 46 | [ id:'test', single_end:false ], // meta map 47 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/deeptools/test.computeMatrix.mat.gz', checkIfExists: true), 48 | ] 49 | """ 50 | } 51 | } 52 | 53 | then { 54 | assertAll( 55 | { assert process.success }, 56 | { assert snapshot(process.out).match() } 57 | ) 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotheatmap/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deeptools/plotheatmap: 2 | - "modules/nf-core/deeptools/plotheatmap/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotprofile/environment.yml: -------------------------------------------------------------------------------- 1 | name: deeptools_plotprofile 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - "bioconda::deeptools=3.5.5" 8 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotprofile/main.nf: -------------------------------------------------------------------------------- 1 | process DEEPTOOLS_PLOTPROFILE { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/deeptools:3.5.5--pyhdfd78af_0': 8 | 'biocontainers/deeptools:3.5.5--pyhdfd78af_0' }" 9 | 10 | input: 11 | tuple val(meta), path(matrix) 12 | 13 | output: 14 | tuple val(meta), path("*.pdf"), emit: pdf 15 | tuple val(meta), path("*.tab"), emit: table 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | """ 25 | plotProfile \\ 26 | $args \\ 27 | --matrixFile $matrix \\ 28 | --outFileName ${prefix}.plotProfile.pdf \\ 29 | --outFileNameData ${prefix}.plotProfile.tab 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | deeptools: \$(plotProfile --version | sed -e "s/plotProfile //g") 34 | END_VERSIONS 35 | """ 36 | 37 | stub: 38 | def prefix = task.ext.prefix ?: "${meta.id}" 39 | """ 40 | touch ${prefix}.plotProfile.pdf 41 | touch ${prefix}.plotProfile.tab 42 | 43 | cat <<-END_VERSIONS > versions.yml 44 | "${task.process}": 45 | deeptools: \$(plotProfile --version | sed -e "s/plotProfile //g") 46 | END_VERSIONS 47 | """ 48 | } 49 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotprofile/meta.yml: -------------------------------------------------------------------------------- 1 | name: deeptools_plotprofile 2 | description: plots values produced by deeptools_computematrix as a profile plot 3 | keywords: 4 | - plot 5 | - profile 6 | - scores 7 | - matrix 8 | tools: 9 | - deeptools: 10 | description: A set of user-friendly tools for normalization and visualization of deep-sequencing data 11 | documentation: https://deeptools.readthedocs.io/en/develop/index.html 12 | tool_dev_url: https://github.com/deeptools/deepTools 13 | doi: "10.1093/nar/gku365" 14 | licence: ["GPL v3"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | 
e.g. [ id:'test' ] 21 | - matrix: 22 | type: file 23 | description: | 24 | gzipped matrix file produced by deeptools_ 25 | computematrix deeptools utility 26 | pattern: "*.{mat.gz}" 27 | output: 28 | - meta: 29 | type: map 30 | description: | 31 | Groovy Map containing sample information 32 | e.g. [ id:'test', single_end:false ] 33 | - pdf: 34 | type: file 35 | description: | 36 | Output figure containing resulting plot 37 | pattern: "*.{plotProfile.pdf}" 38 | - matrix: 39 | type: file 40 | description: | 41 | File containing the matrix of values 42 | used to generate the profile 43 | pattern: "*.{plotProfile.mat.tab}" 44 | - versions: 45 | type: file 46 | description: File containing software versions 47 | pattern: "versions.yml" 48 | authors: 49 | - "@edmundmiller" 50 | - "@drpatelh" 51 | - "@joseespinosa" 52 | maintainers: 53 | - "@edmundmiller" 54 | - "@drpatelh" 55 | - "@joseespinosa" 56 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotprofile/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process DEEPTOOLS_PLOTPROFILE" 4 | script "../main.nf" 5 | process "DEEPTOOLS_PLOTPROFILE" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "deeptools" 10 | tag "deeptools/plotprofile" 11 | 12 | test("sarscov2 - mat") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = [ 18 | [ id:'test', single_end:false ], // meta map 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/deeptools/test.computeMatrix.mat.gz', checkIfExists: true), 20 | ] 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll( 27 | { assert process.success }, 28 | { assert snapshot(file(process.out.pdf.get(0).get(1)).name, 29 | file(process.out.table.get(0).get(1)).name, 30 | process.out.versions) 31 | .match() 32 | } 33 | ) 34 | } 35 | 36 | } 37 | 38 | test("sarscov2 - mat - stub") { 39 | 40 | options "-stub" 41 | 42 
| when { 43 | process { 44 | """ 45 | input[0] = [ 46 | [ id:'test', single_end:false ], // meta map 47 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/deeptools/test.computeMatrix.mat.gz', checkIfExists: true), 48 | ] 49 | """ 50 | } 51 | } 52 | 53 | then { 54 | assertAll( 55 | { assert process.success }, 56 | { assert snapshot(process.out).match() } 57 | ) 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /modules/nf-core/deeptools/plotprofile/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deeptools/plotprofile: 2 | - "modules/nf-core/deeptools/plotprofile/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/environment.yml: -------------------------------------------------------------------------------- 1 | name: fastqc 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::fastqc=0.12.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/meta.yml: -------------------------------------------------------------------------------- 1 | name: fastqc 2 | description: Run FastQC on sequenced reads 3 | keywords: 4 | - quality control 5 | - qc 6 | - adapters 7 | - fastq 8 | tools: 9 | - fastqc: 10 | description: | 11 | FastQC gives general quality metrics about your reads. 12 | It provides information about the quality score distribution 13 | across your reads, the per base sequence content (%A/C/G/T). 14 | You get information about adapter contamination and other 15 | overrepresented sequences. 
16 | homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ 17 | documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ 18 | licence: ["GPL-2.0-only"] 19 | input: 20 | - meta: 21 | type: map 22 | description: | 23 | Groovy Map containing sample information 24 | e.g. [ id:'test', single_end:false ] 25 | - reads: 26 | type: file 27 | description: | 28 | List of input FastQ files of size 1 and 2 for single-end and paired-end data, 29 | respectively. 30 | output: 31 | - meta: 32 | type: map 33 | description: | 34 | Groovy Map containing sample information 35 | e.g. [ id:'test', single_end:false ] 36 | - html: 37 | type: file 38 | description: FastQC report 39 | pattern: "*_{fastqc.html}" 40 | - zip: 41 | type: file 42 | description: FastQC report archive 43 | pattern: "*_{fastqc.zip}" 44 | - versions: 45 | type: file 46 | description: File containing software versions 47 | pattern: "versions.yml" 48 | authors: 49 | - "@drpatelh" 50 | - "@grst" 51 | - "@ewels" 52 | - "@FelixKrueger" 53 | maintainers: 54 | - "@drpatelh" 55 | - "@grst" 56 | - "@ewels" 57 | - "@FelixKrueger" 58 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/tests/tags.yml: -------------------------------------------------------------------------------- 1 | fastqc: 2 | - modules/nf-core/fastqc/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/gffread/environment.yml: -------------------------------------------------------------------------------- 1 | name: gffread 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::gffread=0.12.7 8 | -------------------------------------------------------------------------------- /modules/nf-core/gffread/meta.yml: -------------------------------------------------------------------------------- 1 | name: gffread 2 | description: Validate, filter, convert and perform various other 
operations on GFF files 3 | keywords: 4 | - gff 5 | - conversion 6 | - validation 7 | tools: 8 | - gffread: 9 | description: GFF/GTF utility providing format conversions, region filtering, FASTA sequence extraction and more. 10 | homepage: http://ccb.jhu.edu/software/stringtie/gff.shtml#gffread 11 | documentation: http://ccb.jhu.edu/software/stringtie/gff.shtml#gffread 12 | tool_dev_url: https://github.com/gpertea/gffread 13 | doi: 10.12688/f1000research.23297.1 14 | licence: ["MIT"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing meta data 20 | e.g. [ id:'test' ] 21 | - gff: 22 | type: file 23 | description: A reference file in either the GFF3, GFF2 or GTF format. 24 | pattern: "*.{gff, gtf}" 25 | - fasta: 26 | type: file 27 | description: A multi-fasta file with the genomic sequences 28 | pattern: "*.{fasta,fa,faa,fas,fsa}" 29 | output: 30 | - meta: 31 | type: map 32 | description: | 33 | Groovy Map containing meta data 34 | e.g. [ id:'test' ] 35 | - gtf: 36 | type: file 37 | description: GTF file resulting from the conversion of the GFF input file if '-T' argument is present 38 | pattern: "*.{gtf}" 39 | - gffread_gff: 40 | type: file 41 | description: GFF3 file resulting from the conversion of the GFF input file if '-T' argument is absent 42 | pattern: "*.gff3" 43 | - gffread_fasta: 44 | type: file 45 | description: Fasta file produced when either of '-w', '-x', '-y' parameters is present 46 | pattern: "*.fasta" 47 | - versions: 48 | type: file 49 | description: File containing software versions 50 | pattern: "versions.yml" 51 | authors: 52 | - "@edmundmiller" 53 | maintainers: 54 | - "@edmundmiller" 55 | - "@gallvp" 56 | -------------------------------------------------------------------------------- /modules/nf-core/gffread/tests/nextflow-fasta.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: GFFREAD { 3 | ext.args = '-w -S' 4 | } 5 | } 6 | 
-------------------------------------------------------------------------------- /modules/nf-core/gffread/tests/nextflow-gff3.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: GFFREAD { 3 | ext.args = '' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/gffread/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: GFFREAD { 3 | ext.args = '-T' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/gffread/tests/tags.yml: -------------------------------------------------------------------------------- 1 | gffread: 2 | - modules/nf-core/gffread/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/environment.yml: -------------------------------------------------------------------------------- 1 | name: gunzip 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - conda-forge::grep=3.11 8 | - conda-forge::sed=4.8 9 | - conda-forge::tar=1.34 10 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/main.nf: -------------------------------------------------------------------------------- 1 | process GUNZIP { 2 | tag "$archive" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/ubuntu:22.04' : 8 | 'nf-core/ubuntu:22.04' }" 9 | 10 | input: 11 | tuple val(meta), path(archive) 12 | 13 | output: 14 | tuple val(meta), path("$gunzip"), emit: gunzip 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def extension = ( archive.toString() - '.gz' ).tokenize('.')[-1] 23 | def name = archive.toString() - '.gz' - ".$extension" 24 | def prefix = task.ext.prefix ?: name 25 | gunzip = prefix + ".$extension" 26 | """ 27 | # Not calling gunzip itself because it creates files 28 | # with the original group ownership rather than the 29 | # default one for that user / the work directory 30 | gzip \\ 31 | -cd \\ 32 | $args \\ 33 | $archive \\ 34 | > $gunzip 35 | 36 | cat <<-END_VERSIONS > versions.yml 37 | "${task.process}": 38 | gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') 39 | END_VERSIONS 40 | """ 41 | 42 | stub: 43 | def args = task.ext.args ?: '' 44 | def extension = ( archive.toString() - '.gz' ).tokenize('.')[-1] 45 | def name = archive.toString() - '.gz' - ".$extension" 46 | def prefix = task.ext.prefix ?: name 47 | gunzip = prefix + ".$extension" 48 | """ 49 | touch $gunzip 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/meta.yml: -------------------------------------------------------------------------------- 1 | name: gunzip 2 | description: Compresses and decompresses files. 3 | keywords: 4 | - gunzip 5 | - compression 6 | - decompression 7 | tools: 8 | - gunzip: 9 | description: | 10 | gzip is a file format and a software application used for file compression and decompression. 
11 | documentation: https://www.gnu.org/software/gzip/manual/gzip.html 12 | licence: ["GPL-3.0-or-later"] 13 | input: 14 | - meta: 15 | type: map 16 | description: | 17 | Optional groovy Map containing meta information 18 | e.g. [ id:'test', single_end:false ] 19 | - archive: 20 | type: file 21 | description: File to be compressed/uncompressed 22 | pattern: "*.*" 23 | output: 24 | - gunzip: 25 | type: file 26 | description: Compressed/uncompressed file 27 | pattern: "*.*" 28 | - versions: 29 | type: file 30 | description: File containing software versions 31 | pattern: "versions.yml" 32 | authors: 33 | - "@joseespinosa" 34 | - "@drpatelh" 35 | - "@jfy133" 36 | maintainers: 37 | - "@joseespinosa" 38 | - "@drpatelh" 39 | - "@jfy133" 40 | - "@gallvp" 41 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: GUNZIP { 3 | ext.prefix = { "${meta.id}.xyz" } 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/tests/tags.yml: -------------------------------------------------------------------------------- 1 | gunzip: 2 | - modules/nf-core/gunzip/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/homer/annotatepeaks/environment.yml: -------------------------------------------------------------------------------- 1 | name: homer_annotatepeaks 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::homer=4.11 8 | -------------------------------------------------------------------------------- /modules/nf-core/homer/annotatepeaks/main.nf: -------------------------------------------------------------------------------- 1 | process HOMER_ANNOTATEPEAKS { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | // WARN: Version information not 
provided by tool on CLI. Please update version string below when bumping container versions. 6 | conda "${moduleDir}/environment.yml" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 8 | 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : 9 | 'biocontainers/homer:4.11--pl526hc9558a2_3' }" 10 | 11 | input: 12 | tuple val(meta), path(peak) 13 | path fasta 14 | path gtf 15 | 16 | output: 17 | tuple val(meta), path("*annotatePeaks.txt"), emit: txt 18 | tuple val(meta), path("*annStats.txt"), emit: stats, optional: true 19 | path "versions.yml" , emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | def VERSION = '4.11' // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions. 28 | """ 29 | annotatePeaks.pl \\ 30 | $peak \\ 31 | $fasta \\ 32 | $args \\ 33 | -gtf $gtf \\ 34 | -cpu $task.cpus \\ 35 | > ${prefix}.annotatePeaks.txt 36 | 37 | cat <<-END_VERSIONS > versions.yml 38 | "${task.process}": 39 | homer: $VERSION 40 | END_VERSIONS 41 | """ 42 | } 43 | -------------------------------------------------------------------------------- /modules/nf-core/homer/annotatepeaks/meta.yml: -------------------------------------------------------------------------------- 1 | name: homer_annotatepeaks 2 | description: Annotate peaks with HOMER suite 3 | keywords: 4 | - annotations 5 | - peaks 6 | - bed 7 | tools: 8 | - homer: 9 | description: | 10 | HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. 11 | documentation: http://homer.ucsd.edu/homer/ 12 | doi: 10.1016/j.molcel.2010.05.004. 13 | licence: ["GPL-3.0-or-later"] 14 | input: 15 | - meta: 16 | type: map 17 | description: | 18 | Groovy Map containing sample information 19 | e.g. 
[ id:'test', single_end:false ] 20 | - peaks: 21 | type: file 22 | description: The peak files in bed format 23 | pattern: "*.bed" 24 | - fasta: 25 | type: file 26 | description: Fasta file of reference genome 27 | pattern: "*.fasta" 28 | - gtf: 29 | type: file 30 | description: GTF file of reference genome 31 | pattern: "*.gtf" 32 | output: 33 | - meta: 34 | type: map 35 | description: | 36 | Groovy Map containing sample information 37 | e.g. [ id:'test', single_end:false ] 38 | - annotated_peaks: 39 | type: file 40 | description: The annotated peaks 41 | pattern: "*annotatePeaks.txt" 42 | - annotation_stats: 43 | type: file 44 | description: the annStats file output from -annStats parameter 45 | pattern: "*annStats.txt" 46 | - versions: 47 | type: file 48 | description: File containing software versions 49 | pattern: "versions.yml" 50 | authors: 51 | - "@drpatelh" 52 | - "@kevinmenden" 53 | maintainers: 54 | - "@drpatelh" 55 | - "@kevinmenden" 56 | -------------------------------------------------------------------------------- /modules/nf-core/khmer/uniquekmers/environment.yml: -------------------------------------------------------------------------------- 1 | name: khmer_uniquekmers 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::khmer=3.0.0a3 8 | -------------------------------------------------------------------------------- /modules/nf-core/khmer/uniquekmers/main.nf: -------------------------------------------------------------------------------- 1 | process KHMER_UNIQUEKMERS { 2 | tag "$fasta" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/khmer:3.0.0a3--py37haa7609a_2' : 8 | 'biocontainers/khmer:3.0.0a3--py37haa7609a_2' }" 9 | 10 | input: 11 | path fasta 12 | val kmer_size 13 | 14 | output: 15 | path "report.txt" , emit: report 16 | path "kmers.txt" , emit: kmers 17 | path "versions.yml", emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | """ 25 | unique-kmers.py \\ 26 | -k $kmer_size \\ 27 | -R report.txt \\ 28 | $args \\ 29 | $fasta 30 | 31 | grep ^number report.txt | sed 's/^.*:.[[:blank:]]//g' > kmers.txt 32 | 33 | cat <<-END_VERSIONS > versions.yml 34 | "${task.process}": 35 | khmer: \$( unique-kmers.py --version 2>&1 | grep ^khmer | sed 's/^khmer //;s/ .*\$//' ) 36 | END_VERSIONS 37 | """ 38 | } 39 | -------------------------------------------------------------------------------- /modules/nf-core/khmer/uniquekmers/meta.yml: -------------------------------------------------------------------------------- 1 | name: "khmer_uniquekmers" 2 | description: In-memory nucleotide sequence k-mer counting, filtering, graph traversal and more 3 | keywords: 4 | - khmer 5 | - k-mer 6 | - effective genome size 7 | tools: 8 | - "khmer": 9 | description: khmer k-mer counting library 10 | homepage: https://github.com/dib-lab/khmer 11 | documentation: https://khmer.readthedocs.io/en/latest/ 12 | tool_dev_url: https://github.com/dib-lab/khmer 13 | doi: "10.12688/f1000research.6924.1" 14 | licence: ["BSD License"] 15 | input: 16 | - fasta: 17 | type: file 18 | description: fasta file 19 | pattern: "*.{fa,fasta}" 20 | - kmer_size: 21 | type: value 22 | description: k-mer size to use 23 | pattern: "[0-9]+" 24 | output: 25 | - report: 26 | type: file 27 | description: Text file containing unique-kmers.py execution report 28 | pattern: "report.txt" 29 | - kmers: 30 | type: file 31 | description: Text file containing number of kmers 32 | pattern: "kmers.txt" 33 | - versions: 34 | type: file 
35 | description: File containing software versions 36 | pattern: "versions.yml" 37 | authors: 38 | - "@JoseEspinosa" 39 | maintainers: 40 | - "@JoseEspinosa" 41 | -------------------------------------------------------------------------------- /modules/nf-core/macs3/callpeak/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | name: "macs3_callpeak" 4 | channels: 5 | - conda-forge 6 | - bioconda 7 | - defaults 8 | dependencies: 9 | - "bioconda::macs3=3.0.1" 10 | -------------------------------------------------------------------------------- /modules/nf-core/macs3/callpeak/tests/bam.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'MACS3_CALLPEAK' { 3 | ext.args = '--qval 0.1' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-core/macs3/callpeak/tests/bed.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'MACS3_CALLPEAK' { 3 | ext.args = '--format BED --qval 10 --nomodel --extsize 200' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-core/multiqc/environment.yml: -------------------------------------------------------------------------------- 1 | name: multiqc 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::multiqc=1.23 8 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/main.nf: -------------------------------------------------------------------------------- 1 | process MULTIQC { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${ workflow.containerEngine == 'singularity' && 
!task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/multiqc:1.23--pyhdfd78af_0' : 7 | 'biocontainers/multiqc:1.23--pyhdfd78af_0' }" 8 | 9 | input: 10 | path multiqc_files, stageAs: "?/*" 11 | path(multiqc_config) 12 | path(extra_multiqc_config) 13 | path(multiqc_logo) 14 | path(replace_names) 15 | path(sample_names) 16 | 17 | output: 18 | path "*multiqc_report.html", emit: report 19 | path "*_data" , emit: data 20 | path "*_plots" , optional:true, emit: plots 21 | path "versions.yml" , emit: versions 22 | 23 | when: 24 | task.ext.when == null || task.ext.when 25 | 26 | script: 27 | def args = task.ext.args ?: '' 28 | def config = multiqc_config ? "--config $multiqc_config" : '' 29 | def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' 30 | def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' 31 | def replace = replace_names ? "--replace-names ${replace_names}" : '' 32 | def samples = sample_names ? "--sample-names ${sample_names}" : '' 33 | """ 34 | multiqc \\ 35 | --force \\ 36 | $args \\ 37 | $config \\ 38 | $extra_config \\ 39 | $logo \\ 40 | $replace \\ 41 | $samples \\ 42 | . 
43 | 44 | cat <<-END_VERSIONS > versions.yml 45 | "${task.process}": 46 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 47 | END_VERSIONS 48 | """ 49 | 50 | stub: 51 | """ 52 | mkdir multiqc_data 53 | touch multiqc_plots 54 | touch multiqc_report.html 55 | 56 | cat <<-END_VERSIONS > versions.yml 57 | "${task.process}": 58 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 59 | END_VERSIONS 60 | """ 61 | } 62 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "multiqc_versions_single": { 3 | "content": [ 4 | [ 5 | "versions.yml:md5,87904cd321df21fac35d18f0fc01bb19" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.8.4", 10 | "nextflow": "24.04.2" 11 | }, 12 | "timestamp": "2024-07-10T12:41:34.562023" 13 | }, 14 | "multiqc_stub": { 15 | "content": [ 16 | [ 17 | "multiqc_report.html", 18 | "multiqc_data", 19 | "multiqc_plots", 20 | "versions.yml:md5,87904cd321df21fac35d18f0fc01bb19" 21 | ] 22 | ], 23 | "meta": { 24 | "nf-test": "0.8.4", 25 | "nextflow": "24.04.2" 26 | }, 27 | "timestamp": "2024-07-10T11:27:11.933869532" 28 | }, 29 | "multiqc_versions_config": { 30 | "content": [ 31 | [ 32 | "versions.yml:md5,87904cd321df21fac35d18f0fc01bb19" 33 | ] 34 | ], 35 | "meta": { 36 | "nf-test": "0.8.4", 37 | "nextflow": "24.04.2" 38 | }, 39 | "timestamp": "2024-07-10T11:26:56.709849369" 40 | } 41 | } -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/tags.yml: -------------------------------------------------------------------------------- 1 | multiqc: 2 | - modules/nf-core/multiqc/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/phantompeakqualtools/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 
# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - "bioconda::phantompeakqualtools=1.2.2" 8 | -------------------------------------------------------------------------------- /modules/nf-core/phantompeakqualtools/main.nf: -------------------------------------------------------------------------------- 1 | process PHANTOMPEAKQUALTOOLS { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | // WARN: Version information not provided by tool on CLI. Please update version string below when bumping container versions. 6 | conda "${moduleDir}/environment.yml" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 8 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/4a/4a1cddfad5b503ee347cc5de17d172e1876c547fca00aa844559c9e764fb400f/data' : 9 | 'community.wave.seqera.io/library/phantompeakqualtools:1.2.2--f8026fe2526a5e18' }" 10 | 11 | input: 12 | tuple val(meta), path(bam) 13 | 14 | output: 15 | tuple val(meta), path("*.out") , emit: spp 16 | tuple val(meta), path("*.pdf") , emit: pdf 17 | tuple val(meta), path("*.Rdata"), emit: rdata 18 | path "versions.yml" , emit: versions 19 | 20 | when: 21 | task.ext.when == null || task.ext.when 22 | 23 | script: 24 | def args = task.ext.args ?: '' 25 | def args2 = task.ext.args2 ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | def VERSION = '1.2.2' // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions. 
28 | """ 29 | RUN_SPP=`which run_spp.R` 30 | Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" $args2 31 | 32 | cat <<-END_VERSIONS > versions.yml 33 | "${task.process}": 34 | phantompeakqualtools: $VERSION 35 | END_VERSIONS 36 | """ 37 | 38 | stub: 39 | def prefix = task.ext.prefix ?: "${meta.id}" 40 | def VERSION = '1.2.2' // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions. 41 | """ 42 | touch ${prefix}.spp.pdf 43 | touch ${prefix}.spp.Rdata 44 | touch ${prefix}.spp.out 45 | 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | phantompeakqualtools: $VERSION 49 | END_VERSIONS 50 | """ 51 | } 52 | -------------------------------------------------------------------------------- /modules/nf-core/phantompeakqualtools/tests/tags.yml: -------------------------------------------------------------------------------- 1 | phantompeakqualtools: 2 | - "modules/nf-core/phantompeakqualtools/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/picard/collectmultiplemetrics/environment.yml: -------------------------------------------------------------------------------- 1 | name: picard_collectmultiplemetrics 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::picard=3.2.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/picard/collectmultiplemetrics/meta.yml: -------------------------------------------------------------------------------- 1 | name: picard_collectmultiplemetrics 2 | description: Collect multiple metrics from a BAM file 3 | keywords: 4 | - alignment 5 | - metrics 6 | - statistics 7 | - insert 8 | - quality 9 | - bam 10 | tools: 11 | - picard: 12 | description: | 13 | A set of command line tools (in Java) for manipulating high-throughput 
sequencing (HTS) 14 | data and formats such as SAM/BAM/CRAM and VCF. 15 | homepage: https://broadinstitute.github.io/picard/ 16 | documentation: https://broadinstitute.github.io/picard/ 17 | licence: ["MIT"] 18 | input: 19 | - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing sample information 23 | e.g. [ id:'test', single_end:false ] 24 | - bam: 25 | type: file 26 | description: SAM/BAM/CRAM file 27 | pattern: "*.{sam,bam,cram}" 28 | - bai: 29 | type: file 30 | description: Optional SAM/BAM/CRAM file index 31 | pattern: "*.{sai,bai,crai}" 32 | - meta2: 33 | type: map 34 | description: | 35 | Groovy Map containing reference information 36 | e.g. [ id:'genome'] 37 | - fasta: 38 | type: file 39 | description: Genome fasta file 40 | - meta3: 41 | type: map 42 | description: | 43 | Groovy Map containing reference information 44 | e.g. [ id:'genome'] 45 | - fai: 46 | type: file 47 | description: Index of FASTA file. Only needed when fasta is supplied. 48 | pattern: "*.fai" 49 | output: 50 | - meta: 51 | type: map 52 | description: | 53 | Groovy Map containing sample information 54 | e.g. 
[ id:'test', single_end:false ] 55 | - metrics: 56 | type: file 57 | description: Alignment metrics files generated by picard 58 | pattern: "*_{metrics}" 59 | - pdf: 60 | type: file 61 | description: PDF plots of metrics 62 | pattern: "*.{pdf}" 63 | - versions: 64 | type: file 65 | description: File containing software versions 66 | pattern: "versions.yml" 67 | authors: 68 | - "@drpatelh" 69 | maintainers: 70 | - "@drpatelh" 71 | -------------------------------------------------------------------------------- /modules/nf-core/picard/markduplicates/environment.yml: -------------------------------------------------------------------------------- 1 | name: picard_markduplicates 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::picard=3.2.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/picard/markduplicates/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: PICARD_MARKDUPLICATES { 3 | ext.prefix = { "${meta.id}.marked" } 4 | ext.args = '--ASSUME_SORT_ORDER queryname' 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /modules/nf-core/picard/markduplicates/tests/tags.yml: -------------------------------------------------------------------------------- 1 | picard/markduplicates: 2 | - modules/nf-core/picard/markduplicates/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/picard/mergesamfiles/environment.yml: -------------------------------------------------------------------------------- 1 | name: picard_mergesamfiles 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::picard=3.2.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/picard/mergesamfiles/main.nf: 
-------------------------------------------------------------------------------- 1 | process PICARD_MERGESAMFILES { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/picard:3.2.0--hdfd78af_0' : 8 | 'biocontainers/picard:3.2.0--hdfd78af_0' }" 9 | 10 | input: 11 | tuple val(meta), path(bams) 12 | 13 | output: 14 | tuple val(meta), path("*.bam"), emit: bam 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def prefix = task.ext.prefix ?: "${meta.id}" 23 | def bam_files = bams.sort() 24 | def avail_mem = 3072 25 | if (!task.memory) { 26 | log.info '[Picard MergeSamFiles] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 27 | } else { 28 | avail_mem = (task.memory.mega*0.8).intValue() 29 | } 30 | if (bam_files.size() > 1) { 31 | """ 32 | picard \\ 33 | -Xmx${avail_mem}M \\ 34 | MergeSamFiles \\ 35 | $args \\ 36 | ${'--INPUT '+bam_files.join(' --INPUT ')} \\ 37 | --OUTPUT ${prefix}.bam 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) 41 | END_VERSIONS 42 | """ 43 | } else { 44 | """ 45 | ln -s ${bam_files[0]} ${prefix}.bam 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) 49 | END_VERSIONS 50 | """ 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /modules/nf-core/picard/mergesamfiles/meta.yml: -------------------------------------------------------------------------------- 1 | name: picard_mergesamfiles 2 | description: Merges multiple BAM 
files into a single file 3 | keywords: 4 | - merge 5 | - alignment 6 | - bam 7 | - sam 8 | tools: 9 | - picard: 10 | description: | 11 | A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) 12 | data and formats such as SAM/BAM/CRAM and VCF. 13 | homepage: https://broadinstitute.github.io/picard/ 14 | documentation: https://broadinstitute.github.io/picard/ 15 | licence: ["MIT"] 16 | input: 17 | - meta: 18 | type: map 19 | description: | 20 | Groovy Map containing sample information 21 | e.g. [ id:'test', single_end:false ] 22 | - bam: 23 | type: file 24 | description: List of BAM files 25 | pattern: "*.{bam}" 26 | output: 27 | - meta: 28 | type: map 29 | description: | 30 | Groovy Map containing sample information 31 | e.g. [ id:'test', single_end:false ] 32 | - bam: 33 | type: file 34 | description: Merged BAM file 35 | pattern: "*.{bam}" 36 | - versions: 37 | type: file 38 | description: File containing software versions 39 | pattern: "versions.yml" 40 | authors: 41 | - "@drpatelh" 42 | maintainers: 43 | - "@drpatelh" 44 | -------------------------------------------------------------------------------- /modules/nf-core/preseq/lcextrap/environment.yml: -------------------------------------------------------------------------------- 1 | name: preseq_lcextrap 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::preseq=3.1.2 8 | -------------------------------------------------------------------------------- /modules/nf-core/preseq/lcextrap/main.nf: -------------------------------------------------------------------------------- 1 | process PRESEQ_LCEXTRAP { 2 | tag "$meta.id" 3 | label 'process_single' 4 | label 'error_retry' 5 | 6 | conda "${moduleDir}/environment.yml" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
8 | 'https://depot.galaxyproject.org/singularity/preseq:3.1.2--h445547b_2': 9 | 'biocontainers/preseq:3.1.2--h445547b_2' }" 10 | 11 | input: 12 | tuple val(meta), path(bam) 13 | 14 | output: 15 | tuple val(meta), path("*.lc_extrap.txt"), emit: lc_extrap 16 | tuple val(meta), path("*.log") , emit: log 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | args = task.attempt > 1 ? args.join(' -defects') : args // Disable testing for defects 25 | def prefix = task.ext.prefix ?: "${meta.id}" 26 | def paired_end = meta.single_end ? '' : '-pe' 27 | """ 28 | preseq \\ 29 | lc_extrap \\ 30 | $args \\ 31 | $paired_end \\ 32 | -output ${prefix}.lc_extrap.txt \\ 33 | $bam 34 | cp .command.err ${prefix}.command.log 35 | 36 | cat <<-END_VERSIONS > versions.yml 37 | "${task.process}": 38 | preseq: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') 39 | END_VERSIONS 40 | """ 41 | 42 | stub: 43 | def prefix = task.ext.prefix ?: "${meta.id}" 44 | """ 45 | touch ${prefix}.lc_extrap.txt 46 | touch ${prefix}.command.log 47 | 48 | cat <<-END_VERSIONS > versions.yml 49 | "${task.process}": 50 | preseq: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') 51 | END_VERSIONS 52 | """ 53 | } 54 | -------------------------------------------------------------------------------- /modules/nf-core/preseq/lcextrap/meta.yml: -------------------------------------------------------------------------------- 1 | name: preseq_lcextrap 2 | description: Software for predicting library complexity and genome coverage in high-throughput sequencing 3 | keywords: 4 | - preseq 5 | - library 6 | - complexity 7 | tools: 8 | - preseq: 9 | description: Software for predicting library complexity and genome coverage in high-throughput sequencing 10 | homepage: http://smithlabresearch.org/software/preseq/ 11 | documentation: http://smithlabresearch.org/wp-content/uploads/manual.pdf 12 
| tool_dev_url: https://github.com/smithlabcode/preseq 13 | licence: ["GPL"] 14 | input: 15 | - meta: 16 | type: map 17 | description: | 18 | Groovy Map containing sample information 19 | e.g. [ id:'test', single_end:false ] 20 | - bam: 21 | type: file 22 | description: BAM/CRAM/SAM file 23 | pattern: "*.{bam,cram,sam}" 24 | output: 25 | - meta: 26 | type: map 27 | description: | 28 | Groovy Map containing sample information 29 | e.g. [ id:'test', single_end:false ] 30 | - versions: 31 | type: file 32 | description: File containing software versions 33 | pattern: "versions.yml" 34 | - lc_extrap: 35 | type: file 36 | description: File containing output of Preseq lcextrap 37 | pattern: "*.{lc_extrap.txt}" 38 | - log: 39 | type: file 40 | description: Log file containing stderr produced by Preseq 41 | pattern: "*.{log}" 42 | authors: 43 | - "@drpatelh" 44 | - "@edmundmiller" 45 | maintainers: 46 | - "@drpatelh" 47 | - "@edmundmiller" 48 | -------------------------------------------------------------------------------- /modules/nf-core/preseq/lcextrap/tests/tags.yml: -------------------------------------------------------------------------------- 1 | preseq/lcextrap: 2 | - modules/nf-core/preseq/lcextrap/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/flagstat/environment.yml: -------------------------------------------------------------------------------- 1 | name: samtools_flagstat 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::samtools=1.20 8 | - bioconda::htslib=1.20 9 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/flagstat/main.nf: -------------------------------------------------------------------------------- 1 | process SAMTOOLS_FLAGSTAT { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' 
&& !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/samtools:1.20--h50ea8bc_0' : 8 | 'biocontainers/samtools:1.20--h50ea8bc_0' }" 9 | 10 | input: 11 | tuple val(meta), path(bam), path(bai) 12 | 13 | output: 14 | tuple val(meta), path("*.flagstat"), emit: flagstat 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def prefix = task.ext.prefix ?: "${meta.id}" 23 | """ 24 | samtools \\ 25 | flagstat \\ 26 | --threads ${task.cpus} \\ 27 | $bam \\ 28 | > ${prefix}.flagstat 29 | 30 | cat <<-END_VERSIONS > versions.yml 31 | "${task.process}": 32 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 33 | END_VERSIONS 34 | """ 35 | 36 | stub: 37 | def prefix = task.ext.prefix ?: "${meta.id}" 38 | """ 39 | touch ${prefix}.flagstat 40 | 41 | cat <<-END_VERSIONS > versions.yml 42 | "${task.process}": 43 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 44 | END_VERSIONS 45 | """ 46 | } 47 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/flagstat/meta.yml: -------------------------------------------------------------------------------- 1 | name: samtools_flagstat 2 | description: Counts the number of alignments in a BAM/CRAM/SAM file for each FLAG type 3 | keywords: 4 | - stats 5 | - mapping 6 | - counts 7 | - bam 8 | - sam 9 | - cram 10 | tools: 11 | - samtools: 12 | description: | 13 | SAMtools is a set of utilities for interacting with and post-processing 14 | short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. 15 | These files are generated as output by short read aligners like BWA. 
16 | homepage: http://www.htslib.org/ 17 | documentation: http://www.htslib.org/doc/samtools.html 18 | doi: 10.1093/bioinformatics/btp352 19 | licence: ["MIT"] 20 | input: 21 | - meta: 22 | type: map 23 | description: | 24 | Groovy Map containing sample information 25 | e.g. [ id:'test', single_end:false ] 26 | - bam: 27 | type: file 28 | description: BAM/CRAM/SAM file 29 | pattern: "*.{bam,cram,sam}" 30 | - bai: 31 | type: file 32 | description: Index for BAM/CRAM/SAM file 33 | pattern: "*.{bai,crai,sai}" 34 | output: 35 | - meta: 36 | type: map 37 | description: | 38 | Groovy Map containing sample information 39 | e.g. [ id:'test', single_end:false ] 40 | - flagstat: 41 | type: file 42 | description: File containing samtools flagstat output 43 | pattern: "*.{flagstat}" 44 | - versions: 45 | type: file 46 | description: File containing software versions 47 | pattern: "versions.yml" 48 | authors: 49 | - "@drpatelh" 50 | maintainers: 51 | - "@drpatelh" 52 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/flagstat/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process SAMTOOLS_FLAGSTAT" 4 | script "../main.nf" 5 | process "SAMTOOLS_FLAGSTAT" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "samtools" 9 | tag "samtools/flagstat" 10 | 11 | test("BAM") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = Channel.of([ 17 | [ id:'test', single_end:false ], // meta map 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam.bai', checkIfExists: true) 20 | ]) 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll ( 27 | { assert process.success }, 28 | { assert snapshot(process.out).match() } 29 | ) 30 | } 31 | } 32 | 33 | test("BAM - 
stub") { 34 | 35 | options "-stub" 36 | 37 | when { 38 | process { 39 | """ 40 | input[0] = Channel.of([ 41 | [ id:'test', single_end:false ], // meta map 42 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), 43 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam.bai', checkIfExists: true) 44 | ]) 45 | """ 46 | } 47 | } 48 | 49 | then { 50 | assertAll ( 51 | { assert process.success }, 52 | { assert snapshot(process.out).match() } 53 | ) 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/flagstat/tests/tags.yml: -------------------------------------------------------------------------------- 1 | samtools/flagstat: 2 | - modules/nf-core/samtools/flagstat/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/idxstats/environment.yml: -------------------------------------------------------------------------------- 1 | name: samtools_idxstats 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::samtools=1.20 8 | - bioconda::htslib=1.20 9 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/idxstats/main.nf: -------------------------------------------------------------------------------- 1 | process SAMTOOLS_IDXSTATS { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/samtools:1.20--h50ea8bc_0' : 8 | 'biocontainers/samtools:1.20--h50ea8bc_0' }" 9 | 10 | input: 11 | tuple val(meta), path(bam), path(bai) 12 | 13 | output: 14 | tuple val(meta), path("*.idxstats"), emit: idxstats 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def prefix = task.ext.prefix ?: "${meta.id}" 23 | 24 | """ 25 | samtools \\ 26 | idxstats \\ 27 | --threads ${task.cpus-1} \\ 28 | $bam \\ 29 | > ${prefix}.idxstats 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 34 | END_VERSIONS 35 | """ 36 | 37 | stub: 38 | def prefix = task.ext.prefix ?: "${meta.id}" 39 | 40 | """ 41 | touch ${prefix}.idxstats 42 | 43 | cat <<-END_VERSIONS > versions.yml 44 | "${task.process}": 45 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 46 | END_VERSIONS 47 | """ 48 | } 49 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/idxstats/meta.yml: -------------------------------------------------------------------------------- 1 | name: samtools_idxstats 2 | description: Reports alignment summary statistics for a BAM/CRAM/SAM file 3 | keywords: 4 | - stats 5 | - mapping 6 | - counts 7 | - chromosome 8 | - bam 9 | - sam 10 | - cram 11 | tools: 12 | - samtools: 13 | description: | 14 | SAMtools is a set of utilities for interacting with and post-processing 15 | short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. 16 | These files are generated as output by short read aligners like BWA. 
17 | homepage: http://www.htslib.org/ 18 | documentation: http://www.htslib.org/doc/samtools.html 19 | doi: 10.1093/bioinformatics/btp352 20 | licence: ["MIT"] 21 | input: 22 | - meta: 23 | type: map 24 | description: | 25 | Groovy Map containing sample information 26 | e.g. [ id:'test', single_end:false ] 27 | - bam: 28 | type: file 29 | description: BAM/CRAM/SAM file 30 | pattern: "*.{bam,cram,sam}" 31 | - bai: 32 | type: file 33 | description: Index for BAM/CRAM/SAM file 34 | pattern: "*.{bai,crai,sai}" 35 | output: 36 | - meta: 37 | type: map 38 | description: | 39 | Groovy Map containing sample information 40 | e.g. [ id:'test', single_end:false ] 41 | - idxstats: 42 | type: file 43 | description: File containing samtools idxstats output 44 | pattern: "*.{idxstats}" 45 | - versions: 46 | type: file 47 | description: File containing software versions 48 | pattern: "versions.yml" 49 | authors: 50 | - "@drpatelh" 51 | maintainers: 52 | - "@drpatelh" 53 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/idxstats/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process SAMTOOLS_IDXSTATS" 4 | script "../main.nf" 5 | process "SAMTOOLS_IDXSTATS" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "samtools" 9 | tag "samtools/idxstats" 10 | 11 | test("bam") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = Channel.of([ 17 | [ id:'test', single_end:false ], // meta map 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam.bai', checkIfExists: true) 20 | ]) 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll ( 27 | { assert process.success }, 28 | { assert snapshot(process.out).match() } 29 | ) 30 | } 31 | } 32 | 33 | test("bam - 
stub") { 34 | options "-stub" 35 | when { 36 | process { 37 | """ 38 | input[0] = Channel.of([ 39 | [ id:'test', single_end:false ], // meta map 40 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true), 41 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam.bai', checkIfExists: true) 42 | ]) 43 | """ 44 | } 45 | } 46 | 47 | then { 48 | assertAll ( 49 | { assert process.success }, 50 | { assert snapshot(process.out).match() } 51 | ) 52 | } 53 | }} 54 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/idxstats/tests/tags.yml: -------------------------------------------------------------------------------- 1 | samtools/idxstats: 2 | - modules/nf-core/samtools/idxstats/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/index/environment.yml: -------------------------------------------------------------------------------- 1 | name: samtools_index 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::samtools=1.20 8 | - bioconda::htslib=1.20 9 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/index/main.nf: -------------------------------------------------------------------------------- 1 | process SAMTOOLS_INDEX { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/samtools:1.20--h50ea8bc_0' : 8 | 'biocontainers/samtools:1.20--h50ea8bc_0' }" 9 | 10 | input: 11 | tuple val(meta), path(input) 12 | 13 | output: 14 | tuple val(meta), path("*.bai") , optional:true, emit: bai 15 | tuple val(meta), path("*.csi") , optional:true, emit: csi 16 | tuple val(meta), path("*.crai"), optional:true, emit: crai 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | """ 25 | samtools \\ 26 | index \\ 27 | -@ ${task.cpus-1} \\ 28 | $args \\ 29 | $input 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 34 | END_VERSIONS 35 | """ 36 | 37 | stub: 38 | def args = task.ext.args ?: '' 39 | def extension = file(input).getExtension() == 'cram' ? 40 | "crai" : args.contains("-c") ? "csi" : "bai" 41 | """ 42 | touch ${input}.${extension} 43 | 44 | cat <<-END_VERSIONS > versions.yml 45 | "${task.process}": 46 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 47 | END_VERSIONS 48 | """ 49 | } 50 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/index/meta.yml: -------------------------------------------------------------------------------- 1 | name: samtools_index 2 | description: Index SAM/BAM/CRAM file 3 | keywords: 4 | - index 5 | - bam 6 | - sam 7 | - cram 8 | tools: 9 | - samtools: 10 | description: | 11 | SAMtools is a set of utilities for interacting with and post-processing 12 | short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. 13 | These files are generated as output by short read aligners like BWA. 
14 | homepage: http://www.htslib.org/ 15 | documentation: http://www.htslib.org/doc/samtools.html 16 | doi: 10.1093/bioinformatics/btp352 17 | licence: ["MIT"] 18 | input: 19 | - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing sample information 23 | e.g. [ id:'test', single_end:false ] 24 | - bam: 25 | type: file 26 | description: BAM/CRAM/SAM file 27 | pattern: "*.{bam,cram,sam}" 28 | output: 29 | - meta: 30 | type: map 31 | description: | 32 | Groovy Map containing sample information 33 | e.g. [ id:'test', single_end:false ] 34 | - bai: 35 | type: file 36 | description: BAM/CRAM/SAM index file 37 | pattern: "*.{bai,crai,sai}" 38 | - crai: 39 | type: file 40 | description: BAM/CRAM/SAM index file 41 | pattern: "*.{bai,crai,sai}" 42 | - csi: 43 | type: file 44 | description: CSI index file 45 | pattern: "*.{csi}" 46 | - versions: 47 | type: file 48 | description: File containing software versions 49 | pattern: "versions.yml" 50 | authors: 51 | - "@drpatelh" 52 | - "@ewels" 53 | - "@maxulysse" 54 | maintainers: 55 | - "@drpatelh" 56 | - "@ewels" 57 | - "@maxulysse" 58 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/index/tests/csi.nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: SAMTOOLS_INDEX { 4 | ext.args = '-c' 5 | } 6 | 7 | } 8 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/index/tests/tags.yml: -------------------------------------------------------------------------------- 1 | samtools/index: 2 | - modules/nf-core/samtools/index/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/sort/environment.yml: -------------------------------------------------------------------------------- 1 | name: samtools_sort 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | 
dependencies: 7 | - bioconda::samtools=1.20 8 | - bioconda::htslib=1.20 9 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/sort/meta.yml: -------------------------------------------------------------------------------- 1 | name: samtools_sort 2 | description: Sort SAM/BAM/CRAM file 3 | keywords: 4 | - sort 5 | - bam 6 | - sam 7 | - cram 8 | tools: 9 | - samtools: 10 | description: | 11 | SAMtools is a set of utilities for interacting with and post-processing 12 | short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. 13 | These files are generated as output by short read aligners like BWA. 14 | homepage: http://www.htslib.org/ 15 | documentation: http://www.htslib.org/doc/samtools.html 16 | doi: 10.1093/bioinformatics/btp352 17 | licence: ["MIT"] 18 | input: 19 | - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing sample information 23 | e.g. [ id:'test', single_end:false ] 24 | - bam: 25 | type: file 26 | description: BAM/CRAM/SAM file(s) 27 | pattern: "*.{bam,cram,sam}" 28 | - meta2: 29 | type: map 30 | description: | 31 | Groovy Map containing reference information 32 | e.g. [ id:'genome' ] 33 | - fasta: 34 | type: file 35 | description: Reference genome FASTA file 36 | pattern: "*.{fa,fasta,fna}" 37 | optional: true 38 | output: 39 | - meta: 40 | type: map 41 | description: | 42 | Groovy Map containing sample information 43 | e.g. 
[ id:'test', single_end:false ] 44 | - bam: 45 | type: file 46 | description: Sorted BAM file 47 | pattern: "*.{bam}" 48 | - cram: 49 | type: file 50 | description: Sorted CRAM file 51 | pattern: "*.{cram}" 52 | - crai: 53 | type: file 54 | description: CRAM index file (optional) 55 | pattern: "*.crai" 56 | - csi: 57 | type: file 58 | description: BAM index file (optional) 59 | pattern: "*.csi" 60 | - versions: 61 | type: file 62 | description: File containing software versions 63 | pattern: "versions.yml" 64 | authors: 65 | - "@drpatelh" 66 | - "@ewels" 67 | - "@matthdsm" 68 | maintainers: 69 | - "@drpatelh" 70 | - "@ewels" 71 | - "@matthdsm" 72 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/sort/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: SAMTOOLS_SORT { 4 | ext.prefix = { "${meta.id}.sorted" } 5 | ext.args = "--write-index" 6 | } 7 | 8 | } 9 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/sort/tests/nextflow_cram.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: SAMTOOLS_SORT { 4 | ext.prefix = { "${meta.id}.sorted" } 5 | ext.args = "--write-index --output-fmt cram" 6 | } 7 | 8 | } 9 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/sort/tests/tags.yml: -------------------------------------------------------------------------------- 1 | samtools/sort: 2 | - modules/nf-core/samtools/sort/** 3 | - tests/modules/nf-core/samtools/sort/** 4 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/stats/environment.yml: -------------------------------------------------------------------------------- 1 | name: samtools_stats 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - 
defaults 6 | dependencies: 7 | - bioconda::samtools=1.20 8 | - bioconda::htslib=1.20 9 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/stats/main.nf: -------------------------------------------------------------------------------- 1 | process SAMTOOLS_STATS { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/samtools:1.20--h50ea8bc_0' : 8 | 'biocontainers/samtools:1.20--h50ea8bc_0' }" 9 | 10 | input: 11 | tuple val(meta), path(input), path(input_index) 12 | tuple val(meta2), path(fasta) 13 | 14 | output: 15 | tuple val(meta), path("*.stats"), emit: stats 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | def reference = fasta ? 
"--reference ${fasta}" : "" 25 | """ 26 | samtools \\ 27 | stats \\ 28 | --threads ${task.cpus} \\ 29 | ${reference} \\ 30 | ${input} \\ 31 | > ${prefix}.stats 32 | 33 | cat <<-END_VERSIONS > versions.yml 34 | "${task.process}": 35 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 36 | END_VERSIONS 37 | """ 38 | 39 | stub: 40 | def prefix = task.ext.prefix ?: "${meta.id}" 41 | """ 42 | touch ${prefix}.stats 43 | 44 | cat <<-END_VERSIONS > versions.yml 45 | "${task.process}": 46 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 47 | END_VERSIONS 48 | """ 49 | } 50 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/stats/meta.yml: -------------------------------------------------------------------------------- 1 | name: samtools_stats 2 | description: Produces comprehensive statistics from SAM/BAM/CRAM file 3 | keywords: 4 | - statistics 5 | - counts 6 | - bam 7 | - sam 8 | - cram 9 | tools: 10 | - samtools: 11 | description: | 12 | SAMtools is a set of utilities for interacting with and post-processing 13 | short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. 14 | These files are generated as output by short read aligners like BWA. 15 | homepage: http://www.htslib.org/ 16 | documentation: http://www.htslib.org/doc/samtools.html 17 | doi: 10.1093/bioinformatics/btp352 18 | licence: ["MIT"] 19 | input: 20 | - meta: 21 | type: map 22 | description: | 23 | Groovy Map containing sample information 24 | e.g. [ id:'test', single_end:false ] 25 | - input: 26 | type: file 27 | description: BAM/CRAM file from alignment 28 | pattern: "*.{bam,cram}" 29 | - input_index: 30 | type: file 31 | description: BAI/CRAI file from alignment 32 | pattern: "*.{bai,crai}" 33 | - meta2: 34 | type: map 35 | description: | 36 | Groovy Map containing reference information 37 | e.g. 
[ id:'genome' ] 38 | - fasta: 39 | type: file 40 | description: Reference file the CRAM was created with (optional) 41 | pattern: "*.{fasta,fa}" 42 | output: 43 | - meta: 44 | type: map 45 | description: | 46 | Groovy Map containing sample information 47 | e.g. [ id:'test', single_end:false ] 48 | - stats: 49 | type: file 50 | description: File containing samtools stats output 51 | pattern: "*.{stats}" 52 | - versions: 53 | type: file 54 | description: File containing software versions 55 | pattern: "versions.yml" 56 | authors: 57 | - "@drpatelh" 58 | - "@FriederikeHanssen" 59 | - "@ramprasadn" 60 | maintainers: 61 | - "@drpatelh" 62 | - "@FriederikeHanssen" 63 | - "@ramprasadn" 64 | -------------------------------------------------------------------------------- /modules/nf-core/samtools/stats/tests/tags.yml: -------------------------------------------------------------------------------- 1 | samtools/stats: 2 | - modules/nf-core/samtools/stats/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/subread/featurecounts/environment.yml: -------------------------------------------------------------------------------- 1 | name: subread_featurecounts 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::subread=2.0.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/subread/featurecounts/main.nf: -------------------------------------------------------------------------------- 1 | process SUBREAD_FEATURECOUNTS { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/subread:2.0.1--hed695b0_0' : 8 | 'biocontainers/subread:2.0.1--hed695b0_0' }" 9 | 10 | input: 11 | tuple val(meta), path(bams), path(annotation) 12 | 13 | output: 14 | tuple val(meta), path("*featureCounts.txt") , emit: counts 15 | tuple val(meta), path("*featureCounts.txt.summary"), emit: summary 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | def paired_end = meta.single_end ? '' : '-p' 25 | 26 | def strandedness = 0 27 | if (meta.strandedness == 'forward') { 28 | strandedness = 1 29 | } else if (meta.strandedness == 'reverse') { 30 | strandedness = 2 31 | } 32 | """ 33 | featureCounts \\ 34 | $args \\ 35 | $paired_end \\ 36 | -T $task.cpus \\ 37 | -a $annotation \\ 38 | -s $strandedness \\ 39 | -o ${prefix}.featureCounts.txt \\ 40 | ${bams.join(' ')} 41 | 42 | cat <<-END_VERSIONS > versions.yml 43 | "${task.process}": 44 | subread: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") 45 | END_VERSIONS 46 | """ 47 | 48 | stub: 49 | def prefix = task.ext.prefix ?: "${meta.id}" 50 | """ 51 | touch ${prefix}.featureCounts.txt 52 | touch ${prefix}.featureCounts.txt.summary 53 | 54 | cat <<-END_VERSIONS > versions.yml 55 | "${task.process}": 56 | subread: \$( echo \$(featureCounts -v 2>&1) | sed -e "s/featureCounts v//g") 57 | END_VERSIONS 58 | """ 59 | } 60 | -------------------------------------------------------------------------------- /modules/nf-core/subread/featurecounts/meta.yml: -------------------------------------------------------------------------------- 1 | name: subread_featurecounts 2 | description: Count reads that map to genomic features 3 | keywords: 4 | - counts 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - featurecounts: 10 | description: featureCounts is a highly efficient general-purpose read summarization 
program that counts mapped reads for genomic features such as genes, exons, promoter, gene bodies, genomic bins and chromosomal locations. It can be used to count both RNA-seq and genomic DNA-seq reads. 11 | homepage: http://bioinf.wehi.edu.au/featureCounts/ 12 | documentation: http://bioinf.wehi.edu.au/subread-package/SubreadUsersGuide.pdf 13 | doi: "10.1093/bioinformatics/btt656" 14 | licence: ["GPL v3"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | e.g. [ id:'test', single_end:false ] 21 | - bam: 22 | type: file 23 | description: BAM/SAM file containing read alignments 24 | pattern: "*.{bam}" 25 | - annotation: 26 | type: file 27 | description: Genomic features annotation in GTF or SAF 28 | pattern: "*.{gtf,saf}" 29 | output: 30 | - meta: 31 | type: map 32 | description: | 33 | Groovy Map containing sample information 34 | e.g. [ id:'test', single_end:false ] 35 | - counts: 36 | type: file 37 | description: Counts of reads mapping to features 38 | pattern: "*featureCounts.txt" 39 | - summary: 40 | type: file 41 | description: Summary log file 42 | pattern: "*.featureCounts.txt.summary" 43 | - versions: 44 | type: file 45 | description: File containing software versions 46 | pattern: "versions.yml" 47 | authors: 48 | - "@ntoda03" 49 | maintainers: 50 | - "@ntoda03" 51 | -------------------------------------------------------------------------------- /modules/nf-core/subread/featurecounts/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } 4 | 5 | withName: SUBREAD_FEATURECOUNTS { 6 | ext.args = '-t CDS' 7 | } 8 | 9 | } 10 | -------------------------------------------------------------------------------- /modules/nf-core/subread/featurecounts/tests/tags.yml: 
-------------------------------------------------------------------------------- 1 | subread/featurecounts: 2 | - modules/nf-core/subread/featurecounts/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/trimgalore/environment.yml: -------------------------------------------------------------------------------- 1 | name: trimgalore 2 | 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | - defaults 7 | 8 | dependencies: 9 | - bioconda::cutadapt=3.4 10 | - bioconda::trim-galore=0.6.7 11 | -------------------------------------------------------------------------------- /modules/nf-core/trimgalore/tests/tags.yml: -------------------------------------------------------------------------------- 1 | trimgalore: 2 | - modules/nf-core/trimgalore/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/ucsc/bedgraphtobigwig/environment.yml: -------------------------------------------------------------------------------- 1 | name: ucsc_bedgraphtobigwig 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::ucsc-bedgraphtobigwig=445 8 | -------------------------------------------------------------------------------- /modules/nf-core/ucsc/bedgraphtobigwig/main.nf: -------------------------------------------------------------------------------- 1 | process UCSC_BEDGRAPHTOBIGWIG { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | // WARN: Version information not provided by tool on CLI. Please update version string below when bumping container versions. 6 | conda "${moduleDir}/environment.yml" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
8 | 'https://depot.galaxyproject.org/singularity/ucsc-bedgraphtobigwig:445--h954228d_0' : 9 | 'biocontainers/ucsc-bedgraphtobigwig:445--h954228d_0' }" 10 | 11 | input: 12 | tuple val(meta), path(bedgraph) 13 | path sizes 14 | 15 | output: 16 | tuple val(meta), path("*.bigWig"), emit: bigwig 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | def prefix = task.ext.prefix ?: "${meta.id}" 25 | def VERSION = '445' // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions. 26 | """ 27 | bedGraphToBigWig \\ 28 | $bedgraph \\ 29 | $sizes \\ 30 | ${prefix}.bigWig 31 | 32 | cat <<-END_VERSIONS > versions.yml 33 | "${task.process}": 34 | ucsc: $VERSION 35 | END_VERSIONS 36 | """ 37 | 38 | stub: 39 | def prefix = task.ext.prefix ?: "${meta.id}" 40 | def VERSION = '445' // WARN: Version information not provided by tool on CLI. Please update this string when bumping container versions. 41 | """ 42 | touch ${prefix}.bigWig 43 | 44 | cat <<-END_VERSIONS > versions.yml 45 | "${task.process}": 46 | ucsc: $VERSION 47 | END_VERSIONS 48 | """ 49 | } 50 | -------------------------------------------------------------------------------- /modules/nf-core/ucsc/bedgraphtobigwig/meta.yml: -------------------------------------------------------------------------------- 1 | name: ucsc_bedgraphtobigwig 2 | description: Convert a bedGraph file to bigWig format. 3 | keywords: 4 | - bedgraph 5 | - bigwig 6 | - ucsc 7 | - bedgraphtobigwig 8 | - converter 9 | tools: 10 | - ucsc: 11 | description: Convert a bedGraph file to bigWig format. 
12 | homepage: http://hgdownload.cse.ucsc.edu/admin/exe/ 13 | documentation: https://genome.ucsc.edu/goldenPath/help/bigWig.html 14 | licence: ["varies; see http://genome.ucsc.edu/license"] 15 | input: 16 | - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | e.g. [ id:'test', single_end:false ] 21 | - bedgraph: 22 | type: file 23 | description: bedGraph file 24 | pattern: "*.{bedGraph}" 25 | - sizes: 26 | type: file 27 | description: chromosome sizes file 28 | pattern: "*.{sizes}" 29 | output: 30 | - meta: 31 | type: map 32 | description: | 33 | Groovy Map containing sample information 34 | e.g. [ id:'test', single_end:false ] 35 | - versions: 36 | type: file 37 | description: File containing software versions 38 | pattern: "versions.yml" 39 | - bigwig: 40 | type: file 41 | description: bigWig file 42 | pattern: "*.{bigWig}" 43 | authors: 44 | - "@drpatelh" 45 | maintainers: 46 | - "@drpatelh" 47 | -------------------------------------------------------------------------------- /modules/nf-core/ucsc/bedgraphtobigwig/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process UCSC_BEDGRAPHTOBIGWIG" 4 | script "../main.nf" 5 | process "UCSC_BEDGRAPHTOBIGWIG" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "ucsc" 9 | tag "ucsc/bedgraphtobigwig" 10 | 11 | test("Should run without failures") { 12 | when { 13 | process { 14 | """ 15 | input[0] = Channel.of([ 16 | [ id:'test' ], // meta map 17 | file(params.modules_testdata_base_path + "genomics/sarscov2/illumina/bedgraph/test.bedgraph", checkIfExists: true) 18 | ]) 19 | input[1] = Channel.of(file(params.modules_testdata_base_path + "genomics/sarscov2/genome/genome.sizes", checkIfExists: true)) 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll ( 26 | { assert process.success }, 27 | { assert snapshot(process.out).match() } 28 | ) 29 | } 30 | } 31 | 32 | test("stub") { 33 | options 
"-stub" 34 | when { 35 | process { 36 | """ 37 | input[0] = Channel.of([ 38 | [ id:'test' ], // meta map 39 | file(params.modules_testdata_base_path + "genomics/sarscov2/illumina/bedgraph/test.bedgraph", checkIfExists: true) 40 | ]) 41 | input[1] = Channel.of(file(params.modules_testdata_base_path + "genomics/sarscov2/genome/genome.sizes", checkIfExists: true)) 42 | """ 43 | } 44 | } 45 | 46 | then { 47 | assertAll ( 48 | { assert process.success }, 49 | { assert snapshot(process.out).match() } 50 | ) 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /modules/nf-core/ucsc/bedgraphtobigwig/tests/tags.yml: -------------------------------------------------------------------------------- 1 | ucsc/bedgraphtobigwig: 2 | - modules/nf-core/ucsc/bedgraphtobigwig/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/umitools/extract/environment.yml: -------------------------------------------------------------------------------- 1 | name: umitools_extract 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::umi_tools=1.1.5 8 | -------------------------------------------------------------------------------- /modules/nf-core/umitools/extract/meta.yml: -------------------------------------------------------------------------------- 1 | name: umitools_extract 2 | description: Extracts UMI barcode from a read and add it to the read name, leaving any sample barcode in place 3 | keywords: 4 | - UMI 5 | - barcode 6 | - extract 7 | - umitools 8 | tools: 9 | - umi_tools: 10 | description: > 11 | UMI-tools contains tools for dealing with Unique Molecular Identifiers (UMIs)/Random Molecular Tags (RMTs) and single cell RNA-Seq cell barcodes 12 | documentation: https://umi-tools.readthedocs.io/en/latest/ 13 | license: "MIT" 14 | input: 15 | - meta: 16 | type: map 17 | description: | 18 | Groovy Map containing sample information 19 | e.g. 
[ id:'test', single_end:false ] 20 | - reads: 21 | type: list 22 | description: | 23 | List of input FASTQ files whose UMIs will be extracted. 24 | output: 25 | - meta: 26 | type: map 27 | description: | 28 | Groovy Map containing sample information 29 | e.g. [ id:'test', single_end:false ] 30 | - reads: 31 | type: file 32 | description: > 33 | Extracted FASTQ files. | For single-end reads, pattern is \${prefix}.umi_extract.fastq.gz. | For paired-end reads, pattern is \${prefix}.umi_extract_{1,2}.fastq.gz. 34 | pattern: "*.{fastq.gz}" 35 | - log: 36 | type: file 37 | description: Logfile for umi_tools 38 | pattern: "*.{log}" 39 | - versions: 40 | type: file 41 | description: File containing software versions 42 | pattern: "versions.yml" 43 | authors: 44 | - "@drpatelh" 45 | - "@grst" 46 | maintainers: 47 | - "@drpatelh" 48 | - "@grst" 49 | -------------------------------------------------------------------------------- /modules/nf-core/umitools/extract/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } 4 | 5 | withName: UMITOOLS_EXTRACT { 6 | ext.args = '--bc-pattern="NNNN"' 7 | } 8 | 9 | } 10 | -------------------------------------------------------------------------------- /modules/nf-core/umitools/extract/tests/tags.yml: -------------------------------------------------------------------------------- 1 | umitools/extract: 2 | - modules/nf-core/umitools/extract/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/untar/environment.yml: -------------------------------------------------------------------------------- 1 | name: untar 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - conda-forge::grep=3.11 8 | - conda-forge::sed=4.8 9 | - conda-forge::tar=1.34 10 | 
-------------------------------------------------------------------------------- /modules/nf-core/untar/meta.yml: -------------------------------------------------------------------------------- 1 | name: untar 2 | description: Extract files. 3 | keywords: 4 | - untar 5 | - uncompress 6 | - extract 7 | tools: 8 | - untar: 9 | description: | 10 | Extract tar.gz files. 11 | documentation: https://www.gnu.org/software/tar/manual/ 12 | licence: ["GPL-3.0-or-later"] 13 | input: 14 | - meta: 15 | type: map 16 | description: | 17 | Groovy Map containing sample information 18 | e.g. [ id:'test', single_end:false ] 19 | - archive: 20 | type: file 21 | description: File to be untar 22 | pattern: "*.{tar}.{gz}" 23 | output: 24 | - meta: 25 | type: map 26 | description: | 27 | Groovy Map containing sample information 28 | e.g. [ id:'test', single_end:false ] 29 | - untar: 30 | type: directory 31 | description: Directory containing contents of archive 32 | pattern: "*/" 33 | - versions: 34 | type: file 35 | description: File containing software versions 36 | pattern: "versions.yml" 37 | authors: 38 | - "@joseespinosa" 39 | - "@drpatelh" 40 | - "@matthdsm" 41 | - "@jfy133" 42 | maintainers: 43 | - "@joseespinosa" 44 | - "@drpatelh" 45 | - "@matthdsm" 46 | - "@jfy133" 47 | -------------------------------------------------------------------------------- /modules/nf-core/untar/tests/tags.yml: -------------------------------------------------------------------------------- 1 | untar: 2 | - modules/nf-core/untar/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/untarfiles/environment.yml: -------------------------------------------------------------------------------- 1 | name: untarfiles 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - conda-forge::sed=4.7 8 | - bioconda::grep=3.4 9 | - conda-forge::tar=1.34 10 | -------------------------------------------------------------------------------- 
/modules/nf-core/untarfiles/main.nf: -------------------------------------------------------------------------------- 1 | process UNTARFILES { 2 | tag "$archive" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : 8 | 'nf-core/ubuntu:20.04' }" 9 | 10 | input: 11 | tuple val(meta), path(archive) 12 | 13 | output: 14 | tuple val(meta), path("${prefix}/**") , emit: files 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def args2 = task.ext.args2 ?: '' 23 | prefix = task.ext.prefix ?: ( meta.id ? "${meta.id}" : archive.baseName.toString().replaceFirst(/\.tar$/, "")) 24 | 25 | """ 26 | mkdir $prefix 27 | 28 | tar \\ 29 | -C $prefix \\ 30 | -xavf \\ 31 | $args \\ 32 | $archive \\ 33 | $args2 34 | 35 | cat <<-END_VERSIONS > versions.yml 36 | "${task.process}": 37 | untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') 38 | END_VERSIONS 39 | """ 40 | 41 | stub: 42 | prefix = task.ext.prefix ?: "${meta.id}" 43 | """ 44 | mkdir $prefix 45 | touch ${prefix}/file.txt 46 | 47 | cat <<-END_VERSIONS > versions.yml 48 | "${task.process}": 49 | untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') 50 | END_VERSIONS 51 | """ 52 | } 53 | -------------------------------------------------------------------------------- /modules/nf-core/untarfiles/meta.yml: -------------------------------------------------------------------------------- 1 | name: untarfiles 2 | description: Extract files. 3 | keywords: 4 | - untar 5 | - uncompress 6 | - files 7 | tools: 8 | - untar: 9 | description: | 10 | Extract tar.gz files. 
11 | documentation: https://www.gnu.org/software/tar/manual/ 12 | licence: ["GPL-3.0-or-later"] 13 | input: 14 | - meta: 15 | type: map 16 | description: | 17 | Groovy Map containing sample information 18 | e.g. [ id:'test', single_end:false ] 19 | - archive: 20 | type: file 21 | description: File to be untar 22 | pattern: "*.{tar}.{gz}" 23 | output: 24 | - meta: 25 | type: map 26 | description: | 27 | Groovy Map containing sample information 28 | e.g. [ id:'test', single_end:false ] 29 | - files: 30 | type: string 31 | description: A list containing references to individual archive files 32 | pattern: "*/**" 33 | - versions: 34 | type: file 35 | description: File containing software versions 36 | pattern: "versions.yml" 37 | authors: 38 | - "@joseespinosa" 39 | - "@drpatelh" 40 | - "@matthdsm" 41 | - "@jfy133" 42 | - "@pinin4fjords" 43 | maintainers: 44 | - "@joseespinosa" 45 | - "@drpatelh" 46 | - "@matthdsm" 47 | - "@jfy133" 48 | - "@pinin4fjords" 49 | -------------------------------------------------------------------------------- /subworkflows/local/bam_bedgraph_bigwig_bedtools_ucsc.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Convert BAM to normalised bigWig via bedGraph using BEDTools and UCSC 3 | // 4 | 5 | include { BEDTOOLS_GENOMECOV } from '../../modules/local/bedtools_genomecov' 6 | include { UCSC_BEDGRAPHTOBIGWIG } from '../../modules/nf-core/ucsc/bedgraphtobigwig/main' 7 | 8 | workflow BAM_BEDGRAPH_BIGWIG_BEDTOOLS_UCSC { 9 | take: 10 | ch_bam_flagstat // channel: [ val(meta), [bam], [flagstat] ] 11 | ch_chrom_sizes // channel: [ bed ] 12 | 13 | main: 14 | 15 | ch_versions = Channel.empty() 16 | 17 | // 18 | // Create bedGraph coverage track 19 | // 20 | BEDTOOLS_GENOMECOV ( 21 | ch_bam_flagstat 22 | ) 23 | ch_versions = ch_versions.mix(BEDTOOLS_GENOMECOV.out.versions.first()) 24 | 25 | // 26 | // Create bigWig coverage tracks 27 | // 28 | UCSC_BEDGRAPHTOBIGWIG ( 29 | 
BEDTOOLS_GENOMECOV.out.bedgraph, 30 | ch_chrom_sizes 31 | ) 32 | ch_versions = ch_versions.mix(UCSC_BEDGRAPHTOBIGWIG.out.versions.first()) 33 | 34 | emit: 35 | bedgraph = BEDTOOLS_GENOMECOV.out.bedgraph // channel: [ val(meta), [ bedgraph ] ] 36 | scale_factor = BEDTOOLS_GENOMECOV.out.scale_factor // channel: [ val(meta), [ txt ] ] 37 | 38 | bigwig = UCSC_BEDGRAPHTOBIGWIG.out.bigwig // channel: [ val(meta), [ bigwig ] ] 39 | 40 | versions = ch_versions // channel: [ versions.yml ] 41 | } 42 | -------------------------------------------------------------------------------- /subworkflows/local/input_check.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Check input samplesheet and get read channels 3 | // 4 | 5 | include { SAMPLESHEET_CHECK } from '../../modules/local/samplesheet_check' 6 | 7 | workflow INPUT_CHECK { 8 | take: 9 | samplesheet // file: /path/to/samplesheet.csv 10 | seq_center // string: sequencing center for read group 11 | 12 | main: 13 | SAMPLESHEET_CHECK ( samplesheet ) 14 | .csv 15 | .splitCsv ( header:true, sep:',' ) 16 | .map { create_fastq_channel(it, seq_center) } 17 | .set { reads } 18 | 19 | emit: 20 | reads // channel: [ val(meta), [ reads ] ] 21 | versions = SAMPLESHEET_CHECK.out.versions // channel: [ versions.yml ] 22 | } 23 | 24 | // Function to get list of [ meta, [ fastq_1, fastq_2 ] ] 25 | def create_fastq_channel(LinkedHashMap row, String seq_center) { 26 | def meta = [:] 27 | meta.id = row.sample 28 | meta.single_end = row.single_end.toBoolean() 29 | meta.antibody = row.antibody 30 | meta.control = row.control 31 | 32 | def read_group = "\'@RG\\tID:${meta.id}\\tSM:${meta.id - ~/_T\d+$/}\\tPL:ILLUMINA\\tLB:${meta.id}\\tPU:1\'" 33 | if (seq_center) { 34 | read_group = "\'@RG\\tID:${meta.id}\\tSM:${meta.id - ~/_T\d+$/}\\tPL:ILLUMINA\\tLB:${meta.id}\\tPU:1\\tCN:${seq_center}\'" 35 | } 36 | meta.read_group = read_group 37 | 38 | // add path(s) of the fastq file(s) to the meta map 39 | 
def fastq_meta = [] 40 | if (!file(row.fastq_1).exists()) { 41 | exit 1, "ERROR: Please check input samplesheet -> Read 1 FastQ file does not exist!\n${row.fastq_1}" 42 | } 43 | if (meta.single_end) { 44 | fastq_meta = [ meta, [ file(row.fastq_1) ] ] 45 | } else { 46 | if (!file(row.fastq_2).exists()) { 47 | exit 1, "ERROR: Please check input samplesheet -> Read 2 FastQ file does not exist!\n${row.fastq_2}" 48 | } 49 | fastq_meta = [ meta, [ file(row.fastq_1), file(row.fastq_2) ] ] 50 | } 51 | return fastq_meta 52 | } 53 | -------------------------------------------------------------------------------- /subworkflows/nf-core/bam_markduplicates_picard/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/bam_markduplicates_picard: 2 | - subworkflows/nf-core/bam_markduplicates_picard/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/bam_sort_stats_samtools/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Sort, index BAM file and run samtools stats, flagstat and idxstats 3 | // 4 | 5 | include { SAMTOOLS_SORT } from '../../../modules/nf-core/samtools/sort/main' 6 | include { SAMTOOLS_INDEX } from '../../../modules/nf-core/samtools/index/main' 7 | include { BAM_STATS_SAMTOOLS } from '../bam_stats_samtools/main' 8 | 9 | workflow BAM_SORT_STATS_SAMTOOLS { 10 | take: 11 | ch_bam // channel: [ val(meta), [ bam ] ] 12 | ch_fasta // channel: [ val(meta), path(fasta) ] 13 | 14 | main: 15 | 16 | ch_versions = Channel.empty() 17 | 18 | SAMTOOLS_SORT ( ch_bam, ch_fasta ) 19 | ch_versions = ch_versions.mix(SAMTOOLS_SORT.out.versions.first()) 20 | 21 | SAMTOOLS_INDEX ( SAMTOOLS_SORT.out.bam ) 22 | ch_versions = ch_versions.mix(SAMTOOLS_INDEX.out.versions.first()) 23 | 24 | SAMTOOLS_SORT.out.bam 25 | .join(SAMTOOLS_INDEX.out.bai, by: [0], remainder: true) 26 | .join(SAMTOOLS_INDEX.out.csi, by: [0], remainder: 
true) 27 | .map { 28 | meta, bam, bai, csi -> 29 | if (bai) { 30 | [ meta, bam, bai ] 31 | } else { 32 | [ meta, bam, csi ] 33 | } 34 | } 35 | .set { ch_bam_bai } 36 | 37 | BAM_STATS_SAMTOOLS ( ch_bam_bai, ch_fasta ) 38 | ch_versions = ch_versions.mix(BAM_STATS_SAMTOOLS.out.versions) 39 | 40 | emit: 41 | bam = SAMTOOLS_SORT.out.bam // channel: [ val(meta), [ bam ] ] 42 | bai = SAMTOOLS_INDEX.out.bai // channel: [ val(meta), [ bai ] ] 43 | csi = SAMTOOLS_INDEX.out.csi // channel: [ val(meta), [ csi ] ] 44 | 45 | stats = BAM_STATS_SAMTOOLS.out.stats // channel: [ val(meta), [ stats ] ] 46 | flagstat = BAM_STATS_SAMTOOLS.out.flagstat // channel: [ val(meta), [ flagstat ] ] 47 | idxstats = BAM_STATS_SAMTOOLS.out.idxstats // channel: [ val(meta), [ idxstats ] ] 48 | 49 | versions = ch_versions // channel: [ versions.yml ] 50 | } 51 | -------------------------------------------------------------------------------- /subworkflows/nf-core/bam_sort_stats_samtools/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: bam_sort_stats_samtools 3 | description: Sort SAM/BAM/CRAM file 4 | keywords: 5 | - sort 6 | - bam 7 | - sam 8 | - cram 9 | components: 10 | - samtools/sort 11 | - samtools/index 12 | - samtools/stats 13 | - samtools/idxstats 14 | - samtools/flagstat 15 | - bam_stats_samtools 16 | input: 17 | - meta: 18 | type: map 19 | description: | 20 | Groovy Map containing sample information 21 | e.g. 
[ id:'test', single_end:false ] 22 | - bam: 23 | type: file 24 | description: BAM/CRAM/SAM file 25 | pattern: "*.{bam,cram,sam}" 26 | - fasta: 27 | type: file 28 | description: Reference genome fasta file 29 | pattern: "*.{fasta,fa}" 30 | # TODO Update when we decide on a standard for subworkflow docs 31 | output: 32 | - meta: 33 | type: map 34 | description: | 35 | Groovy Map containing sample information 36 | e.g. [ id:'test', single_end:false ] 37 | - bam: 38 | type: file 39 | description: Sorted BAM/CRAM/SAM file 40 | pattern: "*.{bam,cram,sam}" 41 | - bai: 42 | type: file 43 | description: BAM/CRAM/SAM index file 44 | pattern: "*.{bai,crai,sai}" 45 | - csi: 46 | type: file 47 | description: CSI index file for the sorted BAM file 48 | pattern: "*.{csi}" 49 | - stats: 50 | type: file 51 | description: File containing samtools stats output 52 | pattern: "*.{stats}" 53 | - flagstat: 54 | type: file 55 | description: File containing samtools flagstat output 56 | pattern: "*.{flagstat}" 57 | - idxstats: 58 | type: file 59 | description: File containing samtools idxstats output 60 | pattern: "*.{idxstats}" 61 | - versions: 62 | type: file 63 | description: File containing software versions 64 | pattern: "versions.yml" 65 | authors: 66 | - "@drpatelh" 67 | - "@ewels" 68 | maintainers: 69 | - "@drpatelh" 70 | - "@ewels" 71 | -------------------------------------------------------------------------------- /subworkflows/nf-core/bam_sort_stats_samtools/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/bam_sort_stats_samtools: 2 | - subworkflows/nf-core/bam_sort_stats_samtools/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/bam_stats_samtools/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Run SAMtools stats, flagstat and idxstats 3 | // 4 | 5 | include { SAMTOOLS_STATS } from
'../../../modules/nf-core/samtools/stats/main' 6 | include { SAMTOOLS_IDXSTATS } from '../../../modules/nf-core/samtools/idxstats/main' 7 | include { SAMTOOLS_FLAGSTAT } from '../../../modules/nf-core/samtools/flagstat/main' 8 | 9 | workflow BAM_STATS_SAMTOOLS { 10 | take: 11 | ch_bam_bai // channel: [ val(meta), path(bam), path(bai) ] 12 | ch_fasta // channel: [ val(meta), path(fasta) ] 13 | 14 | main: 15 | ch_versions = Channel.empty() 16 | 17 | SAMTOOLS_STATS ( ch_bam_bai, ch_fasta ) 18 | ch_versions = ch_versions.mix(SAMTOOLS_STATS.out.versions) 19 | 20 | SAMTOOLS_FLAGSTAT ( ch_bam_bai ) 21 | ch_versions = ch_versions.mix(SAMTOOLS_FLAGSTAT.out.versions) 22 | 23 | SAMTOOLS_IDXSTATS ( ch_bam_bai ) 24 | ch_versions = ch_versions.mix(SAMTOOLS_IDXSTATS.out.versions) 25 | 26 | emit: 27 | stats = SAMTOOLS_STATS.out.stats // channel: [ val(meta), path(stats) ] 28 | flagstat = SAMTOOLS_FLAGSTAT.out.flagstat // channel: [ val(meta), path(flagstat) ] 29 | idxstats = SAMTOOLS_IDXSTATS.out.idxstats // channel: [ val(meta), path(idxstats) ] 30 | 31 | versions = ch_versions // channel: [ path(versions.yml) ] 32 | } 33 | -------------------------------------------------------------------------------- /subworkflows/nf-core/bam_stats_samtools/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: bam_stats_samtools 3 | description: Produces comprehensive statistics from SAM/BAM/CRAM file 4 | keywords: 5 | - statistics 6 | - counts 7 | - bam 8 | - sam 9 | - cram 10 | components: 11 | - samtools/stats 12 | - samtools/idxstats 13 | - samtools/flagstat 14 | input: 15 | - ch_bam_bai: 16 | description: | 17 | The input channel containing the BAM/CRAM and it's index 18 | Structure: [ val(meta), path(bam), path(bai) ] 19 | - ch_fasta: 20 | description: | 21 | Reference genome fasta file 22 | Structure: [ path(fasta) ] 23 
| output: 24 | - stats: 25 | description: | 26 | File containing samtools stats output 27 | Structure: [ val(meta), path(stats) ] 28 | - flagstat: 29 | description: | 30 | File containing samtools flagstat output 31 | Structure: [ val(meta), path(flagstat) ] 32 | - idxstats: 33 | description: | 34 | File containing samtools idxstats output 35 | Structure: [ val(meta), path(idxstats)] 36 | - versions: 37 | description: | 38 | Files containing software versions 39 | Structure: [ path(versions.yml) ] 40 | authors: 41 | - "@drpatelh" 42 | maintainers: 43 | - "@drpatelh" 44 | -------------------------------------------------------------------------------- /subworkflows/nf-core/bam_stats_samtools/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/bam_stats_samtools: 2 | - subworkflows/nf-core/bam_stats_samtools/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_bowtie2/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Alignment with Bowtie2 3 | // 4 | 5 | include { BOWTIE2_ALIGN } from '../../../modules/nf-core/bowtie2/align/main' 6 | include { BAM_SORT_STATS_SAMTOOLS } from '../bam_sort_stats_samtools/main' 7 | 8 | workflow FASTQ_ALIGN_BOWTIE2 { 9 | take: 10 | ch_reads // channel: [ val(meta), [ reads ] ] 11 | ch_index // channel: /path/to/bowtie2/index/ 12 | save_unaligned // val 13 | sort_bam // val 14 | ch_fasta // channel: /path/to/reference.fasta 15 | 16 | main: 17 | 18 | ch_versions = Channel.empty() 19 | 20 | // 21 | // Map reads with Bowtie2 22 | // 23 | BOWTIE2_ALIGN ( ch_reads, ch_index, ch_fasta, save_unaligned, sort_bam ) 24 | ch_versions = ch_versions.mix(BOWTIE2_ALIGN.out.versions) 25 | 26 | // 27 | // Sort, index BAM file and run samtools stats, flagstat and idxstats 28 | // 29 | BAM_SORT_STATS_SAMTOOLS ( BOWTIE2_ALIGN.out.bam, ch_fasta ) 30 | ch_versions = 
ch_versions.mix(BAM_SORT_STATS_SAMTOOLS.out.versions) 31 | 32 | emit: 33 | bam_orig = BOWTIE2_ALIGN.out.bam // channel: [ val(meta), aligned ] 34 | log_out = BOWTIE2_ALIGN.out.log // channel: [ val(meta), log ] 35 | fastq = BOWTIE2_ALIGN.out.fastq // channel: [ val(meta), fastq ] 36 | 37 | bam = BAM_SORT_STATS_SAMTOOLS.out.bam // channel: [ val(meta), [ bam ] ] 38 | bai = BAM_SORT_STATS_SAMTOOLS.out.bai // channel: [ val(meta), [ bai ] ] 39 | csi = BAM_SORT_STATS_SAMTOOLS.out.csi // channel: [ val(meta), [ csi ] ] 40 | stats = BAM_SORT_STATS_SAMTOOLS.out.stats // channel: [ val(meta), [ stats ] ] 41 | flagstat = BAM_SORT_STATS_SAMTOOLS.out.flagstat // channel: [ val(meta), [ flagstat ] ] 42 | idxstats = BAM_SORT_STATS_SAMTOOLS.out.idxstats // channel: [ val(meta), [ idxstats ] ] 43 | 44 | versions = ch_versions // channel: [ versions.yml ] 45 | } 46 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_bowtie2/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: fastq_align_bowtie2 3 | description: Align reads to a reference genome using bowtie2 then sort with samtools 4 | keywords: 5 | - align 6 | - fasta 7 | - genome 8 | - reference 9 | components: 10 | - bowtie2/align 11 | - samtools/sort 12 | - samtools/index 13 | - samtools/stats 14 | - samtools/idxstats 15 | - samtools/flagstat 16 | - bam_sort_stats_samtools 17 | input: 18 | - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing sample information 22 | e.g. [ id:'test', single_end:false ] 23 | - ch_reads: 24 | type: file 25 | description: | 26 | List of input FastQ files of size 1 and 2 for single-end and paired-end data, 27 | respectively. 
28 | - ch_index: 29 | type: file 30 | description: Bowtie2 genome index files 31 | pattern: "*.ebwt" 32 | - save_unaligned: 33 | type: boolean 34 | description: | 35 | Save reads that do not map to the reference (true) or discard them (false) 36 | (default: false) 37 | - sort_bam: 38 | type: boolean 39 | description: | 40 | Use samtools sort (true) or samtools view (false) 41 | default: false 42 | - ch_fasta: 43 | type: file 44 | description: Reference fasta file 45 | pattern: "*.{fasta,fa}" 46 | # TODO Update when we decide on a standard for subworkflow docs 47 | output: 48 | - bam: 49 | type: file 50 | description: Output BAM file containing read alignments 51 | pattern: "*.{bam}" 52 | - versions: 53 | type: file 54 | description: File containing software versions 55 | pattern: "versions.yml" 56 | - fastq: 57 | type: file 58 | description: Unaligned FastQ files 59 | pattern: "*.fastq.gz" 60 | - log: 61 | type: file 62 | description: Alignment log 63 | pattern: "*.log" 64 | authors: 65 | - "@drpatelh" 66 | maintainers: 67 | - "@drpatelh" 68 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_bowtie2/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: '.*:BAM_SORT_STATS_SAMTOOLS:SAMTOOLS_.*' { 3 | ext.prefix = { "${meta.id}.sorted" } 4 | } 5 | withName: '.*:BAM_SORT_STATS_SAMTOOLS:BAM_STATS_SAMTOOLS:.*' { 6 | ext.prefix = { "${meta.id}.sorted.bam" } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_bowtie2/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/fastq_align_bowtie2: 2 | - subworkflows/nf-core/fastq_align_bowtie2/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_bwa/main.nf: 
-------------------------------------------------------------------------------- 1 | // 2 | // Alignment with BWA 3 | // 4 | 5 | include { BWA_MEM } from '../../../modules/nf-core/bwa/mem/main' 6 | include { BAM_SORT_STATS_SAMTOOLS } from '../bam_sort_stats_samtools/main' 7 | 8 | workflow FASTQ_ALIGN_BWA { 9 | take: 10 | ch_reads // channel (mandatory): [ val(meta), [ path(reads) ] ] 11 | ch_index // channel (mandatory): [ val(meta2), path(index) ] 12 | val_sort_bam // boolean (mandatory): true or false 13 | ch_fasta // channel (optional) : [ val(meta3), path(fasta) ] 14 | 15 | main: 16 | ch_versions = Channel.empty() 17 | 18 | // 19 | // Map reads with BWA 20 | // 21 | 22 | BWA_MEM ( ch_reads, ch_index, ch_fasta, val_sort_bam ) 23 | ch_versions = ch_versions.mix(BWA_MEM.out.versions.first()) 24 | 25 | // 26 | // Sort, index BAM file and run samtools stats, flagstat and idxstats 27 | // 28 | 29 | BAM_SORT_STATS_SAMTOOLS ( BWA_MEM.out.bam, ch_fasta ) 30 | ch_versions = ch_versions.mix(BAM_SORT_STATS_SAMTOOLS.out.versions) 31 | 32 | emit: 33 | bam_orig = BWA_MEM.out.bam // channel: [ val(meta), path(bam) ] 34 | 35 | bam = BAM_SORT_STATS_SAMTOOLS.out.bam // channel: [ val(meta), path(bam) ] 36 | bai = BAM_SORT_STATS_SAMTOOLS.out.bai // channel: [ val(meta), path(bai) ] 37 | csi = BAM_SORT_STATS_SAMTOOLS.out.csi // channel: [ val(meta), path(csi) ] 38 | stats = BAM_SORT_STATS_SAMTOOLS.out.stats // channel: [ val(meta), path(stats) ] 39 | flagstat = BAM_SORT_STATS_SAMTOOLS.out.flagstat // channel: [ val(meta), path(flagstat) ] 40 | idxstats = BAM_SORT_STATS_SAMTOOLS.out.idxstats // channel: [ val(meta), path(idxstats) ] 41 | 42 | versions = ch_versions // channel: [ path(versions.yml) ] 43 | } 44 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_bwa/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 
'.*:BAM_SORT_STATS_SAMTOOLS:SAMTOOLS_.*' { 3 | ext.prefix = { "${meta.id}.sorted" } 4 | } 5 | withName: '.*:BAM_SORT_STATS_SAMTOOLS:BAM_STATS_SAMTOOLS:.*' { 6 | ext.prefix = { "${meta.id}.sorted.bam" } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_bwa/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/fastq_align_bwa: 2 | - subworkflows/nf-core/fastq_align_bwa/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_align_chromap/main.nf: -------------------------------------------------------------------------------- 1 | /* 2 | * Map reads, sort, index BAM file and run samtools stats, flagstat and idxstats 3 | */ 4 | 5 | include { CHROMAP_CHROMAP } from '../../../modules/nf-core/chromap/chromap/main' 6 | include { BAM_SORT_STATS_SAMTOOLS } from '../bam_sort_stats_samtools/main' 7 | 8 | workflow FASTQ_ALIGN_CHROMAP { 9 | take: 10 | ch_reads // channel (mandatory): [ val(meta), [ reads ] ] 11 | ch_index // channel (mandatory): [ val(meta2), [ index ] ] 12 | ch_fasta // channel (mandatory): [ val(meta2), [ fasta ] ] 13 | ch_barcodes // channel (optional): [ barcodes ] 14 | ch_whitelist // channel (optional): [ whitelist ] 15 | ch_chr_order // channel (optional): [ chr_order ] 16 | ch_pairs_chr_order // channel (optional): [ pairs_chr_order ] 17 | 18 | main: 19 | ch_versions = Channel.empty() 20 | 21 | // 22 | // Map reads with CHROMAP 23 | // 24 | CHROMAP_CHROMAP(ch_reads, ch_fasta, ch_index, ch_barcodes, ch_whitelist, ch_chr_order, ch_pairs_chr_order) 25 | ch_versions = ch_versions.mix(CHROMAP_CHROMAP.out.versions) 26 | 27 | // 28 | // Sort, index BAM file and run samtools stats, flagstat and idxstats 29 | // 30 | BAM_SORT_STATS_SAMTOOLS(CHROMAP_CHROMAP.out.bam, ch_fasta) 31 | ch_versions = ch_versions.mix(BAM_SORT_STATS_SAMTOOLS.out.versions) 32 | 33 | emit: 34 |
bam = BAM_SORT_STATS_SAMTOOLS.out.bam // channel: [ val(meta), [ bam ] ] 35 | bai = BAM_SORT_STATS_SAMTOOLS.out.bai // channel: [ val(meta), [ bai ] ] 36 | stats = BAM_SORT_STATS_SAMTOOLS.out.stats // channel: [ val(meta), [ stats ] ] 37 | flagstat = BAM_SORT_STATS_SAMTOOLS.out.flagstat // channel: [ val(meta), [ flagstat ] ] 38 | idxstats = BAM_SORT_STATS_SAMTOOLS.out.idxstats // channel: [ val(meta), [ idxstats ] ] 39 | 40 | versions = ch_versions // path: versions.yml 41 | } 42 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_fastqc_umitools_trimgalore/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: UMITOOLS_EXTRACT { 3 | ext.args = '--bc-pattern="NNNN"' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_fastqc_umitools_trimgalore/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/fastq_fastqc_umitools_trimgalore: 2 | - subworkflows/nf-core/fastq_fastqc_umitools_trimgalore/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NEXTFLOW_PIPELINE" 3 | description: Subworkflow with functionality that may be useful for any Nextflow pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - print_version: 12 | type: boolean 13 | description: | 14 | Print the version of the pipeline and exit 15 | - dump_parameters: 16 | type: boolean 17 | description: | 18 | Dump the parameters of the pipeline to a JSON file 19 | - 
output_directory: 20 | type: directory 21 | description: Path to output dir to write JSON file to. 22 | pattern: "results/" 23 | - check_conda_channel: 24 | type: boolean 25 | description: | 26 | Check if the conda channel priority is correct. 27 | output: 28 | - dummy_emit: 29 | type: boolean 30 | description: | 31 | Dummy emit to make nf-core subworkflows lint happy 32 | authors: 33 | - "@adamrtalbot" 34 | - "@drpatelh" 35 | maintainers: 36 | - "@adamrtalbot" 37 | - "@drpatelh" 38 | - "@maxulysse" 39 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" 6 | config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" 7 | tag 'subworkflows' 8 | tag 'utils_nextflow_pipeline' 9 | tag 'subworkflows/utils_nextflow_pipeline' 10 | 11 | test("Test Function getWorkflowVersion") { 12 | 13 | function "getWorkflowVersion" 14 | 15 | then { 16 | assertAll( 17 | { assert function.success }, 18 | { assert snapshot(function.result).match() } 19 | ) 20 | } 21 | } 22 | 23 | test("Test Function dumpParametersToJSON") { 24 | 25 | function "dumpParametersToJSON" 26 | 27 | when { 28 | function { 29 | """ 30 | // define inputs of the function here. 
Example: 31 | input[0] = "$outputDir" 32 | """.stripIndent() 33 | } 34 | } 35 | 36 | then { 37 | assertAll( 38 | { assert function.success } 39 | ) 40 | } 41 | } 42 | 43 | test("Test Function checkCondaChannels") { 44 | 45 | function "checkCondaChannels" 46 | 47 | then { 48 | assertAll( 49 | { assert function.success }, 50 | { assert snapshot(function.result).match() } 51 | ) 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Test Function getWorkflowVersion": { 3 | "content": [ 4 | "v9.9.9" 5 | ], 6 | "meta": { 7 | "nf-test": "0.8.4", 8 | "nextflow": "23.10.1" 9 | }, 10 | "timestamp": "2024-02-28T12:02:05.308243" 11 | }, 12 | "Test Function checkCondaChannels": { 13 | "content": null, 14 | "meta": { 15 | "nf-test": "0.8.4", 16 | "nextflow": "23.10.1" 17 | }, 18 | "timestamp": "2024-02-28T12:02:12.425833" 19 | } 20 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nextflow_pipeline: 2 | - subworkflows/nf-core/utils_nextflow_pipeline/** 3 | -------------------------------------------------------------------------------- 
/subworkflows/nf-core/utils_nfcore_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFCORE_PIPELINE" 3 | description: Subworkflow with utility functions specific to the nf-core pipeline template 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - nextflow_cli_args: 12 | type: list 13 | description: | 14 | Nextflow CLI positional arguments 15 | output: 16 | - success: 17 | type: boolean 18 | description: | 19 | Dummy output to indicate success 20 | authors: 21 | - "@adamrtalbot" 22 | maintainers: 23 | - "@adamrtalbot" 24 | - "@maxulysse" 25 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NFCORE_PIPELINE" 4 | script "../main.nf" 5 | config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" 6 | workflow "UTILS_NFCORE_PIPELINE" 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "utils_nfcore_pipeline" 10 | tag "subworkflows/utils_nfcore_pipeline" 11 | 12 | test("Should run without failures") { 13 | 14 | when { 15 | workflow { 16 | """ 17 | input[0] = [] 18 | """ 19 | } 20 | } 21 | 22 | then { 23 | assertAll( 24 | { assert workflow.success }, 25 | { assert snapshot(workflow.out).match() } 26 | ) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | true 7 | ], 8 | 
"valid_config": [ 9 | true 10 | ] 11 | } 12 | ], 13 | "meta": { 14 | "nf-test": "0.8.4", 15 | "nextflow": "23.10.1" 16 | }, 17 | "timestamp": "2024-02-28T12:03:25.726491" 18 | } 19 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nfcore_pipeline: 2 | - subworkflows/nf-core/utils_nfcore_pipeline/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFVALIDATION_PLUGIN" 3 | description: Use nf-validation to initiate and validate a pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - validation 9 | components: [] 10 | input: 11 | - print_help: 12 | type: boolean 13 | description: | 14 | Print help message and exit 15 | - workflow_command: 16 | type: string 17 | description: | 18 | The command to run the workflow e.g. 
"nextflow run main.nf" 19 | - pre_help_text: 20 | type: string 21 | description: | 22 | Text to print before the help message 23 | - post_help_text: 24 | type: string 25 | description: | 26 | Text to print after the help message 27 | - validate_params: 28 | type: boolean 29 | description: | 30 | Validate the parameters and error if invalid. 31 | - schema_filename: 32 | type: string 33 | description: | 34 | The filename of the schema to validate against. 35 | output: 36 | - dummy_emit: 37 | type: boolean 38 | description: | 39 | Dummy emit to make nf-core subworkflows lint happy 40 | authors: 41 | - "@adamrtalbot" 42 | maintainers: 43 | - "@adamrtalbot" 44 | - "@maxulysse" 45 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nfvalidation_plugin: 2 | - subworkflows/nf-core/utils_nfvalidation_plugin/** 3 | -------------------------------------------------------------------------------- /tower.yml: -------------------------------------------------------------------------------- 1 | reports: 2 | multiqc_report.html: 3 | display: "MultiQC HTML report" 4 | macs3_peak.plots.pdf: 5 | display: "All samples MACS3 peak QC PDF plots" 6 | macs3_annotatePeaks.plots.pdf: 7 | display: "All samples HOMER annotatePeaks.pl QC PDF plots" 8 | "*.consensus_peaks.plots.pdf": 9 | display: "Consensus peaks DESeq2 QC PDF plots" 10 | "*.consensus_peaks.boolean.intersect.plot.pdf": 11 | display: "Consensus peaks UpSetR intersection PDF plots" 12 | "*.consensus_peaks.boolean.annotatePeaks.txt": 13 | display: "Consensus peaks annotated by HOMER" 14 | "*.plotHeatmap.pdf": 15 | display: "Per-sample deepTools plotHeatmap PDF plots" 16 | "*_peaks.broadPeak": 17 | display: "Per-sample MACS3 broadPeak file" 18 | "*_peaks.narrowPeak": 19 | display: "Per-sample MACS3 narrowPeak file" 20 | 
--------------------------------------------------------------------------------