├── .devcontainer └── devcontainer.json ├── .editorconfig ├── .gitattributes ├── .github ├── .dockstore.yml ├── CODEOWNERS ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ └── feature_request.yml ├── PULL_REQUEST_TEMPLATE.md ├── actions │ ├── get-shards │ │ └── action.yml │ └── nf-test │ │ └── action.yml └── workflows │ ├── awsfulltest.yml │ ├── awstest.yml │ ├── branch.yml │ ├── build_reference.yml │ ├── clean-up.yml │ ├── download_pipeline.yml │ ├── fix-linting.yml │ ├── linting.yml │ ├── linting_comment.yml │ ├── nf-test.yml │ ├── release-announcements.yml │ └── template_version_comment.yml ├── .gitignore ├── .gitpod.yml ├── .nf-core.yml ├── .pre-commit-config.yaml ├── .prettierignore ├── .prettierrc.yml ├── .vscode └── settings.json ├── CHANGELOG.md ├── CITATIONS.md ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── assets ├── adaptivecard.json ├── email_template.html ├── email_template.txt ├── methods_description_template.yml ├── multiqc_config.yml ├── nf-core-references_logo_light.png ├── sendmail_template.txt └── slackreport.json ├── conf ├── base.config ├── modules.config ├── prepare_genome.config ├── test.config └── test_full.config ├── docs ├── README.md ├── images │ ├── nf-core-references_logo_dark.png │ ├── nf-core-references_logo_dark.svg │ ├── nf-core-references_logo_light.png │ ├── nf-core-references_logo_light.svg │ ├── nf-core-references_metro_map_color.png │ └── nf-core-references_metro_map_color.svg ├── output.md ├── retreat-brainstrorming.md └── usage.md ├── main.nf ├── modules.json ├── modules ├── nf-core │ ├── bbmap │ │ └── bbsplit │ │ │ ├── environment.yml │ │ │ ├── main.nf │ │ │ ├── meta.yml │ │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ ├── custom │ │ └── catadditionalfasta │ │ │ ├── environment.yml │ │ │ ├── main.nf │ │ │ ├── meta.yml │ │ │ ├── templates │ │ │ └── fasta2gtf.py │ │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ ├── gunzip │ │ ├── environment.yml │ │ ├── 
main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── nextflow.config │ ├── multiqc │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── nextflow.config │ ├── sortmerna │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── indexing_only.config │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── premade_index.config │ ├── untar │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ └── unzip │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap └── nf-side │ ├── bowtie │ └── build │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── bowtie2 │ └── build │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── bwa │ └── index │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── bwamem2 │ └── index │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── dragmap │ └── hashtable │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── gatk4 │ └── createsequencedictionary │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── gawk │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── nextflow.config │ ├── gffread │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow-fasta.config │ │ ├── nextflow-gff3.config │ │ └── nextflow.config │ ├── hisat2 
│ ├── build │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ └── extractsplicesites │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── kallisto │ └── index │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── msisensorpro │ └── scan │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── rsem │ └── preparereference │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── salmon │ └── index │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── samtools │ └── faidx │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── nextflow2.config │ ├── star │ └── genomegenerate │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ └── tabix │ ├── bgziptabix │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── tabix_csi.config │ │ └── tabix_tbi.config │ └── tabix │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ ├── main.nf.test │ ├── main.nf.test.snap │ ├── tabix_bed.config │ ├── tabix_gff.config │ ├── tabix_vcf_csi.config │ └── tabix_vcf_tbi.config ├── nextflow.config ├── nextflow_schema.json ├── nf-test.config ├── ro-crate-metadata.json ├── subworkflows ├── local │ ├── datasheet_to_channel │ │ └── main.nf │ └── utils_nfcore_references_pipeline │ │ └── main.nf ├── nf-core │ ├── archive_extract │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ ├── utils_nextflow_pipeline │ │ 
├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.function.nf.test │ │ │ ├── main.function.nf.test.snap │ │ │ ├── main.workflow.nf.test │ │ │ └── nextflow.config │ ├── utils_nfcore_pipeline │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.function.nf.test │ │ │ ├── main.function.nf.test.snap │ │ │ ├── main.workflow.nf.test │ │ │ ├── main.workflow.nf.test.snap │ │ │ └── nextflow.config │ └── utils_nfschema_plugin │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── nextflow.config │ │ └── nextflow_schema.json └── nf-side │ ├── prepare_genome_dnaseq │ ├── main.nf │ ├── nextflow.config │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── nextflow.config │ ├── prepare_genome_rnaseq │ ├── main.nf │ ├── nextflow.config │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── nextflow.config │ └── utils_references │ ├── README.md │ ├── main.nf │ ├── meta.yml │ ├── schema_references.json │ └── tests │ ├── main.nf.test │ ├── main.nf.test.snap │ └── nextflow.config ├── tests ├── .nftignore ├── createsequencedictionary.nf.test ├── createsequencedictionary.nf.test.snap ├── default.nf.test ├── default.nf.test.snap ├── hisat2.nf.test ├── hisat2.nf.test.snap ├── kallisto.nf.test ├── kallisto.nf.test.snap ├── multiple.nf.test ├── multiple.nf.test.snap ├── nextflow.config ├── rsem.nf.test ├── rsem.nf.test.snap ├── salmon.nf.test ├── salmon.nf.test.snap ├── samtools.nf.test ├── samtools.nf.test.snap ├── sarek.nf.test ├── sarek.nf.test.snap ├── tabix.nf.test ├── tabix.nf.test.snap ├── wbcel235.nf.test └── wbcel235.nf.test.snap ├── tower.yml └── workflows └── references └── main.nf /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nfcore", 3 | "image": "nfcore/gitpod:latest", 4 | "remoteUser": "gitpod", 5 | "runArgs": ["--privileged"], 6 | 7 | // Configure tool-specific properties. 
8 | "customizations": { 9 | // Configure properties specific to VS Code. 10 | "vscode": { 11 | // Set *default* container specific settings.json values on container create. 12 | "settings": { 13 | "python.defaultInterpreterPath": "/opt/conda/bin/python" 14 | }, 15 | 16 | // Add the IDs of extensions you want installed when the container is created. 17 | "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | insert_final_newline = true 7 | trim_trailing_whitespace = true 8 | indent_size = 4 9 | indent_style = space 10 | 11 | [*.{md,yml,yaml,html,css,scss,js}] 12 | indent_size = 2 13 | 14 | # These files are edited and tested upstream in nf-core/modules 15 | [/modules/nf-core/**] 16 | charset = unset 17 | end_of_line = unset 18 | insert_final_newline = unset 19 | trim_trailing_whitespace = unset 20 | indent_style = unset 21 | [/subworkflows/nf-core/**] 22 | charset = unset 23 | end_of_line = unset 24 | insert_final_newline = unset 25 | trim_trailing_whitespace = unset 26 | indent_style = unset 27 | 28 | # These files are edited and tested upstream in nf-side/modules 29 | [/modules/nf-side/**] 30 | charset = unset 31 | end_of_line = unset 32 | insert_final_newline = unset 33 | trim_trailing_whitespace = unset 34 | indent_style = unset 35 | [/subworkflows/nf-side/**] 36 | charset = unset 37 | end_of_line = unset 38 | insert_final_newline = unset 39 | trim_trailing_whitespace = unset 40 | indent_style = unset 41 | 42 | [/assets/email*] 43 | indent_size = unset 44 | 45 | # ignore python and markdown 46 | [*.{py,md}] 47 | indent_style = unset 48 | 49 | # ignore ro-crate metadata files 50 | [**/ro-crate-metadata.json] 51 | insert_final_newline = unset 52 | 53 | 
# ignore nf-test snapshots 54 | [*.nf.test.snap] 55 | charset = unset 56 | end_of_line = unset 57 | insert_final_newline = unset 58 | trim_trailing_whitespace = unset 59 | indent_style = unset 60 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.config linguist-language=nextflow 2 | *.nf.test linguist-language=nextflow 3 | modules/nf-core/** linguist-generated 4 | subworkflows/nf-core/** linguist-generated 5 | -------------------------------------------------------------------------------- /.github/.dockstore.yml: -------------------------------------------------------------------------------- 1 | # Dockstore config version, not pipeline version 2 | version: 1.2 3 | workflows: 4 | - subclass: nfl 5 | primaryDescriptorPath: /nextflow.config 6 | publish: True 7 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @maxulysse 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Report something that is broken or incorrect 3 | labels: bug 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Before you post this issue, please check the documentation: 9 | 10 | - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) 11 | - [nf-core/references pipeline documentation](https://nf-co.re/references/usage) 12 | - type: textarea 13 | id: description 14 | attributes: 15 | label: Description of the bug 16 | description: A clear and concise description of what the bug is. 
17 | validations: 18 | required: true 19 | 20 | - type: textarea 21 | id: command_used 22 | attributes: 23 | label: Command used and terminal output 24 | description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal. 25 | render: console 26 | placeholder: | 27 | $ nextflow run ... 28 | 29 | Some output where something broke 30 | 31 | - type: textarea 32 | id: files 33 | attributes: 34 | label: Relevant files 35 | description: | 36 | Please drag and drop the relevant files here. Create a `.zip` archive if the extension is not allowed. 37 | Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files. 38 | 39 | - type: textarea 40 | id: system 41 | attributes: 42 | label: System information 43 | description: | 44 | * Nextflow version _(eg. 23.04.0)_ 45 | * Hardware _(eg. HPC, Desktop, Cloud)_ 46 | * Executor _(eg. slurm, local, awsbatch)_ 47 | * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_ 48 | * OS _(eg. CentOS Linux, macOS, Linux Mint)_ 49 | * Version of nf-core/references _(eg. 
1.1, 1.5, 1.8.2)_ 50 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Join nf-core 3 | url: https://nf-co.re/join 4 | about: Please join the nf-core community here 5 | - name: "Slack #references channel" 6 | url: https://nfcore.slack.com/channels/references 7 | about: Discussion about the nf-core/references pipeline 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for the nf-core/references pipeline 3 | labels: enhancement 4 | body: 5 | - type: textarea 6 | id: description 7 | attributes: 8 | label: Description of feature 9 | description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered. 10 | validations: 11 | required: true 12 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 13 | 14 | ## PR checklist 15 | 16 | - [ ] This comment contains a description of changes (with reason). 17 | - [ ] If you've fixed a bug or added code that should be tested, add tests! 18 | - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/references/tree/main/.github/CONTRIBUTING.md) 19 | - [ ] If necessary, also make a PR on the nf-core/references _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. 20 | - [ ] Make sure your code lints (`nf-core pipelines lint`). 21 | - [ ] Ensure the test suite passes (`nextflow run . 
-profile test,docker --outdir `). 22 | - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). 23 | - [ ] Usage Documentation in `docs/usage.md` is updated. 24 | - [ ] Output Documentation in `docs/output.md` is updated. 25 | - [ ] `CHANGELOG.md` is updated. 26 | - [ ] `README.md` is updated (including new tool citations and authors/contributors). 27 | -------------------------------------------------------------------------------- /.github/actions/get-shards/action.yml: -------------------------------------------------------------------------------- 1 | name: "Get number of shards" 2 | description: "Get the number of nf-test shards for the current CI job" 3 | inputs: 4 | max_shards: 5 | description: "Maximum number of shards allowed" 6 | required: true 7 | paths: 8 | description: "Component paths to test" 9 | required: false 10 | tags: 11 | description: "Tags to pass as argument for nf-test --tag parameter" 12 | required: false 13 | outputs: 14 | shard: 15 | description: "Array of shard numbers" 16 | value: ${{ steps.shards.outputs.shard }} 17 | total_shards: 18 | description: "Total number of shards" 19 | value: ${{ steps.shards.outputs.total_shards }} 20 | runs: 21 | using: "composite" 22 | steps: 23 | - name: Install nf-test 24 | uses: nf-core/setup-nf-test@v1 25 | with: 26 | version: ${{ env.NFT_VER }} 27 | - name: Get number of shards 28 | id: shards 29 | shell: bash 30 | run: | 31 | # Run nf-test with dynamic parameter 32 | nftest_output=$(nf-test test \ 33 | --profile +docker \ 34 | $(if [ -n "${{ inputs.tags }}" ]; then echo "--tag ${{ inputs.tags }}"; fi) \ 35 | --dry-run \ 36 | --ci \ 37 | --changed-since HEAD^) || { 38 | echo "nf-test command failed with exit code $?" 
39 | echo "Full output: $nftest_output" 40 | exit 1 41 | } 42 | echo "nf-test dry-run output: $nftest_output" 43 | 44 | # Default values for shard and total_shards 45 | shard="[]" 46 | total_shards=0 47 | 48 | # Check if there are related tests 49 | if echo "$nftest_output" | grep -q 'No tests to execute'; then 50 | echo "No related tests found." 51 | else 52 | # Extract the number of related tests 53 | number_of_shards=$(echo "$nftest_output" | sed -n 's|.*Executed \([0-9]*\) tests.*|\1|p') 54 | if [[ -n "$number_of_shards" && "$number_of_shards" -gt 0 ]]; then 55 | shards_to_run=$(( $number_of_shards < ${{ inputs.max_shards }} ? $number_of_shards : ${{ inputs.max_shards }} )) 56 | shard=$(seq 1 "$shards_to_run" | jq -R . | jq -c -s .) 57 | total_shards="$shards_to_run" 58 | else 59 | echo "Unexpected output format. Falling back to default values." 60 | fi 61 | fi 62 | 63 | # Write to GitHub Actions outputs 64 | echo "shard=$shard" >> $GITHUB_OUTPUT 65 | echo "total_shards=$total_shards" >> $GITHUB_OUTPUT 66 | 67 | # Debugging output 68 | echo "Final shard array: $shard" 69 | echo "Total number of shards: $total_shards" 70 | -------------------------------------------------------------------------------- /.github/workflows/awsfulltest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS full size tests 2 | # This workflow is triggered on PRs opened against the main/master branch. 3 | # It can be additionally triggered manually with GitHub actions workflow dispatch button. 
4 | # It runs the -profile 'test_full' on AWS batch 5 | 6 | on: 7 | workflow_dispatch: 8 | pull_request_review: 9 | types: [submitted] 10 | release: 11 | types: [published] 12 | 13 | jobs: 14 | run-platform: 15 | name: Run AWS full tests 16 | # run only if the PR is approved by at least 2 reviewers and against the master/main branch or manually triggered 17 | if: github.repository == 'nf-core/references' && github.event.review.state == 'approved' && (github.event.pull_request.base.ref == 'master' || github.event.pull_request.base.ref == 'main') || github.event_name == 'workflow_dispatch' 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Set revision variable 21 | id: revision 22 | run: | 23 | echo "revision=${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'release') && github.sha || 'dev' }}" >> "$GITHUB_OUTPUT" 24 | 25 | - name: Launch workflow via Seqera Platform 26 | uses: seqeralabs/action-tower-launch@v2 27 | # TODO nf-core: You can customise AWS full pipeline tests as required 28 | # Add full size test data (but still relatively small datasets for few samples) 29 | # on the `test_full.config` test runs with only one set of parameters 30 | with: 31 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 32 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 33 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 34 | revision: ${{ steps.revision.outputs.revision }} 35 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/references/work-${{ steps.revision.outputs.revision }} 36 | parameters: | 37 | { 38 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 39 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/references/results-${{ steps.revision.outputs.revision }}" 40 | } 41 | profiles: test_full 42 | 43 | - uses: actions/upload-artifact@v4 44 | with: 45 | name: Seqera Platform debug log file 46 | path: | 47 | seqera_platform_action_*.log 48 | seqera_platform_action_*.json 49 | 
-------------------------------------------------------------------------------- /.github/workflows/awstest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS test 2 | # This workflow can be triggered manually with the GitHub actions workflow dispatch button. 3 | # It runs the -profile 'test' on AWS batch 4 | 5 | on: 6 | workflow_dispatch: 7 | jobs: 8 | run-platform: 9 | name: Run AWS tests 10 | if: github.repository == 'nf-core/references' 11 | runs-on: ubuntu-latest 12 | steps: 13 | # Launch workflow using Seqera Platform CLI tool action 14 | - name: Launch workflow via Seqera Platform 15 | uses: seqeralabs/action-tower-launch@v2 16 | with: 17 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 18 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 19 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 20 | revision: ${{ github.sha }} 21 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/references/work-${{ github.sha }} 22 | parameters: | 23 | { 24 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/references/results-test-${{ github.sha }}" 25 | } 26 | profiles: test 27 | 28 | - uses: actions/upload-artifact@v4 29 | with: 30 | name: Seqera Platform debug log file 31 | path: | 32 | seqera_platform_action_*.log 33 | seqera_platform_action_*.json 34 | -------------------------------------------------------------------------------- /.github/workflows/branch.yml: -------------------------------------------------------------------------------- 1 | name: nf-core branch protection 2 | # This workflow is triggered on PRs to `main`/`master` branch on the repository 3 | # It fails when someone tries to make a PR against the nf-core `main`/`master` branch instead of `dev` 4 | on: 5 | pull_request_target: 6 | branches: 7 | - main 8 | - master 9 | 10 | jobs: 11 | test: 12 | runs-on: ubuntu-latest 13 | steps: 14 | # PRs to the nf-core repo main/master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches 15 
| - name: Check PRs 16 | if: github.repository == 'nf-core/references' 17 | run: | 18 | { [[ ${{github.event.pull_request.head.repo.full_name }} == nf-core/references ]] && [[ $GITHUB_HEAD_REF == "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] 19 | 20 | # If the above check failed, post a comment on the PR explaining the failure 21 | # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets 22 | - name: Post PR comment 23 | if: failure() 24 | uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 25 | with: 26 | message: | 27 | ## This PR is against the `${{github.event.pull_request.base.ref}}` branch :x: 28 | 29 | * Do not close this PR 30 | * Click _Edit_ and change the `base` to `dev` 31 | * This CI test will remain failed until you push a new commit 32 | 33 | --- 34 | 35 | Hi @${{ github.event.pull_request.user.login }}, 36 | 37 | It looks like this pull-request has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) ${{github.event.pull_request.base.ref}} branch. 38 | The ${{github.event.pull_request.base.ref}} branch on nf-core repositories should always contain code from the latest release. 39 | Because of this, PRs to ${{github.event.pull_request.base.ref}} are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. 40 | 41 | You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. 42 | Note that even after this, the test will continue to show as failing until you push a new commit. 43 | 44 | Thanks again for your contribution! 
45 | repo-token: ${{ secrets.GITHUB_TOKEN }} 46 | allow-repeats: false 47 | -------------------------------------------------------------------------------- /.github/workflows/build_reference.yml: -------------------------------------------------------------------------------- 1 | name: Build reference genomes that changed 2 | on: 3 | push: 4 | branches: 5 | - main 6 | paths: 7 | - "assets/genomes/*.yml" 8 | 9 | jobs: 10 | run-tower: 11 | name: Run AWS full tests 12 | if: github.repository == 'nf-core/references' 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Find changed genomes 16 | id: changed-genome-files 17 | uses: tj-actions/changed-files@v42 18 | with: 19 | files: | 20 | assets/genomes/*.yml 21 | - name: Concatenate all the yamls together 22 | if: steps.changed-genome-files.outputs.any_changed == 'true' 23 | env: 24 | CHANGED_FILES: ${{ steps.changed-genome-files.outputs.all_changed_files }} 25 | run: cat ${CHANGED_FILES} > samplesheet.yml 26 | # - name: Upload samplesheet.yml to s3 or Tower Datasets 27 | # run: TODO 28 | - name: Launch workflow via tower 29 | uses: seqeralabs/action-tower-launch@v2 30 | with: 31 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 32 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 33 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 34 | revision: ${{ github.sha }} 35 | workdir: s3://${{ secrets.AWS_S3_SCRATCH_BUCKET }}/work 36 | parameters: | 37 | { 38 | "input": "samplesheet.yml", 39 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 40 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/references/results-${{ github.sha }}" 41 | } 42 | profiles: cloud 43 | 44 | - uses: actions/upload-artifact@v4 45 | with: 46 | name: Tower debug log file 47 | path: | 48 | tower_action_*.log 49 | tower_action_*.json 50 | -------------------------------------------------------------------------------- /.github/workflows/clean-up.yml: -------------------------------------------------------------------------------- 1 | name: "Close 
user-tagged issues and PRs" 2 | on: 3 | schedule: 4 | - cron: "0 0 * * 0" # Once a week 5 | 6 | jobs: 7 | clean-up: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 14 | with: 15 | stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." 16 | stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 17 | close-issue-message: "This issue was closed because it has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then staled for 20 days with no activity." 18 | days-before-stale: 30 19 | days-before-close: 20 20 | days-before-pr-close: -1 21 | any-of-labels: "awaiting-changes,awaiting-feedback" 22 | exempt-issue-labels: "WIP" 23 | exempt-pr-labels: "WIP" 24 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 25 | -------------------------------------------------------------------------------- /.github/workflows/linting_comment.yml: -------------------------------------------------------------------------------- 1 | name: nf-core linting comment 2 | # This workflow is triggered after the linting action is complete 3 | # It posts an automated comment to the PR, even if the PR is coming from a fork 4 | 5 | on: 6 | workflow_run: 7 | workflows: ["nf-core linting"] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Download lint results 14 | uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 15 | with: 16 | workflow: linting.yml 17 | workflow_conclusion: completed 18 | 19 | - name: Get PR number 20 | id: pr_number 21 | run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT 22 | 23 | - name: Post 
PR comment 24 | uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31 # v2 25 | with: 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | number: ${{ steps.pr_number.outputs.pr_number }} 28 | path: linting-logs/lint_results.md 29 | -------------------------------------------------------------------------------- /.github/workflows/release-announcements.yml: -------------------------------------------------------------------------------- 1 | name: release-announcements 2 | # Automatic release toot and tweet announcements 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | toot: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: get topics and convert to hashtags 13 | id: get_topics 14 | run: | 15 | echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" | sed 's/-//g' >> $GITHUB_OUTPUT 16 | 17 | - uses: rzr/fediverse-action@master 18 | with: 19 | access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }} 20 | host: "mstdn.science" # custom host if not "mastodon.social" (default) 21 | # GitHub event payload 22 | # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release 23 | message: | 24 | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! 
37 | 38 | Please see the changelog: ${{ github.event.release.html_url }} 39 | env: 40 | BSKY_IDENTIFIER: ${{ secrets.BSKY_IDENTIFIER }} 41 | BSKY_PASSWORD: ${{ secrets.BSKY_PASSWORD }} 42 | # 43 | -------------------------------------------------------------------------------- /.github/workflows/template_version_comment.yml: -------------------------------------------------------------------------------- 1 | name: nf-core template version comment 2 | # This workflow is triggered on PRs to check if the pipeline template version matches the latest nf-core version. 3 | # It posts a comment to the PR, even if it comes from a fork. 4 | 5 | on: pull_request_target 6 | 7 | jobs: 8 | template_version: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Check out pipeline code 12 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 13 | with: 14 | ref: ${{ github.event.pull_request.head.sha }} 15 | 16 | - name: Read template version from .nf-core.yml 17 | uses: nichmor/minimal-read-yaml@v0.0.2 18 | id: read_yml 19 | with: 20 | config: ${{ github.workspace }}/.nf-core.yml 21 | 22 | - name: Install nf-core 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }} 26 | 27 | - name: Check nf-core outdated 28 | id: nf_core_outdated 29 | run: echo "OUTPUT=$(pip list --outdated | grep nf-core)" >> ${GITHUB_ENV} 30 | 31 | - name: Post nf-core template version comment 32 | uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 33 | if: | 34 | contains(env.OUTPUT, 'nf-core') 35 | with: 36 | repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} 37 | allow-repeats: false 38 | message: | 39 | > [!WARNING] 40 | > Newer version of the nf-core template is available. 41 | > 42 | > Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. 43 | > Please update your pipeline to the latest version. 
44 | > 45 | > For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). 46 | # 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .nextflow* 2 | work/ 3 | data/ 4 | results/ 5 | .DS_Store 6 | testing/ 7 | testing* 8 | *.pyc 9 | null/ 10 | .idea 11 | *.log 12 | tmp/ 13 | .nf-test 14 | .vscode 15 | -------------------------------------------------------------------------------- /.gitpod.yml: -------------------------------------------------------------------------------- 1 | image: nfcore/gitpod:latest 2 | tasks: 3 | - name: Update Nextflow and setup pre-commit 4 | command: | 5 | pre-commit install --install-hooks 6 | nextflow self-update 7 | 8 | vscode: 9 | extensions: 10 | - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack 11 | -------------------------------------------------------------------------------- /.nf-core.yml: -------------------------------------------------------------------------------- 1 | lint: 2 | actions_awsfulltest: false 3 | files_exist: 4 | - .github/workflows/ci.yml 5 | - conf/igenomes.config 6 | - conf/igenomes_ignored.config 7 | files_unchanged: 8 | - .github/PULL_REQUEST_TEMPLATE.md 9 | - .github/workflows/linting.yml 10 | - assets/nf-core-references_logo_light.png 11 | - docs/images/nf-core-references_logo_light.png 12 | - docs/images/nf-core-references_logo_dark.png 13 | nf_core_version: 3.2.1 14 | repository_type: pipeline 15 | template: 16 | author: "@maxulysse" 17 | description: help community build references 18 | force: false 19 | is_nfcore: true 20 | name: references 21 | org: nf-core 22 | outdir: . 
23 | skip_features: 24 | - fastqc 25 | - igenomes 26 | version: 1.0dev 27 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/mirrors-prettier 3 | rev: "v3.1.0" 4 | hooks: 5 | - id: prettier 6 | additional_dependencies: 7 | - prettier@3.2.5 8 | 9 | - repo: https://github.com/editorconfig-checker/editorconfig-checker.python 10 | rev: "3.1.2" 11 | hooks: 12 | - id: editorconfig-checker 13 | alias: ec 14 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | email_template.html 2 | adaptivecard.json 3 | slackreport.json 4 | .nextflow* 5 | work/ 6 | data/ 7 | results/ 8 | .DS_Store 9 | testing/ 10 | testing* 11 | *.pyc 12 | bin/ 13 | ro-crate-metadata.json 14 | -------------------------------------------------------------------------------- /.prettierrc.yml: -------------------------------------------------------------------------------- 1 | printWidth: 120 2 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "markdown.styles": ["public/vscode_markdown.css"] 3 | } 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) The nf-core/references team 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /assets/email_template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | nf-core/references Pipeline Report 9 | 10 | 11 |
12 | 13 | 14 | 15 |

nf-core/references ${version}

16 |

Run Name: $runName

17 | 18 | <% if (!success){ 19 | out << """ 20 |
21 |

nf-core/references execution completed unsuccessfully!

22 |

The exit status of the task that caused the workflow execution to fail was: $exitStatus.

23 |

The full error message was:

24 |
${errorReport}
25 |
26 | """ 27 | } else { 28 | out << """ 29 |
30 | nf-core/references execution completed successfully! 31 |
32 | """ 33 | } 34 | %> 35 | 36 |

The workflow was completed at $dateComplete (duration: $duration)

37 |

The command used to launch the workflow was as follows:

38 |
$commandLine
39 | 40 |

Pipeline Configuration:

41 | 42 | 43 | <% out << summary.collect{ k,v -> "" }.join("\n") %> 44 | 45 |
$k
$v
46 | 47 |

nf-core/references

48 |

https://github.com/nf-core/references

49 | 50 |
51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /assets/email_template.txt: -------------------------------------------------------------------------------- 1 | ---------------------------------------------------- 2 | ,--./,-. 3 | ___ __ __ __ ___ /,-._.--~\\ 4 | |\\ | |__ __ / ` / \\ |__) |__ } { 5 | | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, 6 | `._,._,' 7 | nf-core/references ${version} 8 | ---------------------------------------------------- 9 | Run Name: $runName 10 | 11 | <% if (success){ 12 | out << "## nf-core/references execution completed successfully! ##" 13 | } else { 14 | out << """#################################################### 15 | ## nf-core/references execution completed unsuccessfully! ## 16 | #################################################### 17 | The exit status of the task that caused the workflow execution to fail was: $exitStatus. 18 | The full error message was: 19 | 20 | ${errorReport} 21 | """ 22 | } %> 23 | 24 | 25 | The workflow was completed at $dateComplete (duration: $duration) 26 | 27 | The command used to launch the workflow was as follows: 28 | 29 | $commandLine 30 | 31 | 32 | 33 | Pipeline Configuration: 34 | ----------------------- 35 | <% out << summary.collect{ k,v -> " - $k: $v" }.join("\n") %> 36 | 37 | -- 38 | nf-core/references 39 | https://github.com/nf-core/references 40 | -------------------------------------------------------------------------------- /assets/multiqc_config.yml: -------------------------------------------------------------------------------- 1 | report_comment: > 2 | This report has been generated by the nf-core/references 3 | analysis pipeline. For information about how to interpret these results, please see the 4 | documentation. 
5 | report_section_order: 6 | "nf-core-references-methods-description": 7 | order: -1000 8 | software_versions: 9 | order: -1001 10 | "nf-core-references-summary": 11 | order: -1002 12 | 13 | export_plots: true 14 | 15 | disable_version_detection: true 16 | -------------------------------------------------------------------------------- /assets/nf-core-references_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/references/527f23b2d2d623eeffb7cf9ba468e89d29ba350c/assets/nf-core-references_logo_light.png -------------------------------------------------------------------------------- /assets/sendmail_template.txt: -------------------------------------------------------------------------------- 1 | To: $email 2 | Subject: $subject 3 | Mime-Version: 1.0 4 | Content-Type: multipart/related;boundary="nfcoremimeboundary" 5 | 6 | --nfcoremimeboundary 7 | Content-Type: text/html; charset=utf-8 8 | 9 | $email_html 10 | 11 | --nfcoremimeboundary 12 | Content-Type: image/png;name="nf-core-references_logo.png" 13 | Content-Transfer-Encoding: base64 14 | Content-ID: 15 | Content-Disposition: inline; filename="nf-core-references_logo_light.png" 16 | 17 | <% out << new File("$projectDir/assets/nf-core-references_logo_light.png"). 18 | bytes. 19 | encodeBase64(). 20 | toString(). 21 | tokenize( '\n' )*. 22 | toList()*. 23 | collate( 76 )*. 24 | collect { it.join() }. 25 | flatten(). 26 | join( '\n' ) %> 27 | 28 | <% 29 | if (mqcFile){ 30 | def mqcFileObj = new File("$mqcFile") 31 | if (mqcFileObj.length() < mqcMaxSize){ 32 | out << """ 33 | --nfcoremimeboundary 34 | Content-Type: text/html; name=\"multiqc_report\" 35 | Content-Transfer-Encoding: base64 36 | Content-ID: 37 | Content-Disposition: attachment; filename=\"${mqcFileObj.getName()}\" 38 | 39 | ${mqcFileObj. 40 | bytes. 41 | encodeBase64(). 42 | toString(). 43 | tokenize( '\n' )*. 44 | toList()*. 45 | collate( 76 )*. 
46 | collect { it.join() }. 47 | flatten(). 48 | join( '\n' )} 49 | """ 50 | }} 51 | %> 52 | 53 | --nfcoremimeboundary-- 54 | -------------------------------------------------------------------------------- /assets/slackreport.json: -------------------------------------------------------------------------------- 1 | { 2 | "attachments": [ 3 | { 4 | "fallback": "Plain-text summary of the attachment.", 5 | "color": "<% if (success) { %>good<% } else { %>danger<%} %>", 6 | "author_name": "nf-core/references ${version} - ${runName}", 7 | "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", 8 | "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", 9 | "fields": [ 10 | { 11 | "title": "Command used to launch the workflow", 12 | "value": "```${commandLine}```", 13 | "short": false 14 | } 15 | <% 16 | if (!success) { %> 17 | , 18 | { 19 | "title": "Full error message", 20 | "value": "```${errorReport}```", 21 | "short": false 22 | }, 23 | { 24 | "title": "Pipeline configuration", 25 | "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? 
("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", 26 | "short": false 27 | } 28 | <% } 29 | %> 30 | ], 31 | "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /conf/base.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | nf-core/references Nextflow base config file 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | A 'blank slate' config file, appropriate for general use on most high performance 6 | compute environments. Assumes that all software is installed and available on 7 | the PATH. Runs in `local` mode - all jobs will be run on the logged in environment. 8 | ---------------------------------------------------------------------------------------- 9 | */ 10 | 11 | process { 12 | 13 | cpus = { 1 * task.attempt } 14 | memory = { 6.GB * task.attempt } 15 | time = { 4.h * task.attempt } 16 | 17 | errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } 18 | maxRetries = 1 19 | maxErrors = '-1' 20 | 21 | // Process-specific resource requirements 22 | // NOTE - Please try and reuse the labels below as much as possible. 23 | // These labels are used and recognised by default in DSL2 files hosted on nf-core/modules. 24 | // If possible, it would be nice to keep the same label naming convention when 25 | // adding in your local modules too. 
26 | // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors 27 | withLabel:process_single { 28 | cpus = { 1 } 29 | memory = { 6.GB * task.attempt } 30 | time = { 4.h * task.attempt } 31 | } 32 | withLabel:process_low { 33 | cpus = { 2 * task.attempt } 34 | memory = { 12.GB * task.attempt } 35 | time = { 4.h * task.attempt } 36 | } 37 | withLabel:process_medium { 38 | cpus = { 6 * task.attempt } 39 | memory = { 36.GB * task.attempt } 40 | time = { 8.h * task.attempt } 41 | } 42 | withLabel:process_high { 43 | cpus = { 12 * task.attempt } 44 | memory = { 72.GB * task.attempt } 45 | time = { 16.h * task.attempt } 46 | } 47 | withLabel:process_long { 48 | time = { 20.h * task.attempt } 49 | } 50 | withLabel:process_high_memory { 51 | memory = { 200.GB * task.attempt } 52 | } 53 | withLabel:error_ignore { 54 | errorStrategy = 'ignore' 55 | } 56 | withLabel:error_retry { 57 | errorStrategy = 'retry' 58 | maxRetries = 2 59 | } 60 | errorStrategy = 'ignore' 61 | } 62 | -------------------------------------------------------------------------------- /conf/modules.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Config file for defining DSL2 per module options 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Available keys to override module options: 6 | ext.args = Additional arguments appended to command in module. 7 | ext.args2 = Second set of arguments appended to command in module (multi-tool modules). 8 | ext.args3 = Third set of arguments appended to command in module (multi-tool modules). 9 | ext.prefix = File name prefix for output files. 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | withName: 'MULTIQC' { 15 | ext.args = { params.multiqc_title ? 
"--title \"$params.multiqc_title\"" : '' } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /conf/test.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/references -profile test, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h' 18 | ] 19 | withName:BWAMEM2_INDEX { 20 | memory = { 6.GB } 21 | } 22 | } 23 | 24 | params { 25 | config_profile_name = 'Test profile' 26 | config_profile_description = 'Minimal test dataset to check pipeline function' 27 | 28 | // Input data 29 | input = 'https://raw.githubusercontent.com/nf-core/references-datasheets/latest/genomes_source/Homo_sapiens/nf-core/GRCh38_chr21_no_gtf.yml' 30 | 31 | // Modules testdata base path 32 | modules_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/' 33 | } 34 | -------------------------------------------------------------------------------- /conf/test_full.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 
6 | 7 | Use as follows: 8 | nextflow run nf-core/references -profile test, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | params { 14 | config_profile_name = 'Test profile' 15 | config_profile_description = 'Full test dataset to check pipeline function' 16 | 17 | // Input data 18 | input = 'https://raw.githubusercontent.com/nf-core/test-datasets/refs/heads/references/references/GRCh38_chr21/GRCh38_chr21.yml' 19 | } 20 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # nf-core/references: Documentation 2 | 3 | The nf-core/references documentation is split into the following pages: 4 | 5 | - [Usage](usage.md) 6 | - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. 7 | - [Output](output.md) 8 | - An overview of the different results produced by the pipeline and how to interpret them. 
9 | 10 | You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) 11 | -------------------------------------------------------------------------------- /docs/images/nf-core-references_logo_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/references/527f23b2d2d623eeffb7cf9ba468e89d29ba350c/docs/images/nf-core-references_logo_dark.png -------------------------------------------------------------------------------- /docs/images/nf-core-references_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/references/527f23b2d2d623eeffb7cf9ba468e89d29ba350c/docs/images/nf-core-references_logo_light.png -------------------------------------------------------------------------------- /docs/images/nf-core-references_metro_map_color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/references/527f23b2d2d623eeffb7cf9ba468e89d29ba350c/docs/images/nf-core-references_metro_map_color.png -------------------------------------------------------------------------------- /docs/output.md: -------------------------------------------------------------------------------- 1 | # nf-core/references: Output 2 | 3 | ## Introduction 4 | 5 | This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline. 6 | 7 | The directories listed below will be created in the results directory after the pipeline has finished. All paths are relative to the top-level results directory. 
8 | 9 | 10 | 11 | ## Pipeline overview 12 | 13 | The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: 14 | 15 | - [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline 16 | - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution 17 | 18 | ### MultiQC 19 | 20 |
21 | Output files 22 | 23 | - `multiqc/` 24 | - `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser. 25 | - `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline. 26 | - `multiqc_plots/`: directory containing static images from the report in various formats. 27 | 28 |
29 | 30 | [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. 31 | 32 | Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see .### Pipeline information 33 | 34 |
35 | Output files 36 | 37 | - `pipeline_info/` 38 | - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. 39 | - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameter's are used when running the pipeline. 40 | - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. 41 | - Parameters used by the pipeline run: `params.json`. 42 | 43 |
44 | 45 | [Nextflow](https://www.nextflow.io/docs/latest/tracing.html) provides excellent functionality for generating various reports relevant to the running and execution of the pipeline. This will allow you to troubleshoot errors with the running of the pipeline, and also provide you with other information such as launch commands, run times and resource usage. 46 | -------------------------------------------------------------------------------- /docs/retreat-brainstrorming.md: -------------------------------------------------------------------------------- 1 | # Brainstorming 2 | 3 | ## Generate 4 | 5 | - md5 checksums (validate downloads if possible) 6 | 7 | ## Track within the pipeline 8 | 9 | - software_versions 10 | - copy of command.sh (or just save Nextflow report?) 11 | - Asset input paths 12 | - Show skipped reference types if already existed 13 | - Allow appending to the readme (treat like changelog), in case new asset types added 14 | 15 | ## Strategy 16 | 17 | When adding a new asset, build for the latest reference versions only. Do all genomes. 18 | Optionally backfill old releases on demand if specifically triggered. 
19 | -------------------------------------------------------------------------------- /modules/nf-core/bbmap/bbsplit/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::bbmap=39.18 8 | - pigz=2.8 9 | -------------------------------------------------------------------------------- /modules/nf-core/custom/catadditionalfasta/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::python=3.12.2 8 | -------------------------------------------------------------------------------- /modules/nf-core/custom/catadditionalfasta/main.nf: -------------------------------------------------------------------------------- 1 | process CUSTOM_CATADDITIONALFASTA { 2 | tag "$meta.id" 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
6 | 'https://depot.galaxyproject.org/singularity/python:3.12' : 7 | 'biocontainers/python:3.12' }" 8 | 9 | input: 10 | tuple val(meta), path(fasta), path(gtf) 11 | tuple val(meta2), path(add_fasta) 12 | val biotype 13 | 14 | output: 15 | tuple val(meta), path("*/*.fasta") , emit: fasta 16 | tuple val(meta), path("*/*.gtf") , emit: gtf 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | template 'fasta2gtf.py' 24 | 25 | stub: 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | mkdir out 29 | touch out/genome_transcriptome.fasta 30 | touch out/genome_transcriptome.gtf 31 | 32 | cat <<-END_VERSIONS > versions.yml 33 | "${task.process}": 34 | python: \$(python --version | grep -v "Python ") 35 | END_VERSIONS 36 | """ 37 | } 38 | -------------------------------------------------------------------------------- /modules/nf-core/custom/catadditionalfasta/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json 2 | name: "custom_catadditionalfasta" 3 | description: Custom module to Add a new fasta file to an old one and update an associated 4 | GTF 5 | keywords: 6 | - fasta 7 | - gtf 8 | - genomics 9 | tools: 10 | - "custom": 11 | description: "Custom module to Add a new fasta file to an old one and update an 12 | associated GTF" 13 | tool_dev_url: "https://github.com/nf-core/modules/blob/master/modules/nf-core/custom/catadditionalfasta/main.nf" 14 | licence: ["MIT"] 15 | identifier: "" 16 | 17 | input: 18 | - - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing fasta information 22 | - fasta: 23 | type: file 24 | description: FASTA-format sequence file 25 | pattern: "*.{fasta,fa}" 26 | - gtf: 27 | type: file 28 | description: GTF-format annotation file for fasta 29 | pattern: "*.gtf" 30 | - - meta2: 31 | type: map 32 | 
description: | 33 | Groovy Map containing additional fasta information 34 | - add_fasta: 35 | type: file 36 | description: FASTA-format file of additional sequences 37 | pattern: "*.fa" 38 | - - biotype: 39 | type: string 40 | description: Biotype to apply to new GTF entries 41 | output: 42 | - fasta: 43 | - meta: 44 | type: map 45 | description: | 46 | Groovy Map containing fasta information 47 | - "*/*.fasta": 48 | type: file 49 | description: FASTA-format combined sequence file 50 | pattern: "*.{fasta,fa}" 51 | - gtf: 52 | - meta: 53 | type: map 54 | description: | 55 | Groovy Map containing fasta information 56 | - "*/*.gtf": 57 | type: file 58 | description: GTF-format combined annotation file 59 | pattern: "*.gtf" 60 | - versions: 61 | - versions.yml: 62 | type: file 63 | description: File containing software versions 64 | pattern: "versions.yml" 65 | authors: 66 | - "@pinin4fjords" 67 | maintainers: 68 | - "@pinin4fjords" 69 | -------------------------------------------------------------------------------- /modules/nf-core/custom/catadditionalfasta/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process CUSTOM_CATADDITIONALFASTA" 4 | script "../main.nf" 5 | process "CUSTOM_CATADDITIONALFASTA" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "custom" 10 | tag "custom/catadditionalfasta" 11 | 12 | test("sarscov2 - fastq - gtf") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = Channel.of([ 18 | [ id:'test', single_end:false ], 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true), 20 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.gtf', checkIfExists: true) 21 | ]) 22 | input[1] = Channel.of([ 23 | [ id:'test', single_end:false ], 24 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/transcriptome.fasta', checkIfExists: true) 25 | ]) 26 | input[2] = 
'test_biotype' 27 | """ 28 | } 29 | } 30 | 31 | then { 32 | assertAll( 33 | { assert process.success }, 34 | { assert snapshot( 35 | process.out.fasta, 36 | process.out.gtf, 37 | process.out.versions 38 | ).match() } 39 | ) 40 | } 41 | } 42 | 43 | test("sarscov2 - fastq - gtf - stub") { 44 | 45 | options "-stub" 46 | 47 | when { 48 | params { 49 | outdir = "$outputDir" 50 | } 51 | process { 52 | """ 53 | input[0] = Channel.of([ 54 | [ id:'test', single_end:false ], 55 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true), 56 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.gtf', checkIfExists: true) 57 | ]) 58 | input[1] = Channel.of([ 59 | [ id:'test', single_end:false ], 60 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/transcriptome.fasta', checkIfExists: true) 61 | ]) 62 | input[2] = 'test_biotype' 63 | """ 64 | } 65 | } 66 | 67 | then { 68 | assertAll( 69 | { assert process.success }, 70 | { assert snapshot(process.out).match() } 71 | ) 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::coreutils=9.5 8 | - conda-forge::grep=3.11 9 | - conda-forge::gzip=1.13 10 | - conda-forge::lbzip2=2.5 11 | - conda-forge::sed=4.8 12 | - conda-forge::tar=1.34 13 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/main.nf: -------------------------------------------------------------------------------- 1 | process GUNZIP { 2 | tag "${archive}" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container 
"${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/52/52ccce28d2ab928ab862e25aae26314d69c8e38bd41ca9431c67ef05221348aa/data' 8 | : 'community.wave.seqera.io/library/coreutils_grep_gzip_lbzip2_pruned:838ba80435a629f8'}" 9 | 10 | input: 11 | tuple val(meta), path(archive) 12 | 13 | output: 14 | tuple val(meta), path("${gunzip}"), emit: gunzip 15 | path "versions.yml", emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def extension = (archive.toString() - '.gz').tokenize('.')[-1] 23 | def name = archive.toString() - '.gz' - ".${extension}" 24 | def prefix = task.ext.prefix ?: name 25 | gunzip = prefix + ".${extension}" 26 | """ 27 | # Not calling gunzip itself because it creates files 28 | # with the original group ownership rather than the 29 | # default one for that user / the work directory 30 | gzip \\ 31 | -cd \\ 32 | ${args} \\ 33 | ${archive} \\ 34 | > ${gunzip} 35 | 36 | cat <<-END_VERSIONS > versions.yml 37 | "${task.process}": 38 | gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') 39 | END_VERSIONS 40 | """ 41 | 42 | stub: 43 | def args = task.ext.args ?: '' 44 | def extension = (archive.toString() - '.gz').tokenize('.')[-1] 45 | def name = archive.toString() - '.gz' - ".${extension}" 46 | def prefix = task.ext.prefix ?: name 47 | gunzip = prefix + ".${extension}" 48 | """ 49 | touch ${gunzip} 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/meta.yml: -------------------------------------------------------------------------------- 1 | name: gunzip 2 | description: Compresses and 
decompresses files. 3 | keywords: 4 | - gunzip 5 | - compression 6 | - decompression 7 | tools: 8 | - gunzip: 9 | description: | 10 | gzip is a file format and a software application used for file compression and decompression. 11 | documentation: https://www.gnu.org/software/gzip/manual/gzip.html 12 | licence: ["GPL-3.0-or-later"] 13 | identifier: "" 14 | input: 15 | - - meta: 16 | type: map 17 | description: | 18 | Optional groovy Map containing meta information 19 | e.g. [ id:'test', single_end:false ] 20 | - archive: 21 | type: file 22 | description: File to be compressed/uncompressed 23 | pattern: "*.*" 24 | output: 25 | - gunzip: 26 | - meta: 27 | type: file 28 | description: Compressed/uncompressed file 29 | pattern: "*.*" 30 | - ${gunzip}: 31 | type: file 32 | description: Compressed/uncompressed file 33 | pattern: "*.*" 34 | - versions: 35 | - versions.yml: 36 | type: file 37 | description: File containing software versions 38 | pattern: "versions.yml" 39 | authors: 40 | - "@joseespinosa" 41 | - "@drpatelh" 42 | - "@jfy133" 43 | maintainers: 44 | - "@joseespinosa" 45 | - "@drpatelh" 46 | - "@jfy133" 47 | - "@gallvp" 48 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: GUNZIP { 3 | ext.prefix = { "${meta.id}.xyz" } 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::multiqc=1.28 8 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/main.nf: 
-------------------------------------------------------------------------------- 1 | process MULTIQC { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/multiqc:1.28--pyhdfd78af_0' : 7 | 'biocontainers/multiqc:1.28--pyhdfd78af_0' }" 8 | 9 | input: 10 | path multiqc_files, stageAs: "?/*" 11 | path(multiqc_config) 12 | path(extra_multiqc_config) 13 | path(multiqc_logo) 14 | path(replace_names) 15 | path(sample_names) 16 | 17 | output: 18 | path "*multiqc_report.html", emit: report 19 | path "*_data" , emit: data 20 | path "*_plots" , optional:true, emit: plots 21 | path "versions.yml" , emit: versions 22 | 23 | when: 24 | task.ext.when == null || task.ext.when 25 | 26 | script: 27 | def args = task.ext.args ?: '' 28 | def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : '' 29 | def config = multiqc_config ? "--config $multiqc_config" : '' 30 | def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' 31 | def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : '' 32 | def replace = replace_names ? "--replace-names ${replace_names}" : '' 33 | def samples = sample_names ? "--sample-names ${sample_names}" : '' 34 | """ 35 | multiqc \\ 36 | --force \\ 37 | $args \\ 38 | $config \\ 39 | $prefix \\ 40 | $extra_config \\ 41 | $logo \\ 42 | $replace \\ 43 | $samples \\ 44 | . 
45 | 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 49 | END_VERSIONS 50 | """ 51 | 52 | stub: 53 | """ 54 | mkdir multiqc_data 55 | mkdir multiqc_plots 56 | touch multiqc_report.html 57 | 58 | cat <<-END_VERSIONS > versions.yml 59 | "${task.process}": 60 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 61 | END_VERSIONS 62 | """ 63 | } 64 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/meta.yml: -------------------------------------------------------------------------------- 1 | name: multiqc 2 | description: Aggregate results from bioinformatics analyses across many samples into 3 | a single report 4 | keywords: 5 | - QC 6 | - bioinformatics tools 7 | - Beautiful stand-alone HTML report 8 | tools: 9 | - multiqc: 10 | description: | 11 | MultiQC searches a given directory for analysis logs and compiles a HTML report. 12 | It's a general use tool, perfect for summarising the output from numerous bioinformatics tools. 13 | homepage: https://multiqc.info/ 14 | documentation: https://multiqc.info/docs/ 15 | licence: ["GPL-3.0-or-later"] 16 | identifier: biotools:multiqc 17 | input: 18 | - - multiqc_files: 19 | type: file 20 | description: | 21 | List of reports / files recognised by MultiQC, for example the html and zip output of FastQC 22 | - - multiqc_config: 23 | type: file 24 | description: Optional config yml for MultiQC 25 | pattern: "*.{yml,yaml}" 26 | - - extra_multiqc_config: 27 | type: file 28 | description: Second optional config yml for MultiQC. Will override common sections 29 | in multiqc_config. 30 | pattern: "*.{yml,yaml}" 31 | - - multiqc_logo: 32 | type: file 33 | description: Optional logo file for MultiQC 34 | pattern: "*.{png}" 35 | - - replace_names: 36 | type: file 37 | description: | 38 | Optional two-column sample renaming file. 
First column a set of 39 | patterns, second column a set of corresponding replacements. Passed via 40 | MultiQC's `--replace-names` option. 41 | pattern: "*.{tsv}" 42 | - - sample_names: 43 | type: file 44 | description: | 45 | Optional TSV file with headers, passed to the MultiQC --sample_names 46 | argument. 47 | pattern: "*.{tsv}" 48 | output: 49 | - report: 50 | - "*multiqc_report.html": 51 | type: file 52 | description: MultiQC report file 53 | pattern: "multiqc_report.html" 54 | - data: 55 | - "*_data": 56 | type: directory 57 | description: MultiQC data dir 58 | pattern: "multiqc_data" 59 | - plots: 60 | - "*_plots": 61 | type: file 62 | description: Plots created by MultiQC 63 | pattern: "*_data" 64 | - versions: 65 | - versions.yml: 66 | type: file 67 | description: File containing software versions 68 | pattern: "versions.yml" 69 | authors: 70 | - "@abhi18av" 71 | - "@bunop" 72 | - "@drpatelh" 73 | - "@jfy133" 74 | maintainers: 75 | - "@abhi18av" 76 | - "@bunop" 77 | - "@drpatelh" 78 | - "@jfy133" 79 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "multiqc_versions_single": { 3 | "content": [ 4 | [ 5 | "versions.yml:md5,b05075d2d2b4f485c0d627a5c8e475b2" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.9.0", 10 | "nextflow": "24.10.4" 11 | }, 12 | "timestamp": "2025-03-26T16:05:18.927925" 13 | }, 14 | "multiqc_stub": { 15 | "content": [ 16 | [ 17 | "multiqc_report.html", 18 | "multiqc_data", 19 | "multiqc_plots", 20 | "versions.yml:md5,b05075d2d2b4f485c0d627a5c8e475b2" 21 | ] 22 | ], 23 | "meta": { 24 | "nf-test": "0.9.0", 25 | "nextflow": "24.10.4" 26 | }, 27 | "timestamp": "2025-03-26T16:05:55.639955" 28 | }, 29 | "multiqc_versions_config": { 30 | "content": [ 31 | [ 32 | "versions.yml:md5,b05075d2d2b4f485c0d627a5c8e475b2" 33 | ] 34 | ], 35 | "meta": { 36 | "nf-test": "0.9.0", 37 | 
"nextflow": "24.10.4" 38 | }, 39 | "timestamp": "2025-03-26T16:05:44.067369" 40 | } 41 | } -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'MULTIQC' { 3 | ext.prefix = null 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/sortmerna/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::sortmerna=4.3.6 8 | -------------------------------------------------------------------------------- /modules/nf-core/sortmerna/tests/indexing_only.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'SORTMERNA' { 3 | ext.args = '--index 1' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/sortmerna/tests/premade_index.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'SORTMERNA_INDEX' { 3 | ext.args = '--index 1' 4 | } 5 | withName: 'SORTMERNA' { 6 | ext.args = '--index 0' 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /modules/nf-core/untar/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::coreutils=9.5 8 | - conda-forge::grep=3.11 9 | - conda-forge::gzip=1.13 10 | - 
conda-forge::lbzip2=2.5 11 | - conda-forge::sed=4.8 12 | - conda-forge::tar=1.34 13 | -------------------------------------------------------------------------------- /modules/nf-core/untar/meta.yml: -------------------------------------------------------------------------------- 1 | name: untar 2 | description: Extract files. 3 | keywords: 4 | - untar 5 | - uncompress 6 | - extract 7 | tools: 8 | - untar: 9 | description: | 10 | Extract tar.gz files. 11 | documentation: https://www.gnu.org/software/tar/manual/ 12 | licence: ["GPL-3.0-or-later"] 13 | identifier: "" 14 | input: 15 | - - meta: 16 | type: map 17 | description: | 18 | Groovy Map containing sample information 19 | e.g. [ id:'test', single_end:false ] 20 | - archive: 21 | type: file 22 | description: File to be untar 23 | pattern: "*.{tar}.{gz}" 24 | output: 25 | - untar: 26 | - meta: 27 | type: map 28 | description: | 29 | Groovy Map containing sample information 30 | e.g. [ id:'test', single_end:false ] 31 | pattern: "*/" 32 | - ${prefix}: 33 | type: map 34 | description: | 35 | Groovy Map containing sample information 36 | e.g. 
[ id:'test', single_end:false ] 37 | pattern: "*/" 38 | - versions: 39 | - versions.yml: 40 | type: file 41 | description: File containing software versions 42 | pattern: "versions.yml" 43 | authors: 44 | - "@joseespinosa" 45 | - "@drpatelh" 46 | - "@matthdsm" 47 | - "@jfy133" 48 | maintainers: 49 | - "@joseespinosa" 50 | - "@drpatelh" 51 | - "@matthdsm" 52 | - "@jfy133" 53 | -------------------------------------------------------------------------------- /modules/nf-core/untar/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process UNTAR" 4 | script "../main.nf" 5 | process "UNTAR" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "untar" 9 | 10 | test("test_untar") { 11 | 12 | when { 13 | process { 14 | """ 15 | input[0] = [ [], file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/db/kraken2.tar.gz', checkIfExists: true) ] 16 | """ 17 | } 18 | } 19 | 20 | then { 21 | assertAll ( 22 | { assert process.success }, 23 | { assert snapshot(process.out).match() }, 24 | ) 25 | } 26 | } 27 | 28 | test("test_untar_onlyfiles") { 29 | 30 | when { 31 | process { 32 | """ 33 | input[0] = [ [], file(params.modules_testdata_base_path + 'generic/tar/hello.tar.gz', checkIfExists: true) ] 34 | """ 35 | } 36 | } 37 | 38 | then { 39 | assertAll ( 40 | { assert process.success }, 41 | { assert snapshot(process.out).match() }, 42 | ) 43 | } 44 | } 45 | 46 | test("test_untar - stub") { 47 | 48 | options "-stub" 49 | 50 | when { 51 | process { 52 | """ 53 | input[0] = [ [], file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/db/kraken2.tar.gz', checkIfExists: true) ] 54 | """ 55 | } 56 | } 57 | 58 | then { 59 | assertAll ( 60 | { assert process.success }, 61 | { assert snapshot(process.out).match() }, 62 | ) 63 | } 64 | } 65 | 66 | test("test_untar_onlyfiles - stub") { 67 | 68 | options "-stub" 69 | 70 | when { 71 | process { 72 | """ 73 | input[0] = [ [], 
file(params.modules_testdata_base_path + 'generic/tar/hello.tar.gz', checkIfExists: true) ] 74 | """ 75 | } 76 | } 77 | 78 | then { 79 | assertAll ( 80 | { assert process.success }, 81 | { assert snapshot(process.out).match() }, 82 | ) 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /modules/nf-core/unzip/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::p7zip=16.02 8 | -------------------------------------------------------------------------------- /modules/nf-core/unzip/main.nf: -------------------------------------------------------------------------------- 1 | process UNZIP { 2 | tag "$archive" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/p7zip:16.02' : 8 | 'biocontainers/p7zip:16.02' }" 9 | 10 | input: 11 | tuple val(meta), path(archive) 12 | 13 | output: 14 | tuple val(meta), path("${prefix}/"), emit: unzipped_archive 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | if ( archive instanceof List && archive.name.size > 1 ) { error "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." } 23 | prefix = task.ext.prefix ?: ( meta.id ? 
"${meta.id}" : archive.baseName) 24 | """ 25 | 7za \\ 26 | x \\ 27 | -o"${prefix}"/ \\ 28 | $args \\ 29 | $archive 30 | 31 | cat <<-END_VERSIONS > versions.yml 32 | "${task.process}": 33 | 7za: \$(echo \$(7za --help) | sed 's/.*p7zip Version //; s/(.*//') 34 | END_VERSIONS 35 | """ 36 | 37 | stub: 38 | def args = task.ext.args ?: '' 39 | if ( archive instanceof List && archive.name.size > 1 ) { error "[UNZIP] error: 7za only accepts a single archive as input. Please check module input." } 40 | prefix = task.ext.prefix ?: ( meta.id ? "${meta.id}" : archive.baseName) 41 | """ 42 | mkdir "${prefix}" 43 | 44 | cat <<-END_VERSIONS > versions.yml 45 | "${task.process}": 46 | 7za: \$(echo \$(7za --help) | sed 's/.*p7zip Version //; s/(.*//') 47 | END_VERSIONS 48 | """ 49 | } 50 | -------------------------------------------------------------------------------- /modules/nf-core/unzip/meta.yml: -------------------------------------------------------------------------------- 1 | name: unzip 2 | description: Unzip ZIP archive files 3 | keywords: 4 | - unzip 5 | - decompression 6 | - zip 7 | - archiving 8 | tools: 9 | - unzip: 10 | description: p7zip is a quick port of 7z.exe and 7za.exe (command line version 11 | of 7zip, see www.7-zip.org) for Unix. 12 | homepage: https://sourceforge.net/projects/p7zip/ 13 | documentation: https://sourceforge.net/projects/p7zip/ 14 | tool_dev_url: https://sourceforge.net/projects/p7zip" 15 | licence: ["LGPL-2.1-or-later"] 16 | identifier: "" 17 | input: 18 | - - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing sample information 22 | e.g. [ id:'test', single_end:false ] 23 | - archive: 24 | type: file 25 | description: ZIP file 26 | pattern: "*.zip" 27 | output: 28 | - unzipped_archive: 29 | - meta: 30 | type: map 31 | description: | 32 | Groovy Map containing sample information 33 | e.g. 
[ id:'test', single_end:false ] 34 | - ${prefix}/: 35 | type: directory 36 | description: Directory contents of the unzipped archive 37 | pattern: "${archive.baseName}/" 38 | - versions: 39 | - versions.yml: 40 | type: file 41 | description: File containing software versions 42 | pattern: "versions.yml" 43 | authors: 44 | - "@jfy133" 45 | maintainers: 46 | - "@jfy133" 47 | -------------------------------------------------------------------------------- /modules/nf-core/unzip/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process UNZIP" 4 | script "../main.nf" 5 | process "UNZIP" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "unzip" 10 | 11 | test("generic [tar] [tar_gz]") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [ id: 'hello' ], 18 | file(params.modules_testdata_base_path + 'generic/tar/hello.tar.gz', checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot(process.out).match() } 28 | ) 29 | } 30 | } 31 | 32 | test("generic [tar] [tar_gz] stub") { 33 | 34 | options "-stub" 35 | 36 | when { 37 | process { 38 | """ 39 | input[0] = [ 40 | [ id: 'hello' ], 41 | file(params.modules_testdata_base_path + 'generic/tar/hello.tar.gz', checkIfExists: true) 42 | ] 43 | """ 44 | } 45 | } 46 | 47 | then { 48 | assertAll( 49 | { assert process.success }, 50 | { assert snapshot(process.out).match() } 51 | ) 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /modules/nf-core/unzip/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "generic [tar] [tar_gz] stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "hello" 9 | }, 10 | [ 11 | 12 | ] 13 | ] 14 | ], 15 | "1": [ 16 | "versions.yml:md5,52c55ce814e8bc9edc5a6c625ed794b8" 17 | ], 18 | 
"unzipped_archive": [ 19 | [ 20 | { 21 | "id": "hello" 22 | }, 23 | [ 24 | 25 | ] 26 | ] 27 | ], 28 | "versions": [ 29 | "versions.yml:md5,52c55ce814e8bc9edc5a6c625ed794b8" 30 | ] 31 | } 32 | ], 33 | "meta": { 34 | "nf-test": "0.8.4", 35 | "nextflow": "24.04.2" 36 | }, 37 | "timestamp": "2024-06-30T19:16:37.11550986" 38 | }, 39 | "generic [tar] [tar_gz]": { 40 | "content": [ 41 | { 42 | "0": [ 43 | [ 44 | { 45 | "id": "hello" 46 | }, 47 | [ 48 | "hello.tar:md5,80c66db79a773bc87b3346035ff9593e" 49 | ] 50 | ] 51 | ], 52 | "1": [ 53 | "versions.yml:md5,52c55ce814e8bc9edc5a6c625ed794b8" 54 | ], 55 | "unzipped_archive": [ 56 | [ 57 | { 58 | "id": "hello" 59 | }, 60 | [ 61 | "hello.tar:md5,80c66db79a773bc87b3346035ff9593e" 62 | ] 63 | ] 64 | ], 65 | "versions": [ 66 | "versions.yml:md5,52c55ce814e8bc9edc5a6c625ed794b8" 67 | ] 68 | } 69 | ], 70 | "meta": { 71 | "nf-test": "0.8.4", 72 | "nextflow": "24.04.2" 73 | }, 74 | "timestamp": "2024-06-30T19:16:25.120242571" 75 | } 76 | } -------------------------------------------------------------------------------- /modules/nf-side/bowtie/build/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | # renovate: datasource=conda depName=bioconda/bowtie 8 | - bioconda::bowtie=1.3.1 9 | # renovate: datasource=conda depName=bioconda/htslib 10 | - bioconda::htslib=1.21 11 | # renovate: datasource=conda depName=bioconda/samtools 12 | - bioconda::samtools=1.21 13 | -------------------------------------------------------------------------------- /modules/nf-side/bowtie/build/main.nf: -------------------------------------------------------------------------------- 1 | process BOWTIE_BUILD { 2 | tag "${meta.id}" 3 | label 'process_high' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ 
workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/6f/6f5ca09fd5aab931d9b87c532c69e0122ce5ff8ec88732f906e12108d48425e9/data' : 8 | 'community.wave.seqera.io/library/bowtie_htslib_samtools:e1e242368ffcb5d3' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path('bowtie') , emit: index 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def prefix = task.ext.prefix ?: "${meta.id}" 22 | """ 23 | mkdir -p bowtie 24 | bowtie-build --threads $task.cpus $fasta bowtie/${prefix} 25 | 26 | cat <<-END_VERSIONS > versions.yml 27 | "${task.process}": 28 | bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') 29 | END_VERSIONS 30 | """ 31 | 32 | stub: 33 | def prefix = task.ext.prefix ?: "${meta.id}" 34 | """ 35 | mkdir -p bowtie 36 | touch bowtie/${prefix}.1.ebwt 37 | touch bowtie/${prefix}.2.ebwt 38 | touch bowtie/${prefix}.3.ebwt 39 | touch bowtie/${prefix}.4.ebwt 40 | touch bowtie/${prefix}.rev.1.ebwt 41 | touch bowtie/${prefix}.rev.2.ebwt 42 | 43 | cat <<-END_VERSIONS > versions.yml 44 | "${task.process}": 45 | bowtie: \$(echo \$(bowtie --version 2>&1) | sed 's/^.*bowtie-align-s version //; s/ .*\$//') 46 | END_VERSIONS 47 | """ 48 | 49 | } 50 | -------------------------------------------------------------------------------- /modules/nf-side/bowtie/build/meta.yml: -------------------------------------------------------------------------------- 1 | name: bowtie_build 2 | description: Create bowtie index for reference genome 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - bowtie: 10 | description: | 11 | bowtie is a software package for mapping DNA sequences against 12 | a large reference genome, such as the human genome. 
13 | homepage: http://bowtie-bio.sourceforge.net/index.shtml 14 | documentation: http://bowtie-bio.sourceforge.net/manual.shtml 15 | arxiv: arXiv:1303.3997 16 | licence: ["Artistic-2.0"] 17 | identifier: biotools:bowtie 18 | input: 19 | - - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing information about the genome fasta 23 | e.g. [ id:'test' ] 24 | - fasta: 25 | type: file 26 | description: Input genome fasta file 27 | output: 28 | - index: 29 | - meta: 30 | type: map 31 | description: | 32 | Groovy Map containing information about the genome fasta 33 | e.g. [ id:'test' ] 34 | - bowtie: 35 | type: file 36 | description: Folder containing bowtie genome index files 37 | pattern: "*.ebwt" 38 | - versions: 39 | - versions.yml: 40 | type: file 41 | description: File containing software versions 42 | pattern: "versions.yml" 43 | authors: 44 | - "@kevinmenden" 45 | - "@drpatelh" 46 | maintainers: 47 | - "@kevinmenden" 48 | - "@drpatelh" 49 | -------------------------------------------------------------------------------- /modules/nf-side/bowtie/build/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process BOWTIE_BUILD" 4 | script "../main.nf" 5 | process "BOWTIE_BUILD" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "bowtie" 10 | tag "bowtie/build" 11 | 12 | test("sarscov2 - fasta") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = [ 18 | [id: 'sarscov2'], 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 20 | ] 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll( 27 | { assert process.success }, 28 | { assert snapshot(process.out).match() } 29 | ) 30 | } 31 | 32 | } 33 | 34 | test("sarscov2 - fasta - stub") { 35 | 36 | options "-stub" 37 | tag "version" 38 | tag "stub" 39 | 40 | when { 41 | process { 42 | """ 43 | input[0] = [[id: 'sarscov2'], 44 | 
file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 45 | ] 46 | """ 47 | } 48 | } 49 | 50 | then { 51 | assertAll( 52 | { assert process.success }, 53 | { assert snapshot(process.out).match() }, 54 | { assert snapshot(path(process.out.versions.get(0)).yaml).match("versions") }, 55 | ) 56 | } 57 | 58 | } 59 | 60 | } -------------------------------------------------------------------------------- /modules/nf-side/bowtie2/build/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | # renovate: datasource=conda depName=bioconda/bowtie2 8 | - bioconda::bowtie2=2.5.4 9 | # renovate: datasource=conda depName=bioconda/htslib 10 | - bioconda::htslib=1.21 11 | # renovate: datasource=conda depName=bioconda/samtools 12 | - bioconda::samtools=1.21 13 | - conda-forge::pigz=2.8 14 | -------------------------------------------------------------------------------- /modules/nf-side/bowtie2/build/main.nf: -------------------------------------------------------------------------------- 1 | process BOWTIE2_BUILD { 2 | tag "$fasta" 3 | label 'process_high' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/b4/b41b403e81883126c3227fc45840015538e8e2212f13abc9ae84e4b98891d51c/data' : 8 | 'community.wave.seqera.io/library/bowtie2_htslib_samtools_pigz:edeb13799090a2a6' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path('bowtie2') , emit: index 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | """ 23 | mkdir bowtie2 24 | bowtie2-build $args --threads $task.cpus $fasta bowtie2/${fasta.baseName} 25 | cat <<-END_VERSIONS > versions.yml 26 | "${task.process}": 27 | bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') 28 | END_VERSIONS 29 | """ 30 | 31 | stub: 32 | """ 33 | mkdir bowtie2 34 | touch bowtie2/${fasta.baseName}.{1..4}.bt2 35 | touch bowtie2/${fasta.baseName}.rev.{1,2}.bt2 36 | 37 | cat <<-END_VERSIONS > versions.yml 38 | "${task.process}": 39 | bowtie2: \$(echo \$(bowtie2 --version 2>&1) | sed 's/^.*bowtie2-align-s version //; s/ .*\$//') 40 | END_VERSIONS 41 | """ 42 | } 43 | -------------------------------------------------------------------------------- /modules/nf-side/bowtie2/build/meta.yml: -------------------------------------------------------------------------------- 1 | name: bowtie2_build 2 | description: Builds bowtie index for reference genome 3 | keywords: 4 | - build 5 | - index 6 | - fasta 7 | - genome 8 | - reference 9 | tools: 10 | - bowtie2: 11 | description: | 12 | Bowtie 2 is an ultrafast and memory-efficient tool for aligning 13 | sequencing reads to long reference sequences. 
14 | homepage: http://bowtie-bio.sourceforge.net/bowtie2/index.shtml 15 | documentation: http://bowtie-bio.sourceforge.net/bowtie2/manual.shtml 16 | doi: 10.1038/nmeth.1923 17 | licence: ["GPL-3.0-or-later"] 18 | identifier: "" 19 | input: 20 | - - meta: 21 | type: map 22 | description: | 23 | Groovy Map containing reference information 24 | e.g. [ id:'test', single_end:false ] 25 | - fasta: 26 | type: file 27 | description: Input genome fasta file 28 | output: 29 | - index: 30 | - meta: 31 | type: map 32 | description: | 33 | Groovy Map containing reference information 34 | e.g. [ id:'test', single_end:false ] 35 | - bowtie2: 36 | type: file 37 | description: Bowtie2 genome index files 38 | pattern: "*.bt2" 39 | - versions: 40 | - versions.yml: 41 | type: file 42 | description: File containing software versions 43 | pattern: "versions.yml" 44 | authors: 45 | - "@joseespinosa" 46 | - "@drpatelh" 47 | maintainers: 48 | - "@joseespinosa" 49 | - "@drpatelh" 50 | -------------------------------------------------------------------------------- /modules/nf-side/bowtie2/build/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process BOWTIE2_BUILD" 4 | script "../main.nf" 5 | process "BOWTIE2_BUILD" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "bowtie2" 9 | tag "bowtie2/build" 10 | 11 | test("Should run without failures") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [ id:'test' ], 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assert process.success 26 | assert snapshot(process.out).match() 27 | } 28 | 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /modules/nf-side/bowtie2/build/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | 
{ 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | [ 11 | "genome.1.bt2:md5,cbe3d0bbea55bc57c99b4bfa25b5fbdf", 12 | "genome.2.bt2:md5,47b153cd1319abc88dda532462651fcf", 13 | "genome.3.bt2:md5,4ed93abba181d8dfab2e303e33114777", 14 | "genome.4.bt2:md5,c25be5f8b0378abf7a58c8a880b87626", 15 | "genome.rev.1.bt2:md5,52be6950579598a990570fbcf5372184", 16 | "genome.rev.2.bt2:md5,e3b4ef343dea4dd571642010a7d09597" 17 | ] 18 | ] 19 | ], 20 | "1": [ 21 | "versions.yml:md5,d136fb9c16f0a9fb2ae804b2a5fbc09c" 22 | ], 23 | "index": [ 24 | [ 25 | { 26 | "id": "test" 27 | }, 28 | [ 29 | "genome.1.bt2:md5,cbe3d0bbea55bc57c99b4bfa25b5fbdf", 30 | "genome.2.bt2:md5,47b153cd1319abc88dda532462651fcf", 31 | "genome.3.bt2:md5,4ed93abba181d8dfab2e303e33114777", 32 | "genome.4.bt2:md5,c25be5f8b0378abf7a58c8a880b87626", 33 | "genome.rev.1.bt2:md5,52be6950579598a990570fbcf5372184", 34 | "genome.rev.2.bt2:md5,e3b4ef343dea4dd571642010a7d09597" 35 | ] 36 | ] 37 | ], 38 | "versions": [ 39 | "versions.yml:md5,d136fb9c16f0a9fb2ae804b2a5fbc09c" 40 | ] 41 | } 42 | ], 43 | "meta": { 44 | "nf-test": "0.9.2", 45 | "nextflow": "25.02.1" 46 | }, 47 | "timestamp": "2023-11-23T11:51:01.107681997" 48 | } 49 | } -------------------------------------------------------------------------------- /modules/nf-side/bwa/index/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | 7 | dependencies: 8 | # renovate: datasource=conda depName=bioconda/bwa 9 | - bioconda::bwa=0.7.18 10 | # renovate: datasource=conda depName=bioconda/htslib 11 | - bioconda::htslib=1.21 12 | # renovate: datasource=conda depName=bioconda/samtools 13 | - bioconda::samtools=1.21 14 | -------------------------------------------------------------------------------- 
/modules/nf-side/bwa/index/main.nf: -------------------------------------------------------------------------------- 1 | process BWA_INDEX { 2 | tag "$fasta" 3 | // NOTE requires 5.37N memory where N is the size of the database 4 | // source: https://bio-bwa.sourceforge.net/bwa.shtml#8 5 | memory { 6.B * fasta.size() } 6 | 7 | conda "${moduleDir}/environment.yml" 8 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 9 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/bf/bf7890f8d4e38a7586581cb7fa13401b7af1582f21d94eef969df4cea852b6da/data' : 10 | 'community.wave.seqera.io/library/bwa_htslib_samtools:56c9f8d5201889a4' }" 11 | 12 | input: 13 | tuple val(meta), path(fasta) 14 | 15 | output: 16 | tuple val(meta), path("bwa") , emit: index 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def prefix = task.ext.prefix ?: "${fasta.baseName}" 24 | def args = task.ext.args ?: '' 25 | """ 26 | mkdir bwa 27 | bwa \\ 28 | index \\ 29 | $args \\ 30 | -p bwa/${prefix} \\ 31 | $fasta 32 | 33 | cat <<-END_VERSIONS > versions.yml 34 | "${task.process}": 35 | bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') 36 | END_VERSIONS 37 | """ 38 | 39 | stub: 40 | def prefix = task.ext.prefix ?: "${fasta.baseName}" 41 | """ 42 | mkdir bwa 43 | 44 | touch bwa/${prefix}.amb 45 | touch bwa/${prefix}.ann 46 | touch bwa/${prefix}.bwt 47 | touch bwa/${prefix}.pac 48 | touch bwa/${prefix}.sa 49 | 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | bwa: \$(echo \$(bwa 2>&1) | sed 's/^.*Version: //; s/Contact:.*\$//') 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-side/bwa/index/meta.yml: -------------------------------------------------------------------------------- 1 | name: bwa_index 2 | description: Create BWA index for 
reference genome 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - bwa: 10 | description: | 11 | BWA is a software package for mapping DNA sequences against 12 | a large reference genome, such as the human genome. 13 | homepage: http://bio-bwa.sourceforge.net/ 14 | documentation: https://bio-bwa.sourceforge.net/bwa.shtml 15 | arxiv: arXiv:1303.3997 16 | licence: ["GPL-3.0-or-later"] 17 | identifier: "biotools:bwa" 18 | input: 19 | - - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing reference information. 23 | e.g. [ id:'test', single_end:false ] 24 | - fasta: 25 | type: file 26 | description: Input genome fasta file 27 | ontologies: 28 | - edam: "http://edamontology.org/data_2044" # Sequence 29 | - edam: "http://edamontology.org/format_1929" # FASTA 30 | output: 31 | - index: 32 | - meta: 33 | type: map 34 | description: | 35 | Groovy Map containing reference information. 36 | e.g. [ id:'test', single_end:false ] 37 | - bwa: 38 | type: map 39 | description: | 40 | Groovy Map containing reference information. 41 | e.g. 
[ id:'test', single_end:false ] 42 | pattern: "*.{amb,ann,bwt,pac,sa}" 43 | ontologies: 44 | - edam: "http://edamontology.org/data_3210" # Genome index 45 | - versions: 46 | - versions.yml: 47 | type: file 48 | description: File containing software versions 49 | pattern: "versions.yml" 50 | authors: 51 | - "@drpatelh" 52 | - "@maxulysse" 53 | maintainers: 54 | - "@drpatelh" 55 | - "@maxulysse" 56 | - "@gallvp" 57 | -------------------------------------------------------------------------------- /modules/nf-side/bwa/index/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process BWA_INDEX" 4 | tag "modules_nfcore" 5 | tag "modules" 6 | tag "bwa" 7 | tag "bwa/index" 8 | script "../main.nf" 9 | process "BWA_INDEX" 10 | 11 | test("BWA index") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [id: 'test'], 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot(process.out).match() } 28 | ) 29 | } 30 | 31 | } 32 | 33 | } 34 | -------------------------------------------------------------------------------- /modules/nf-side/bwa/index/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "BWA index": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | [ 11 | "genome.amb:md5,3a68b8b2287e07dd3f5f95f4344ba76e", 12 | "genome.ann:md5,c32e11f6c859f166c7525a9c1d583567", 13 | "genome.bwt:md5,0469c30a1e239dd08f68afe66fde99da", 14 | "genome.pac:md5,983e3d2cd6f36e2546e6d25a0da78d66", 15 | "genome.sa:md5,ab3952cabf026b48cd3eb5bccbb636d1" 16 | ] 17 | ] 18 | ], 19 | "1": [ 20 | "versions.yml:md5,a64462ac7dfb21f4ade9b02e7f65c5bb" 21 | ], 22 | "index": [ 23 | [ 24 | { 25 | "id": "test" 26 | }, 27 | [ 28 | 
"genome.amb:md5,3a68b8b2287e07dd3f5f95f4344ba76e", 29 | "genome.ann:md5,c32e11f6c859f166c7525a9c1d583567", 30 | "genome.bwt:md5,0469c30a1e239dd08f68afe66fde99da", 31 | "genome.pac:md5,983e3d2cd6f36e2546e6d25a0da78d66", 32 | "genome.sa:md5,ab3952cabf026b48cd3eb5bccbb636d1" 33 | ] 34 | ] 35 | ], 36 | "versions": [ 37 | "versions.yml:md5,a64462ac7dfb21f4ade9b02e7f65c5bb" 38 | ] 39 | } 40 | ], 41 | "meta": { 42 | "nf-test": "0.8.4", 43 | "nextflow": "23.10.1" 44 | }, 45 | "timestamp": "2024-05-16T11:40:09.925307" 46 | } 47 | } -------------------------------------------------------------------------------- /modules/nf-side/bwamem2/index/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | 7 | dependencies: 8 | # renovate: datasource=conda depName=bioconda/bwa-mem2 9 | - bwa-mem2=2.2.1 10 | # renovate: datasource=conda depName=bioconda/htslib 11 | - htslib=1.21 12 | # renovate: datasource=conda depName=bioconda/samtools 13 | - samtools=1.21 14 | -------------------------------------------------------------------------------- /modules/nf-side/bwamem2/index/main.nf: -------------------------------------------------------------------------------- 1 | process BWAMEM2_INDEX { 2 | tag "$fasta" 3 | // NOTE Requires 28N GB memory where N is the size of the reference sequence 4 | // source: https://github.com/bwa-mem2/bwa-mem2/issues/9 5 | memory { 28.B * fasta.size() } 6 | 7 | conda "${moduleDir}/environment.yml" 8 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
9 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/9a/9ac054213e67b3c9308e409b459080bbe438f8fd6c646c351bc42887f35a42e7/data' : 10 | 'community.wave.seqera.io/library/bwa-mem2_htslib_samtools:e1f420694f8e42bd' }" 11 | 12 | input: 13 | tuple val(meta), path(fasta) 14 | 15 | output: 16 | tuple val(meta), path("bwamem2"), emit: index 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def prefix = task.ext.prefix ?: "${fasta}" 24 | def args = task.ext.args ?: '' 25 | """ 26 | mkdir bwamem2 27 | bwa-mem2 \\ 28 | index \\ 29 | $args \\ 30 | $fasta -p bwamem2/${prefix} 31 | 32 | cat <<-END_VERSIONS > versions.yml 33 | "${task.process}": 34 | bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') 35 | END_VERSIONS 36 | """ 37 | 38 | stub: 39 | def prefix = task.ext.prefix ?: "${fasta}" 40 | 41 | """ 42 | mkdir bwamem2 43 | touch bwamem2/${prefix}.0123 44 | touch bwamem2/${prefix}.ann 45 | touch bwamem2/${prefix}.pac 46 | touch bwamem2/${prefix}.amb 47 | touch bwamem2/${prefix}.bwt.2bit.64 48 | 49 | cat <<-END_VERSIONS > versions.yml 50 | "${task.process}": 51 | bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') 52 | END_VERSIONS 53 | """ 54 | } 55 | -------------------------------------------------------------------------------- /modules/nf-side/bwamem2/index/meta.yml: -------------------------------------------------------------------------------- 1 | name: bwamem2_index 2 | description: Create BWA-mem2 index for reference genome 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - bwamem2: 10 | description: | 11 | BWA-mem2 is a software package for mapping DNA sequences against 12 | a large reference genome, such as the human genome. 
13 | homepage: https://github.com/bwa-mem2/bwa-mem2 14 | documentation: https://github.com/bwa-mem2/bwa-mem2#usage 15 | licence: ["MIT"] 16 | identifier: "biotools:bwa-mem2" 17 | input: 18 | - - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing sample information 22 | e.g. [ id:'test', single_end:false ] 23 | - fasta: 24 | type: file 25 | description: Input genome fasta file 26 | ontologies: 27 | - edam: "http://edamontology.org/data_2044" # Sequence 28 | - edam: "http://edamontology.org/format_1929" # FASTA 29 | output: 30 | - index: 31 | - meta: 32 | type: map 33 | description: | 34 | Groovy Map containing sample information 35 | e.g. [ id:'test', single_end:false ] 36 | - bwamem2: 37 | type: file 38 | description: BWA genome index files 39 | pattern: "*.{0123,amb,ann,bwt.2bit.64,pac}" 40 | ontologies: 41 | - edam: "http://edamontology.org/data_3210" # Genome index 42 | - versions: 43 | - versions.yml: 44 | type: file 45 | description: File containing software versions 46 | pattern: "versions.yml" 47 | authors: 48 | - "@maxulysse" 49 | maintainers: 50 | - "@maxulysse" 51 | -------------------------------------------------------------------------------- /modules/nf-side/bwamem2/index/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process BWAMEM2_INDEX" 4 | tag "modules_nfcore" 5 | tag "modules" 6 | tag "bwamem2" 7 | tag "bwamem2/index" 8 | script "../main.nf" 9 | process "BWAMEM2_INDEX" 10 | 11 | test("BWAMEM2 index") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [id: 'test'], 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot(process.out).match() } 28 | ) 29 | } 30 | } 31 | } 32 | 
-------------------------------------------------------------------------------- /modules/nf-side/bwamem2/index/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "BWAMEM2 index": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | [ 11 | "genome.fasta.0123:md5,b02870de80106104abcb03cd9463e7d8", 12 | "genome.fasta.amb:md5,3a68b8b2287e07dd3f5f95f4344ba76e", 13 | "genome.fasta.ann:md5,c32e11f6c859f166c7525a9c1d583567", 14 | "genome.fasta.bwt.2bit.64:md5,d097a1b82dee375d41a1ea69895a9216", 15 | "genome.fasta.pac:md5,983e3d2cd6f36e2546e6d25a0da78d66" 16 | ] 17 | ] 18 | ], 19 | "1": [ 20 | "versions.yml:md5,9ffd13d12e7108ed15c58566bc4717d6" 21 | ], 22 | "index": [ 23 | [ 24 | { 25 | "id": "test" 26 | }, 27 | [ 28 | "genome.fasta.0123:md5,b02870de80106104abcb03cd9463e7d8", 29 | "genome.fasta.amb:md5,3a68b8b2287e07dd3f5f95f4344ba76e", 30 | "genome.fasta.ann:md5,c32e11f6c859f166c7525a9c1d583567", 31 | "genome.fasta.bwt.2bit.64:md5,d097a1b82dee375d41a1ea69895a9216", 32 | "genome.fasta.pac:md5,983e3d2cd6f36e2546e6d25a0da78d66" 33 | ] 34 | ] 35 | ], 36 | "versions": [ 37 | "versions.yml:md5,9ffd13d12e7108ed15c58566bc4717d6" 38 | ] 39 | } 40 | ], 41 | "meta": { 42 | "nf-test": "0.8.4", 43 | "nextflow": "24.02.0" 44 | }, 45 | "timestamp": "2024-03-18T12:59:39.132616" 46 | } 47 | } -------------------------------------------------------------------------------- /modules/nf-side/dragmap/hashtable/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | # WARN: Do not update this tool to 1.3.0 until https://github.com/Illumina/DRAGMAP/issues/47 is resolved 8 | - bioconda::dragmap=1.2.1 9 | 
-------------------------------------------------------------------------------- /modules/nf-side/dragmap/hashtable/main.nf: -------------------------------------------------------------------------------- 1 | process DRAGMAP_HASHTABLE { 2 | tag "${fasta}" 3 | label 'process_high' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | // WARN: Do not update this tool to 1.3.0 until https://github.com/Illumina/DRAGMAP/issues/47 is resolved 7 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 8 | ? 'https://depot.galaxyproject.org/singularity/dragmap:1.2.1--h72d16da_1' 9 | : 'biocontainers/dragmap:1.2.1--h72d16da_1'}" 10 | 11 | input: 12 | tuple val(meta), path(fasta) 13 | 14 | output: 15 | tuple val(meta), path("dragmap"), emit: hashmap 16 | path "versions.yml", emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | """ 24 | mkdir dragmap 25 | dragen-os \\ 26 | --build-hash-table true \\ 27 | --ht-reference ${fasta} \\ 28 | --output-directory dragmap \\ 29 | ${args} \\ 30 | --ht-num-threads ${task.cpus} 31 | 32 | cat <<-END_VERSIONS > versions.yml 33 | "${task.process}": 34 | dragmap: \$(echo \$(dragen-os --version 2>&1)) 35 | END_VERSIONS 36 | """ 37 | 38 | stub: 39 | """ 40 | mkdir dragmap 41 | 42 | cat <<-END_VERSIONS > versions.yml 43 | "${task.process}": 44 | dragmap: \$(echo \$(dragen-os --version 2>&1)) 45 | END_VERSIONS 46 | """ 47 | } 48 | -------------------------------------------------------------------------------- /modules/nf-side/dragmap/hashtable/meta.yml: -------------------------------------------------------------------------------- 1 | name: dragmap_hashtable 2 | description: Create DRAGEN hashtable for reference genome 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - dragmap: 10 | description: Dragmap is the Dragen mapper/aligner Open Source Software. 
11 | homepage: https://github.com/Illumina/dragmap 12 | documentation: https://github.com/Illumina/dragmap 13 | tool_dev_url: https://github.com/Illumina/dragmap#basic-command-line-usage 14 | licence: ["GPL v3"] 15 | identifier: "" 16 | input: 17 | - - meta: 18 | type: map 19 | description: | 20 | Groovy Map containing reference information 21 | e.g. [ id:'test', single_end:false ] 22 | - fasta: 23 | type: file 24 | description: Input genome fasta file 25 | output: 26 | - hashmap: 27 | - meta: 28 | type: map 29 | description: | 30 | Groovy Map containing reference information 31 | e.g. [ id:'test', single_end:false ] 32 | - dragmap: 33 | type: file 34 | description: DRAGMAP hash table 35 | pattern: "*.{cmp,.bin,.txt}" 36 | - versions: 37 | - versions.yml: 38 | type: file 39 | description: File containing software versions 40 | pattern: "versions.yml" 41 | authors: 42 | - "@edmundmiller" 43 | maintainers: 44 | - "@edmundmiller" 45 | -------------------------------------------------------------------------------- /modules/nf-side/dragmap/hashtable/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process DRAGMAP_HASHTABLE" 4 | script "../main.nf" 5 | process "DRAGMAP_HASHTABLE" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "dragmap" 9 | tag "dragmap/hashtable" 10 | 11 | test("sarscov2 - fasta") { 12 | 13 | when { 14 | 15 | process { 16 | """ 17 | input[0] = [ 18 | [id:'test'], 19 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 20 | ] 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assert { process.success } 27 | assertAll( 28 | { assert snapshot( 29 | file(process.out.hashmap[0][1]).name, 30 | process.out.versions, 31 | path(process.out.versions[0]).yaml 32 | ).match() 33 | } 34 | ) 35 | } 36 | } 37 | 38 | test("sarscov2 - fasta - stub") { 39 | 40 | options "-stub" 41 | when { 42 | 43 | process { 44 | """ 45 | 
input[0] = [ 46 | [id:'test'], 47 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 48 | ] 49 | """ 50 | } 51 | } 52 | 53 | then { 54 | assert { process.success } 55 | assertAll( 56 | { assert snapshot( 57 | process.out, 58 | path(process.out.versions[0]).yaml 59 | ).match() } 60 | ) 61 | } 62 | } 63 | 64 | // TODO Add test using alt-masked bed file 65 | // https://github.com/Illumina/dragmap#build-hash-table-using-an-alt-masked-bed-file 66 | } 67 | -------------------------------------------------------------------------------- /modules/nf-side/dragmap/hashtable/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "sarscov2 - fasta - stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | [ 11 | 12 | ] 13 | ] 14 | ], 15 | "1": [ 16 | "versions.yml:md5,050c28333b92fac50eec250c28b841d0" 17 | ], 18 | "hashmap": [ 19 | [ 20 | { 21 | "id": "test" 22 | }, 23 | [ 24 | 25 | ] 26 | ] 27 | ], 28 | "versions": [ 29 | "versions.yml:md5,050c28333b92fac50eec250c28b841d0" 30 | ] 31 | }, 32 | { 33 | "DRAGMAP_HASHTABLE": { 34 | "dragmap": "1.2.1" 35 | } 36 | } 37 | ], 38 | "meta": { 39 | "nf-test": "0.9.2", 40 | "nextflow": "24.10.5" 41 | }, 42 | "timestamp": "2025-04-10T11:34:52.49644133" 43 | }, 44 | "sarscov2 - fasta": { 45 | "content": [ 46 | "dragmap", 47 | [ 48 | "versions.yml:md5,050c28333b92fac50eec250c28b841d0" 49 | ], 50 | { 51 | "DRAGMAP_HASHTABLE": { 52 | "dragmap": "1.2.1" 53 | } 54 | } 55 | ], 56 | "meta": { 57 | "nf-test": "0.9.2", 58 | "nextflow": "24.10.5" 59 | }, 60 | "timestamp": "2025-04-10T11:34:44.786652653" 61 | } 62 | } -------------------------------------------------------------------------------- /modules/nf-side/gatk4/createsequencedictionary/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: 
$schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | # renovate: datasource=conda depName=bioconda/gatk4 8 | - bioconda::gatk4=4.6.1.0 9 | - bioconda::gcnvkernel=0.9 10 | -------------------------------------------------------------------------------- /modules/nf-side/gatk4/createsequencedictionary/main.nf: -------------------------------------------------------------------------------- 1 | process GATK4_CREATESEQUENCEDICTIONARY { 2 | tag "$fasta" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/b2/b28daf5d9bb2f0d129dcad1b7410e0dd8a9b087aaf3ec7ced929b1f57624ad98/data': 8 | 'community.wave.seqera.io/library/gatk4_gcnvkernel:e48d414933d188cd' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path('*.dict') , emit: dict 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | 23 | def avail_mem = 6144 24 | if (!task.memory) { 25 | log.info '[GATK CreateSequenceDictionary] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' 26 | } else { 27 | avail_mem = (task.memory.mega*0.8).intValue() 28 | } 29 | """ 30 | gatk --java-options "-Xmx${avail_mem}M -XX:-UsePerfData" \\ 31 | CreateSequenceDictionary \\ 32 | --REFERENCE $fasta \\ 33 | --URI $fasta \\ 34 | --TMP_DIR . 
\\ 35 | $args 36 | 37 | cat <<-END_VERSIONS > versions.yml 38 | "${task.process}": 39 | gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') 40 | END_VERSIONS 41 | """ 42 | 43 | stub: 44 | """ 45 | touch ${fasta.baseName}.dict 46 | 47 | cat <<-END_VERSIONS > versions.yml 48 | "${task.process}": 49 | gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') 50 | END_VERSIONS 51 | """ 52 | } 53 | -------------------------------------------------------------------------------- /modules/nf-side/gatk4/createsequencedictionary/meta.yml: -------------------------------------------------------------------------------- 1 | name: gatk4_createsequencedictionary 2 | description: Creates a sequence dictionary for a reference sequence 3 | keywords: 4 | - createsequencedictionary 5 | - dictionary 6 | - fasta 7 | - gatk4 8 | tools: 9 | - gatk: 10 | description: | 11 | Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools 12 | with a primary focus on variant discovery and genotyping. Its powerful processing engine 13 | and high-performance computing features make it capable of taking on projects of any size. 14 | homepage: https://gatk.broadinstitute.org/hc/en-us 15 | documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s 16 | doi: 10.1158/1538-7445.AM2017-3590 17 | licence: ["Apache-2.0"] 18 | identifier: "" 19 | input: 20 | - - meta: 21 | type: map 22 | description: | 23 | Groovy Map containing reference information 24 | e.g. 
[ id:'genome' ] 25 | - fasta: 26 | type: file 27 | description: Input fasta file 28 | pattern: "*.{fasta,fa}" 29 | output: 30 | - dict: 31 | - meta: 32 | type: file 33 | description: gatk dictionary file 34 | pattern: "*.{dict}" 35 | - "*.dict": 36 | type: file 37 | description: gatk dictionary file 38 | pattern: "*.{dict}" 39 | - versions: 40 | - versions.yml: 41 | type: file 42 | description: File containing software versions 43 | pattern: "versions.yml" 44 | authors: 45 | - "@maxulysse" 46 | - "@ramprasadn" 47 | maintainers: 48 | - "@maxulysse" 49 | - "@ramprasadn" 50 | -------------------------------------------------------------------------------- /modules/nf-side/gatk4/createsequencedictionary/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process GATK4_CREATESEQUENCEDICTIONARY" 4 | script "../main.nf" 5 | process "GATK4_CREATESEQUENCEDICTIONARY" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "gatk4" 10 | tag "gatk4/createsequencedictionary" 11 | 12 | test("sarscov2 - fasta") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = [ [ id:'test' ], // meta map 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot(process.out).match() } 28 | ) 29 | } 30 | 31 | } 32 | 33 | test("sarscov2 - fasta - stub") { 34 | 35 | options "-stub" 36 | 37 | when { 38 | process { 39 | """ 40 | input[0] = [ [ id:'test' ], // meta map 41 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.fasta', checkIfExists: true) 42 | ] 43 | """ 44 | } 45 | } 46 | 47 | then { 48 | assertAll( 49 | { assert process.success }, 50 | { assert snapshot(process.out).match() } 51 | ) 52 | } 53 | 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- 
/modules/nf-side/gatk4/createsequencedictionary/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "sarscov2 - fasta - stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test" 9 | }, 10 | "genome.dict:md5,d41d8cd98f00b204e9800998ecf8427e" 11 | ] 12 | ], 13 | "1": [ 14 | "versions.yml:md5,e993b2c99f7f6b0fcd8428de15c61439" 15 | ], 16 | "dict": [ 17 | [ 18 | { 19 | "id": "test" 20 | }, 21 | "genome.dict:md5,d41d8cd98f00b204e9800998ecf8427e" 22 | ] 23 | ], 24 | "versions": [ 25 | "versions.yml:md5,e993b2c99f7f6b0fcd8428de15c61439" 26 | ] 27 | } 28 | ], 29 | "meta": { 30 | "nf-test": "0.9.1", 31 | "nextflow": "24.10.0" 32 | }, 33 | "timestamp": "2024-10-31T10:51:56.155954077" 34 | }, 35 | "sarscov2 - fasta": { 36 | "content": [ 37 | { 38 | "0": [ 39 | [ 40 | { 41 | "id": "test" 42 | }, 43 | "genome.dict:md5,7362679f176e0f52add03c08f457f646" 44 | ] 45 | ], 46 | "1": [ 47 | "versions.yml:md5,e993b2c99f7f6b0fcd8428de15c61439" 48 | ], 49 | "dict": [ 50 | [ 51 | { 52 | "id": "test" 53 | }, 54 | "genome.dict:md5,7362679f176e0f52add03c08f457f646" 55 | ] 56 | ], 57 | "versions": [ 58 | "versions.yml:md5,e993b2c99f7f6b0fcd8428de15c61439" 59 | ] 60 | } 61 | ], 62 | "meta": { 63 | "nf-test": "0.9.1", 64 | "nextflow": "24.10.0" 65 | }, 66 | "timestamp": "2024-10-31T10:51:45.562993875" 67 | } 68 | } -------------------------------------------------------------------------------- /modules/nf-side/gawk/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::gawk=5.3.0 8 | -------------------------------------------------------------------------------- /modules/nf-side/gawk/main.nf: 
-------------------------------------------------------------------------------- 1 | process GAWK { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/gawk:5.3.0' : 8 | 'biocontainers/gawk:5.3.0' }" 9 | 10 | input: 11 | tuple val(meta), path(input, arity: '0..*') 12 | path(program_file) 13 | val(disable_redirect_output) 14 | 15 | output: 16 | tuple val(meta), path("*.${suffix}"), emit: output 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' // args is used for the main arguments of the tool 24 | def args2 = task.ext.args2 ?: '' // args2 is used to specify a program when no program file has been given 25 | prefix = task.ext.prefix ?: "${meta.id}" 26 | suffix = task.ext.suffix ?: "${input.collect{ it.getExtension()}.get(0)}" // use the first extension of the input files 27 | 28 | program = program_file ? "-f ${program_file}" : "${args2}" 29 | lst_gz = input.findResults{ it.getExtension().endsWith("gz") ? it.toString() : null } 30 | unzip = lst_gz ? "gunzip -q -f ${lst_gz.join(" ")}" : "" 31 | input_cmd = input.collect { it.toString() - ~/\.gz$/ }.join(" ") 32 | output_cmd = suffix.endsWith("gz") ? "| gzip > ${prefix}.${suffix}" : "> ${prefix}.${suffix}" 33 | output = disable_redirect_output ? "" : output_cmd 34 | cleanup = lst_gz ? "rm ${lst_gz.collect{ it - ~/\.gz$/ }.join(" ")}" : "" 35 | 36 | input.collect{ 37 | assert it.name != "${prefix}.${suffix}" : "Input and output names are the same, set prefix in module configuration to disambiguate!" 
38 | } 39 | 40 | """ 41 | ${unzip} 42 | 43 | awk \\ 44 | ${args} \\ 45 | ${program} \\ 46 | ${input_cmd} \\ 47 | ${output} 48 | 49 | ${cleanup} 50 | 51 | cat <<-END_VERSIONS > versions.yml 52 | "${task.process}": 53 | gawk: \$(awk -Wversion | sed '1!d; s/.*Awk //; s/,.*//') 54 | END_VERSIONS 55 | """ 56 | 57 | stub: 58 | prefix = task.ext.prefix ?: "${meta.id}" 59 | suffix = task.ext.suffix ?: "${input.getExtension()}" 60 | def create_cmd = suffix.endsWith("gz") ? "echo '' | gzip >" : "touch" 61 | 62 | """ 63 | ${create_cmd} ${prefix}.${suffix} 64 | 65 | cat <<-END_VERSIONS > versions.yml 66 | "${task.process}": 67 | gawk: \$(awk -Wversion | sed '1!d; s/.*Awk //; s/,.*//') 68 | END_VERSIONS 69 | """ 70 | } 71 | -------------------------------------------------------------------------------- /modules/nf-side/gawk/meta.yml: -------------------------------------------------------------------------------- 1 | name: "gawk" 2 | description: | 3 | If you are like many computer users, you would frequently like to make changes in various text files 4 | wherever certain patterns appear, or extract data from parts of certain lines while discarding the rest. 5 | The job is easy with awk, especially the GNU implementation gawk. 6 | keywords: 7 | - gawk 8 | - awk 9 | - txt 10 | - text 11 | - file parsing 12 | tools: 13 | - "gawk": 14 | description: "GNU awk" 15 | homepage: "https://www.gnu.org/software/gawk/" 16 | documentation: "https://www.gnu.org/software/gawk/manual/" 17 | tool_dev_url: "https://www.gnu.org/prep/ftp.html" 18 | licence: ["GPL v3"] 19 | identifier: "" 20 | input: 21 | - - meta: 22 | type: map 23 | description: | 24 | Groovy Map containing sample information 25 | e.g. [ id:'test', single_end:false ] 26 | - input: 27 | type: file 28 | description: The input file - Specify the logic that needs to be executed on 29 | this file on the `ext.args2` or in the program file. 30 | If the files have a `.gz` extension, they will be unzipped using `zcat`. 
31 | pattern: "*" 32 | - - program_file: 33 | type: file 34 | description: Optional file containing logic for awk to execute. If you don't 35 | wish to use a file, you can use `ext.args2` to specify the logic. 36 | pattern: "*" 37 | - - disable_redirect_output: 38 | type: boolean 39 | description: Disable the redirection of awk output to a given file. This is 40 | useful if you want to use awk's built-in redirect to write files instead 41 | of the shell's redirect. 42 | output: 43 | - output: 44 | - meta: 45 | type: map 46 | description: | 47 | Groovy Map containing sample information 48 | e.g. [ id:'test', single_end:false ] 49 | - "*.${suffix}": 50 | type: file 51 | description: The output file - if using shell redirection, specify the name of this 52 | file using `ext.prefix` and the extension using `ext.suffix`. Otherwise, ensure 53 | the awk program produces files with the extension in `ext.suffix`. 54 | pattern: "*" 55 | - versions: 56 | - versions.yml: 57 | type: file 58 | description: File containing software versions 59 | pattern: "versions.yml" 60 | authors: 61 | - "@nvnieuwk" 62 | maintainers: 63 | - "@nvnieuwk" 64 | -------------------------------------------------------------------------------- /modules/nf-side/gawk/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: GAWK { 3 | ext.suffix = params.gawk_suffix 4 | ext.args2 = params.gawk_args2 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /modules/nf-side/gffread/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::gffread=0.12.7 8 | -------------------------------------------------------------------------------- 
/modules/nf-side/gffread/main.nf: -------------------------------------------------------------------------------- 1 | process GFFREAD { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/gffread:0.12.7--hdcf5f25_4' : 8 | 'biocontainers/gffread:0.12.7--hdcf5f25_4' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta), path(gff) 12 | 13 | output: 14 | tuple val(meta), path("*.gtf") , emit: gtf , optional: true 15 | tuple val(meta), path("*.gff3") , emit: gffread_gff , optional: true 16 | tuple val(meta), path("*.fasta"), emit: gffread_fasta , optional: true 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | def prefix = task.ext.prefix ?: "${meta.id}" 25 | def extension = args.contains("-T") ? 'gtf' : ( ( ['-w', '-x', '-y' ].any { args.contains(it) } ) ? 'fasta' : 'gff3' ) 26 | def fasta_arg = fasta ? "-g $fasta" : '' 27 | def output_name = "${prefix}.${extension}" 28 | def output = extension == "fasta" ? "$output_name" : "-o $output_name" 29 | def args_sorted = args.replaceAll(/(.*)(-[wxy])(.*)/) { all, pre, param, post -> "$pre $post $param" }.trim() 30 | // args_sorted = Move '-w', '-x', and '-y' to the end of the args string as gffread expects the file name after these parameters 31 | if ( "$output_name" in [ "$gff", "$fasta" ] ) error "Input and output names are the same, use \"task.ext.prefix\" to disambiguate!" 
32 | """ 33 | gffread \\ 34 | $gff \\ 35 | $fasta_arg \\ 36 | $args_sorted \\ 37 | $output 38 | 39 | cat <<-END_VERSIONS > versions.yml 40 | "${task.process}": 41 | gffread: \$(gffread --version 2>&1) 42 | END_VERSIONS 43 | """ 44 | 45 | stub: 46 | def args = task.ext.args ?: '' 47 | def prefix = task.ext.prefix ?: "${meta.id}" 48 | def extension = args.contains("-T") ? 'gtf' : ( ( ['-w', '-x', '-y' ].any { args.contains(it) } ) ? 'fasta' : 'gff3' ) 49 | def output_name = "${prefix}.${extension}" 50 | if ( "$output_name" in [ "$gff", "$fasta" ] ) error "Input and output names are the same, use \"task.ext.prefix\" to disambiguate!" 51 | """ 52 | touch $output_name 53 | 54 | cat <<-END_VERSIONS > versions.yml 55 | "${task.process}": 56 | gffread: \$(gffread --version 2>&1) 57 | END_VERSIONS 58 | """ 59 | } 60 | -------------------------------------------------------------------------------- /modules/nf-side/gffread/meta.yml: -------------------------------------------------------------------------------- 1 | name: gffread 2 | description: Validate, filter, convert and perform various other operations on GFF 3 | files 4 | keywords: 5 | - gff 6 | - conversion 7 | - validation 8 | tools: 9 | - gffread: 10 | description: GFF/GTF utility providing format conversions, region filtering, FASTA 11 | sequence extraction and more. 12 | homepage: http://ccb.jhu.edu/software/stringtie/gff.shtml#gffread 13 | documentation: http://ccb.jhu.edu/software/stringtie/gff.shtml#gffread 14 | tool_dev_url: https://github.com/gpertea/gffread 15 | doi: 10.12688/f1000research.23297.1 16 | licence: ["MIT"] 17 | identifier: biotools:gffread 18 | input: 19 | - - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing meta data 23 | e.g. [ id:'test' ] 24 | - gff: 25 | type: file 26 | description: A reference file in either the GFF3, GFF2 or GTF format. 
pattern: "*.{gff,gtf}"
-------------------------------------------------------------------------------- 1 | process { 2 | withName: GFFREAD { 3 | ext.args = '-T' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/build/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hisat2=2.2.1 8 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/build/main.nf: -------------------------------------------------------------------------------- 1 | process HISAT2_BUILD { 2 | tag "$fasta" 3 | label 'process_high' 4 | label 'process_high_memory' 5 | 6 | conda "${moduleDir}/environment.yml" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 8 | 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : 9 | 'biocontainers/hisat2:2.2.1--h1b792b2_3' }" 10 | 11 | input: 12 | tuple val(meta), path(fasta), path(gtf), path(splicesites) 13 | 14 | output: 15 | tuple val(meta), path("hisat2") , emit: index 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def avail_mem = 0 24 | if (!task.memory) { 25 | log.info "[HISAT2 index build] Available memory not known - defaulting to 0. Specify process memory requirements to change this." 26 | } else { 27 | log.info "[HISAT2 index build] Available memory: ${task.memory}" 28 | avail_mem = task.memory.toGiga() 29 | } 30 | 31 | def ss = '' 32 | def exon = '' 33 | def extract_exons = '' 34 | def hisat2_build_memory = params.hisat2_build_memory ? 
(params.hisat2_build_memory as MemoryUnit).toGiga() : 0 35 | if (avail_mem >= hisat2_build_memory) { 36 | log.info "[HISAT2 index build] At least ${hisat2_build_memory} GB available, so using splice sites and exons to build HISAT2 index" 37 | extract_exons = gtf ? "hisat2_extract_exons.py $gtf > ${gtf.baseName}.exons.txt" : "" 38 | ss = splicesites ? "--ss $splicesites" : "" 39 | exon = gtf ? "--exon ${gtf.baseName}.exons.txt" : "" 40 | } else { 41 | log.info "[HISAT2 index build] Less than ${hisat2_build_memory} GB available, so NOT using splice sites and exons to build HISAT2 index." 42 | log.info "[HISAT2 index build] Use --hisat2_build_memory [small number] to skip this check." 43 | } 44 | """ 45 | mkdir hisat2 46 | $extract_exons 47 | hisat2-build \\ 48 | -p $task.cpus \\ 49 | $ss \\ 50 | $exon \\ 51 | $args \\ 52 | $fasta \\ 53 | hisat2/${fasta.baseName} 54 | 55 | cat <<-END_VERSIONS > versions.yml 56 | "${task.process}": 57 | hisat2: \$(hisat2 --version | grep -o 'version [^ ]*' | cut -d ' ' -f 2) 58 | END_VERSIONS 59 | """ 60 | 61 | stub: 62 | """ 63 | mkdir hisat2 64 | 65 | cat <<-END_VERSIONS > versions.yml 66 | "${task.process}": 67 | hisat2: \$(hisat2 --version | grep -o 'version [^ ]*' | cut -d ' ' -f 2) 68 | END_VERSIONS 69 | """ 70 | } 71 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/build/meta.yml: -------------------------------------------------------------------------------- 1 | name: hisat2_build 2 | description: Builds HISAT2 index for reference genome 3 | keywords: 4 | - build 5 | - index 6 | - fasta 7 | - genome 8 | - reference 9 | tools: 10 | - hisat2: 11 | description: HISAT2 is a fast and sensitive alignment program for mapping next-generation 12 | sequencing reads (both DNA and RNA) to a population of human genomes as well 13 | as to a single reference genome. 
14 | homepage: https://daehwankimlab.github.io/hisat2/ 15 | documentation: https://daehwankimlab.github.io/hisat2/manual/ 16 | doi: "10.1038/s41587-019-0201-4" 17 | licence: ["MIT"] 18 | identifier: biotools:hisat2 19 | input: 20 | - - meta: 21 | type: map 22 | description: | 23 | Groovy Map containing reference information 24 | e.g. [ id:'genome' ] 25 | - fasta: 26 | type: file 27 | description: Reference fasta file 28 | pattern: "*.{fa,fasta,fna}" 29 | - - meta2: 30 | type: map 31 | description: | 32 | Groovy Map containing reference information 33 | e.g. [ id:'genome' ] 34 | - gtf: 35 | type: file 36 | description: Reference gtf annotation file 37 | pattern: "*.{gtf}" 38 | - - meta3: 39 | type: map 40 | description: | 41 | Groovy Map containing reference information 42 | e.g. [ id:'genome' ] 43 | - splicesites: 44 | type: file 45 | description: Splices sites in gtf file 46 | pattern: "*.{txt}" 47 | output: 48 | - index: 49 | - meta: 50 | type: map 51 | description: | 52 | Groovy Map containing reference information 53 | e.g. 
[ id:'genome' ] 54 | - hisat2: 55 | type: file 56 | description: HISAT2 genome index file 57 | pattern: "*.ht2" 58 | - versions: 59 | - versions.yml: 60 | type: file 61 | description: File containing software versions 62 | pattern: "versions.yml" 63 | authors: 64 | - "@ntoda03" 65 | maintainers: 66 | - "@ntoda03" 67 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/extractsplicesites/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hisat2=2.2.1 8 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/extractsplicesites/main.nf: -------------------------------------------------------------------------------- 1 | process HISAT2_EXTRACTSPLICESITES { 2 | tag "$gtf" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/hisat2:2.2.1--h1b792b2_3' : 8 | 'biocontainers/hisat2:2.2.1--h1b792b2_3' }" 9 | 10 | input: 11 | tuple val(meta), path(gtf) 12 | 13 | output: 14 | tuple val(meta), path("*.splice_sites.txt"), emit: txt 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | """ 23 | hisat2_extract_splice_sites.py $gtf > ${gtf.baseName}.splice_sites.txt 24 | cat <<-END_VERSIONS > versions.yml 25 | "${task.process}": 26 | hisat2: \$(hisat2 --version | grep -o 'version [^ ]*' | cut -d ' ' -f 2) 27 | END_VERSIONS 28 | """ 29 | 30 | stub: 31 | """ 32 | touch ${gtf.baseName}.splice_sites.txt 33 | 34 | cat <<-END_VERSIONS > versions.yml 35 | "${task.process}": 36 | hisat2: \$(hisat2 --version | grep -o 'version [^ ]*' | cut -d ' ' -f 2) 37 | END_VERSIONS 38 | """ 39 | } 40 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/extractsplicesites/meta.yml: -------------------------------------------------------------------------------- 1 | name: hisat2_extractsplicesites 2 | description: Extracts splicing sites from a gtf files 3 | keywords: 4 | - splicing 5 | - gtf 6 | - genome 7 | - reference 8 | tools: 9 | - hisat2: 10 | description: HISAT2 is a fast and sensitive alignment program for mapping next-generation 11 | sequencing reads (both DNA and RNA) to a population of human genomes as well 12 | as to a single reference genome. 13 | homepage: https://daehwankimlab.github.io/hisat2/ 14 | documentation: https://daehwankimlab.github.io/hisat2/manual/ 15 | doi: "10.1038/s41587-019-0201-4" 16 | licence: ["MIT"] 17 | identifier: biotools:hisat2 18 | input: 19 | - - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing reference information 23 | e.g. 
[ id:'genome' ] 24 | - gtf: 25 | type: file 26 | description: Reference gtf annotation file 27 | pattern: "*.{gtf}" 28 | output: 29 | - txt: 30 | - meta: 31 | type: map 32 | description: | 33 | Groovy Map containing reference information 34 | e.g. [ id:'genome' ] 35 | - "*.splice_sites.txt": 36 | type: file 37 | description: Splice sites in txt file 38 | pattern: "*.txt" 39 | - versions: 40 | - versions.yml: 41 | type: file 42 | description: File containing software versions 43 | pattern: "versions.yml" 44 | authors: 45 | - "@ntoda03" 46 | - "@ramprasadn" 47 | maintainers: 48 | - "@ntoda03" 49 | - "@ramprasadn" 50 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/extractsplicesites/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process HISAT2_EXTRACTSPLICESITES" 4 | script "../main.nf" 5 | process "HISAT2_EXTRACTSPLICESITES" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "hisat2" 9 | tag "hisat2/extractsplicesites" 10 | 11 | test("Should run without failures") { 12 | 13 | when { 14 | params { 15 | outdir = "$outputDir" 16 | } 17 | process { 18 | """ 19 | input[0] = Channel.of([ 20 | [id:'genome'], 21 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.gtf', checkIfExists: true) 22 | ]) 23 | """ 24 | } 25 | } 26 | 27 | then { 28 | assertAll( 29 | { assert process.success }, 30 | { assert path("${process.out.txt[0][1]}").exists() }, 31 | { assert snapshot(process.out.versions).match() } 32 | ) 33 | } 34 | } 35 | 36 | test("test - stub") { 37 | 38 | options "-stub" 39 | 40 | when { 41 | params { 42 | outdir = "$outputDir" 43 | } 44 | process { 45 | """ 46 | input[0] = Channel.of([ 47 | [id:'genome'], 48 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/genome.gtf', checkIfExists: true) 49 | ]) 50 | """ 51 | } 52 | } 53 | 54 | then { 55 | assertAll( 56 | { assert 
process.success }, 57 | { assert snapshot(process.out).match() } 58 | ) 59 | } 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /modules/nf-side/hisat2/extractsplicesites/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "test - stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "genome" 9 | }, 10 | "genome.splice_sites.txt:md5,d41d8cd98f00b204e9800998ecf8427e" 11 | ] 12 | ], 13 | "1": [ 14 | "versions.yml:md5,eeea7231fe197810659b8bad4133aff2" 15 | ], 16 | "txt": [ 17 | [ 18 | { 19 | "id": "genome" 20 | }, 21 | "genome.splice_sites.txt:md5,d41d8cd98f00b204e9800998ecf8427e" 22 | ] 23 | ], 24 | "versions": [ 25 | "versions.yml:md5,eeea7231fe197810659b8bad4133aff2" 26 | ] 27 | } 28 | ], 29 | "meta": { 30 | "nf-test": "0.8.4", 31 | "nextflow": "24.04.2" 32 | }, 33 | "timestamp": "2024-06-20T17:34:13.229903" 34 | }, 35 | "Should run without failures": { 36 | "content": [ 37 | [ 38 | "versions.yml:md5,eeea7231fe197810659b8bad4133aff2" 39 | ] 40 | ], 41 | "meta": { 42 | "nf-test": "0.8.4", 43 | "nextflow": "24.04.2" 44 | }, 45 | "timestamp": "2024-01-18T20:56:30.71763" 46 | } 47 | } -------------------------------------------------------------------------------- /modules/nf-side/kallisto/index/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::kallisto=0.51.1 8 | -------------------------------------------------------------------------------- /modules/nf-side/kallisto/index/main.nf: -------------------------------------------------------------------------------- 1 | process KALLISTO_INDEX { 2 | tag "$fasta" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | 
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/kallisto:0.51.1--heb0cbe2_0': 8 | 'biocontainers/kallisto:0.51.1--heb0cbe2_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path("kallisto") , emit: index 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | """ 23 | kallisto \\ 24 | index \\ 25 | $args \\ 26 | -i kallisto \\ 27 | $fasta 28 | 29 | cat <<-END_VERSIONS > versions.yml 30 | "${task.process}": 31 | kallisto: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') 32 | END_VERSIONS 33 | """ 34 | 35 | stub: 36 | """ 37 | mkdir kallisto 38 | 39 | cat <<-END_VERSIONS > versions.yml 40 | "${task.process}": 41 | kallisto: \$(echo \$(kallisto 2>&1) | sed 's/^kallisto //; s/Usage.*\$//') 42 | END_VERSIONS 43 | """ 44 | } 45 | -------------------------------------------------------------------------------- /modules/nf-side/kallisto/index/meta.yml: -------------------------------------------------------------------------------- 1 | name: kallisto_index 2 | description: Create kallisto index 3 | keywords: 4 | - kallisto 5 | - kallisto/index 6 | - index 7 | tools: 8 | - kallisto: 9 | description: Quantifying abundances of transcripts from bulk and single-cell RNA-Seq 10 | data, or more generally of target sequences using high-throughput sequencing 11 | reads. 12 | homepage: https://pachterlab.github.io/kallisto/ 13 | documentation: https://pachterlab.github.io/kallisto/manual 14 | tool_dev_url: https://github.com/pachterlab/kallisto 15 | licence: ["BSD-2-Clause"] 16 | identifier: biotools:kallisto 17 | input: 18 | - - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing reference information 22 | e.g. 
[ id:'test' ] 23 | - fasta: 24 | type: file 25 | description: genome fasta file 26 | pattern: "*.{fasta}" 27 | output: 28 | - index: 29 | - meta: 30 | type: map 31 | description: | 32 | Groovy Map containing reference information 33 | e.g. [ id:'test' ] 34 | - kallisto: 35 | type: directory 36 | description: Kallisto genome index 37 | pattern: "*.idx" 38 | - versions: 39 | - versions.yml: 40 | type: file 41 | description: File containing software versions 42 | pattern: "versions.yml" 43 | authors: 44 | - "@ggabernet" 45 | maintainers: 46 | - "@ggabernet" 47 | -------------------------------------------------------------------------------- /modules/nf-side/kallisto/index/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process KALLISTO_INDEX" 4 | script "../main.nf" 5 | process "KALLISTO_INDEX" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "kallisto" 9 | tag "kallisto/index" 10 | 11 | test("sarscov2 transcriptome.fasta") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = Channel.of([ 17 | [ id:'transcriptome' ], // meta map 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/transcriptome.fasta', checkIfExists: true) 19 | ]) 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert path(process.out.index.get(0).get(1)).exists() }, 28 | { assert snapshot(process.out.versions).match() } 29 | ) 30 | } 31 | } 32 | 33 | test("sarscov2 transcriptome.fasta - stub") { 34 | 35 | options "-stub" 36 | 37 | when { 38 | process { 39 | """ 40 | input[0] = Channel.of([ 41 | [ id:'transcriptome' ], // meta map 42 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/transcriptome.fasta', checkIfExists: true) 43 | ]) 44 | """ 45 | } 46 | } 47 | 48 | then { 49 | assertAll( 50 | { assert process.success }, 51 | { assert snapshot(process.out).match() } 52 | ) 53 | } 54 | } 55 | } 56 | 
-------------------------------------------------------------------------------- /modules/nf-side/kallisto/index/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "sarscov2 transcriptome.fasta - stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "transcriptome" 9 | }, 10 | [ 11 | 12 | ] 13 | ] 14 | ], 15 | "1": [ 16 | "versions.yml:md5,e23afe0f9a5d0dc20b05c27fe59ad041" 17 | ], 18 | "index": [ 19 | [ 20 | { 21 | "id": "transcriptome" 22 | }, 23 | [ 24 | 25 | ] 26 | ] 27 | ], 28 | "versions": [ 29 | "versions.yml:md5,e23afe0f9a5d0dc20b05c27fe59ad041" 30 | ] 31 | } 32 | ], 33 | "meta": { 34 | "nf-test": "0.9.0", 35 | "nextflow": "24.04.4" 36 | }, 37 | "timestamp": "2024-10-19T18:51:55.402665844" 38 | }, 39 | "sarscov2 transcriptome.fasta": { 40 | "content": [ 41 | [ 42 | "versions.yml:md5,e23afe0f9a5d0dc20b05c27fe59ad041" 43 | ] 44 | ], 45 | "meta": { 46 | "nf-test": "0.9.0", 47 | "nextflow": "24.04.4" 48 | }, 49 | "timestamp": "2024-10-19T18:51:42.328144373" 50 | } 51 | } -------------------------------------------------------------------------------- /modules/nf-side/msisensorpro/scan/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::msisensor-pro=1.2.0 8 | -------------------------------------------------------------------------------- /modules/nf-side/msisensorpro/scan/main.nf: -------------------------------------------------------------------------------- 1 | process MSISENSORPRO_SCAN { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/msisensor-pro:1.2.0--hfc31af2_0' : 8 | 'biocontainers/msisensor-pro:1.2.0--hfc31af2_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path("*.list"), emit: list 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def prefix = task.ext.prefix ?: "${meta.id}" 23 | """ 24 | msisensor-pro \\ 25 | scan \\ 26 | -d $fasta \\ 27 | -o ${prefix}.msisensor_scan.list \\ 28 | $args 29 | 30 | cat <<-END_VERSIONS > versions.yml 31 | "${task.process}": 32 | msisensor-pro: \$(msisensor-pro 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') 33 | END_VERSIONS 34 | """ 35 | 36 | stub: 37 | def prefix = task.ext.prefix ?: "${meta.id}" 38 | """ 39 | touch ${prefix}.msisensor_scan.list 40 | 41 | cat <<-END_VERSIONS > versions.yml 42 | "${task.process}": 43 | msisensor-pro: \$(msisensor-pro 2>&1 | sed -nE 's/Version:\\sv([0-9]\\.[0-9])/\\1/ p') 44 | END_VERSIONS 45 | """ 46 | } 47 | -------------------------------------------------------------------------------- /modules/nf-side/msisensorpro/scan/meta.yml: -------------------------------------------------------------------------------- 1 | name: msisensorpro_scan 2 | description: MSIsensor-pro evaluates Microsatellite Instability (MSI) for cancer patients 3 | with next generation sequencing data. It accepts the whole genome sequencing, whole 4 | exome sequencing and target region (panel) sequencing data as input 5 | keywords: 6 | - micro-satellite-scan 7 | - msisensor-pro 8 | - scan 9 | tools: 10 | - msisensorpro: 11 | description: Microsatellite Instability (MSI) detection using high-throughput 12 | sequencing data. 
13 | homepage: https://github.com/xjtu-omics/msisensor-pro 14 | documentation: https://github.com/xjtu-omics/msisensor-pro/wiki 15 | tool_dev_url: https://github.com/xjtu-omics/msisensor-pro 16 | doi: "10.1016/j.gpb.2020.02.001" 17 | licence: ["Custom Licence"] 18 | identifier: "" 19 | input: 20 | - - meta: 21 | type: map 22 | description: | 23 | Groovy Map containing sample information 24 | e.g. [ id:'test', single_end:false ] 25 | - fasta: 26 | type: file 27 | description: Reference genome 28 | pattern: "*.{fasta}" 29 | output: 30 | - list: 31 | - meta: 32 | type: map 33 | description: | 34 | Groovy Map containing sample information 35 | e.g. [ id:'test', single_end:false ] 36 | - "*.list": 37 | type: file 38 | description: File containing microsatellite list 39 | pattern: "*.{list}" 40 | - versions: 41 | - versions.yml: 42 | type: file 43 | description: File containing software versions 44 | pattern: "versions.yml" 45 | authors: 46 | - "@FriederikeHanssen" 47 | maintainers: 48 | - "@FriederikeHanssen" 49 | -------------------------------------------------------------------------------- /modules/nf-side/msisensorpro/scan/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_process { 3 | 4 | name "Test Process MSISENSORPRO_SCAN" 5 | script "../main.nf" 6 | process "MSISENSORPRO_SCAN" 7 | 8 | tag "modules" 9 | tag "modules_nfcore" 10 | tag "msisensorpro" 11 | tag "msisensorpro/scan" 12 | 13 | test("test-msisensorpro-scan") { 14 | 15 | when { 16 | process { 17 | """ 18 | input[0] = [ 19 | [ id:'test', single_end:false ], // meta map 20 | file(params.modules_testdata_base_path + 'genomics/homo_sapiens/genome/chr21/sequence/genome.fasta', checkIfExists: true) 21 | ] 22 | 23 | """ 24 | } 25 | } 26 | 27 | then { 28 | assertAll( 29 | { assert process.success }, 30 | { assert snapshot(process.out).match() } 31 | ) 32 | } 33 | } 34 | 35 | test("test-msisensorpro-scan-stub") { 36 | options '-stub' 37 | 38 | 
when { 39 | process { 40 | """ 41 | input[0] = [ 42 | [ id:'test', single_end:false ], // meta map 43 | file(params.modules_testdata_base_path + 'genomics/homo_sapiens/genome/chr21/sequence/genome.fasta', checkIfExists: true) 44 | ] 45 | 46 | """ 47 | } 48 | } 49 | 50 | then { 51 | assertAll( 52 | { assert process.success }, 53 | { assert snapshot(process.out).match() } 54 | ) 55 | } 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /modules/nf-side/msisensorpro/scan/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "test-msisensorpro-scan-stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test", 9 | "single_end": false 10 | }, 11 | "test.msisensor_scan.list:md5,d41d8cd98f00b204e9800998ecf8427e" 12 | ] 13 | ], 14 | "1": [ 15 | "versions.yml:md5,e99820cdb69a600f5919ee1d7d5d1c3f" 16 | ], 17 | "list": [ 18 | [ 19 | { 20 | "id": "test", 21 | "single_end": false 22 | }, 23 | "test.msisensor_scan.list:md5,d41d8cd98f00b204e9800998ecf8427e" 24 | ] 25 | ], 26 | "versions": [ 27 | "versions.yml:md5,e99820cdb69a600f5919ee1d7d5d1c3f" 28 | ] 29 | } 30 | ], 31 | "meta": { 32 | "nf-test": "0.9.0", 33 | "nextflow": "24.04.4" 34 | }, 35 | "timestamp": "2024-09-05T16:44:21.450285" 36 | }, 37 | "test-msisensorpro-scan": { 38 | "content": [ 39 | { 40 | "0": [ 41 | [ 42 | { 43 | "id": "test", 44 | "single_end": false 45 | }, 46 | "test.msisensor_scan.list:md5,309d41b136993db24a9f3dade877753b" 47 | ] 48 | ], 49 | "1": [ 50 | "versions.yml:md5,e99820cdb69a600f5919ee1d7d5d1c3f" 51 | ], 52 | "list": [ 53 | [ 54 | { 55 | "id": "test", 56 | "single_end": false 57 | }, 58 | "test.msisensor_scan.list:md5,309d41b136993db24a9f3dade877753b" 59 | ] 60 | ], 61 | "versions": [ 62 | "versions.yml:md5,e99820cdb69a600f5919ee1d7d5d1c3f" 63 | ] 64 | } 65 | ], 66 | "meta": { 67 | "nf-test": "0.9.0", 68 | "nextflow": "24.04.4" 69 | }, 70 | "timestamp": 
"2024-09-05T16:44:09.684249" 71 | } 72 | } -------------------------------------------------------------------------------- /modules/nf-side/rsem/preparereference/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::rsem=1.3.3 8 | - bioconda::star=2.7.10a 9 | -------------------------------------------------------------------------------- /modules/nf-side/rsem/preparereference/meta.yml: -------------------------------------------------------------------------------- 1 | name: rsem_preparereference 2 | description: Prepare a reference genome for RSEM 3 | keywords: 4 | - rsem 5 | - genome 6 | - index 7 | tools: 8 | - rsem: 9 | description: | 10 | RSEM: accurate transcript quantification from RNA-Seq data with or without a reference genome 11 | homepage: https://github.com/deweylab/RSEM 12 | documentation: https://github.com/deweylab/RSEM 13 | doi: 10.1186/1471-2105-12-323 14 | licence: ["GPL-3.0-or-later"] 15 | identifier: biotools:rsem 16 | input: 17 | - - fasta: 18 | type: file 19 | description: The Fasta file of the reference genome 20 | pattern: "*.{fasta,fa}" 21 | - - gtf: 22 | type: file 23 | description: The GTF file of the reference genome 24 | pattern: "*.gtf" 25 | output: 26 | - index: 27 | - rsem: 28 | type: directory 29 | description: RSEM index directory 30 | pattern: "rsem" 31 | - transcript_fasta: 32 | - "*transcripts.fa": 33 | type: file 34 | description: Fasta file of transcripts 35 | pattern: "rsem/*transcripts.fa" 36 | - versions: 37 | - versions.yml: 38 | type: file 39 | description: File containing software versions 40 | pattern: "versions.yml" 41 | authors: 42 | - "@drpatelh" 43 | - "@kevinmenden" 44 | maintainers: 45 | - "@drpatelh" 46 | - "@kevinmenden" 47 | 
-------------------------------------------------------------------------------- /modules/nf-side/rsem/preparereference/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process RSEM_PREPAREREFERENCE" 4 | script "../main.nf" 5 | process "RSEM_PREPAREREFERENCE" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "rsem" 9 | tag "rsem/preparereference" 10 | 11 | test("homo_sapiens") { 12 | 13 | when { 14 | params { 15 | outdir = "$outputDir" 16 | } 17 | process { 18 | """ 19 | input[0] = Channel.of( 20 | [ 21 | [id: 'genome'], 22 | file(params.modules_testdata_base_path + 'genomics/homo_sapiens/genome/genome.fasta', checkIfExists: true), 23 | file(params.modules_testdata_base_path + 'genomics/homo_sapiens/genome/genome.gtf', checkIfExists: true) 24 | ] 25 | ) 26 | """ 27 | } 28 | } 29 | 30 | then { 31 | assertAll( 32 | { assert process.success }, 33 | { assert snapshot( 34 | process.out.index, 35 | process.out.transcript_fasta, 36 | process.out.versions).match() } 37 | ) 38 | } 39 | } 40 | 41 | test("homo_sapiens - stub") { 42 | 43 | options "-stub" 44 | 45 | when { 46 | process { 47 | """ 48 | input[0] = Channel.of( 49 | [ 50 | [id: 'genome'], 51 | file(params.modules_testdata_base_path + 'genomics/homo_sapiens/genome/genome.fasta', checkIfExists: true), 52 | file(params.modules_testdata_base_path + 'genomics/homo_sapiens/genome/genome.gtf', checkIfExists: true) 53 | ] 54 | ) 55 | """ 56 | } 57 | } 58 | 59 | then { 60 | assertAll( 61 | { assert process.success }, 62 | { assert snapshot(process.out).match() } 63 | ) 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /modules/nf-side/salmon/index/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 
3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::salmon=1.10.3 8 | -------------------------------------------------------------------------------- /modules/nf-side/salmon/index/main.nf: -------------------------------------------------------------------------------- 1 | process SALMON_INDEX { 2 | tag "$transcript_fasta" 3 | label "process_medium" 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/salmon:1.10.3--h6dccd9a_2' : 8 | 'biocontainers/salmon:1.10.3--h6dccd9a_2' }" 9 | 10 | input: 11 | tuple val(meta), path(genome_fasta), path(transcript_fasta) 12 | 13 | output: 14 | tuple val(meta), path("salmon"), emit: index 15 | path "versions.yml", emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def decoys = '' 23 | def fasta = transcript_fasta 24 | if (genome_fasta){ 25 | if (genome_fasta.endsWith('.gz')) { 26 | genome_fasta = "<(gunzip -c $genome_fasta)" 27 | } 28 | decoys='-d decoys.txt' 29 | fasta='gentrome.fa' 30 | } 31 | if (transcript_fasta.endsWith('.gz')) { 32 | transcript_fasta = "<(gunzip -c $transcript_fasta)" 33 | } 34 | """ 35 | if [ -n '$genome_fasta' ]; then 36 | grep '^>' $genome_fasta | cut -d ' ' -f 1 | cut -d \$'\\t' -f 1 | sed 's/>//g' > decoys.txt 37 | cat $transcript_fasta $genome_fasta > $fasta 38 | fi 39 | 40 | salmon \\ 41 | index \\ 42 | --threads $task.cpus \\ 43 | -t $fasta \\ 44 | $decoys \\ 45 | $args \\ 46 | -i salmon 47 | 48 | cat <<-END_VERSIONS > versions.yml 49 | "${task.process}": 50 | salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") 51 | END_VERSIONS 52 | """ 53 | 54 | stub: 55 | """ 56 | mkdir salmon 57 | touch salmon/complete_ref_lens.bin 58 | touch salmon/ctable.bin 59 | touch salmon/ctg_offsets.bin 60 | touch salmon/duplicate_clusters.tsv 61 | touch 
salmon/info.json 62 | touch salmon/mphf.bin 63 | touch salmon/pos.bin 64 | touch salmon/pre_indexing.log 65 | touch salmon/rank.bin 66 | touch salmon/refAccumLengths.bin 67 | touch salmon/ref_indexing.log 68 | touch salmon/reflengths.bin 69 | touch salmon/refseq.bin 70 | touch salmon/seq.bin 71 | touch salmon/versionInfo.json 72 | 73 | cat <<-END_VERSIONS > versions.yml 74 | "${task.process}": 75 | salmon: \$(echo \$(salmon --version) | sed -e "s/salmon //g") 76 | END_VERSIONS 77 | """ 78 | } 79 | -------------------------------------------------------------------------------- /modules/nf-side/salmon/index/meta.yml: -------------------------------------------------------------------------------- 1 | name: salmon_index 2 | description: Create index for salmon 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - salmon: 10 | description: | 11 | Salmon is a tool for wicked-fast transcript quantification from RNA-seq data 12 | homepage: https://salmon.readthedocs.io/en/latest/salmon.html 13 | manual: https://salmon.readthedocs.io/en/latest/salmon.html 14 | doi: 10.1038/nmeth.4197 15 | licence: ["GPL-3.0-or-later"] 16 | identifier: biotools:salmon 17 | input: 18 | - - genome_fasta: 19 | type: file 20 | description: Fasta file of the reference genome 21 | - - transcript_fasta: 22 | type: file 23 | description: Fasta file of the reference transcriptome 24 | output: 25 | - index: 26 | - salmon: 27 | type: directory 28 | description: Folder containing the salmon index files 29 | pattern: "salmon" 30 | - versions: 31 | - versions.yml: 32 | type: file 33 | description: File containing software versions 34 | pattern: "versions.yml" 35 | authors: 36 | - "@kevinmenden" 37 | - "@drpatelh" 38 | maintainers: 39 | - "@kevinmenden" 40 | - "@drpatelh" 41 | -------------------------------------------------------------------------------- /modules/nf-side/salmon/index/tests/main.nf.test.snap: 
-------------------------------------------------------------------------------- 1 | { 2 | "sarscov2 stub": { 3 | "content": [ 4 | "[complete_ref_lens.bin, ctable.bin, ctg_offsets.bin, duplicate_clusters.tsv, info.json, mphf.bin, pos.bin, pre_indexing.log, rank.bin, refAccumLengths.bin, ref_indexing.log, reflengths.bin, refseq.bin, seq.bin, versionInfo.json]", 5 | [ 6 | "versions.yml:md5,85337fa0a286ea35073ee5260974e307" 7 | ] 8 | ], 9 | "meta": { 10 | "nf-test": "0.9.2", 11 | "nextflow": "24.10.2" 12 | }, 13 | "timestamp": "2025-01-20T12:57:51.498323" 14 | }, 15 | "sarscov2": { 16 | "content": [ 17 | "[complete_ref_lens.bin, ctable.bin, ctg_offsets.bin, duplicate_clusters.tsv, info.json, mphf.bin, pos.bin, pre_indexing.log, rank.bin, refAccumLengths.bin, ref_indexing.log, reflengths.bin, refseq.bin, seq.bin, versionInfo.json]", 18 | [ 19 | "versions.yml:md5,85337fa0a286ea35073ee5260974e307" 20 | ] 21 | ], 22 | "meta": { 23 | "nf-test": "0.9.2", 24 | "nextflow": "24.10.2" 25 | }, 26 | "timestamp": "2025-01-20T12:57:33.474302" 27 | }, 28 | "sarscov2 transcriptome only": { 29 | "content": [ 30 | "[complete_ref_lens.bin, ctable.bin, ctg_offsets.bin, duplicate_clusters.tsv, info.json, mphf.bin, pos.bin, pre_indexing.log, rank.bin, refAccumLengths.bin, ref_indexing.log, reflengths.bin, refseq.bin, seq.bin, versionInfo.json]", 31 | [ 32 | "versions.yml:md5,85337fa0a286ea35073ee5260974e307" 33 | ] 34 | ], 35 | "meta": { 36 | "nf-test": "0.9.2", 37 | "nextflow": "24.10.2" 38 | }, 39 | "timestamp": "2025-01-20T12:57:42.420247" 40 | } 41 | } -------------------------------------------------------------------------------- /modules/nf-side/samtools/faidx/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::htslib=1.21 8 | 
- bioconda::samtools=1.21 9 | -------------------------------------------------------------------------------- /modules/nf-side/samtools/faidx/main.nf: -------------------------------------------------------------------------------- 1 | process SAMTOOLS_FAIDX { 2 | tag "$fasta" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/samtools:1.21--h50ea8bc_0' : 8 | 'biocontainers/samtools:1.21--h50ea8bc_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta), path(fai) 12 | val get_sizes 13 | 14 | output: 15 | tuple val(meta), path ("*.{fa,fasta}") , emit: fa, optional: true 16 | tuple val(meta), path ("*.sizes") , emit: sizes, optional: true 17 | tuple val(meta), path ("*.fai") , emit: fai, optional: true 18 | tuple val(meta), path ("*.gzi") , emit: gzi, optional: true 19 | path "versions.yml" , emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def get_sizes_command = get_sizes ? "cut -f 1,2 ${fasta}.fai > ${fasta}.sizes" : '' 27 | """ 28 | samtools \\ 29 | faidx \\ 30 | $fasta \\ 31 | $args 32 | 33 | ${get_sizes_command} 34 | 35 | cat <<-END_VERSIONS > versions.yml 36 | "${task.process}": 37 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 38 | END_VERSIONS 39 | """ 40 | 41 | stub: 42 | def match = (task.ext.args =~ /-o(?:utput)?\s(.*)\s?/).findAll() 43 | def fastacmd = match[0] ? "touch ${match[0][1]}" : '' 44 | def get_sizes_command = get_sizes ? 
"touch ${fasta}.sizes" : '' 45 | """ 46 | ${fastacmd} 47 | touch ${fasta}.fai 48 | if [[ "${fasta.extension}" == "gz" ]]; then 49 | touch ${fasta}.gzi 50 | fi 51 | 52 | ${get_sizes_command} 53 | 54 | cat <<-END_VERSIONS > versions.yml 55 | 56 | "${task.process}": 57 | samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') 58 | END_VERSIONS 59 | """ 60 | } 61 | -------------------------------------------------------------------------------- /modules/nf-side/samtools/faidx/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: SAMTOOLS_FAIDX { 4 | ext.args = 'MT192765.1 -o extract.fa' 5 | } 6 | 7 | } 8 | -------------------------------------------------------------------------------- /modules/nf-side/samtools/faidx/tests/nextflow2.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: SAMTOOLS_FAIDX { 4 | ext.args = '-o extract.fa' 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /modules/nf-side/star/genomegenerate/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | 7 | dependencies: 8 | - bioconda::htslib=1.21 9 | - bioconda::samtools=1.21 10 | - bioconda::star=2.7.11b 11 | - conda-forge::gawk=5.1.0 12 | -------------------------------------------------------------------------------- /modules/nf-side/star/genomegenerate/meta.yml: -------------------------------------------------------------------------------- 1 | name: star_genomegenerate 2 | description: Create index for STAR 3 | keywords: 4 | - index 5 | - fasta 6 | - genome 7 | - reference 8 | tools: 9 | - star: 10 | description: | 11 | STAR is a software package 
for mapping DNA sequences against 12 | a large reference genome, such as the human genome. 13 | homepage: https://github.com/alexdobin/STAR 14 | manual: https://github.com/alexdobin/STAR/blob/master/doc/STARmanual.pdf 15 | doi: 10.1093/bioinformatics/bts635 16 | licence: ["MIT"] 17 | identifier: biotools:star 18 | input: 19 | - - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing sample information 23 | e.g. [ id:'test', single_end:false ] 24 | - fasta: 25 | type: file 26 | description: Fasta file of the reference genome 27 | - - meta2: 28 | type: map 29 | description: | 30 | Groovy Map containing reference information 31 | e.g. [ id:'test' ] 32 | - gtf: 33 | type: file 34 | description: GTF file of the reference genome 35 | output: 36 | - index: 37 | - meta: 38 | type: map 39 | description: | 40 | Groovy Map containing sample information 41 | e.g. [ id:'test', single_end:false ] 42 | - star: 43 | type: directory 44 | description: Folder containing the star index files 45 | pattern: "star" 46 | - versions: 47 | - versions.yml: 48 | type: file 49 | description: File containing software versions 50 | pattern: "versions.yml" 51 | authors: 52 | - "@kevinmenden" 53 | - "@drpatelh" 54 | maintainers: 55 | - "@kevinmenden" 56 | - "@drpatelh" 57 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/bgziptabix/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | 7 | dependencies: 8 | - bioconda::htslib=1.21 9 | - bioconda::tabix=1.11 10 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/bgziptabix/main.nf: -------------------------------------------------------------------------------- 1 | process TABIX_BGZIPTABIX { 2 | tag 
"$meta.id" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/92/92859404d861ae01afb87e2b789aebc71c0ab546397af890c7df74e4ee22c8dd/data' : 8 | 'community.wave.seqera.io/library/htslib:1.21--ff8e28a189fbecaa' }" 9 | 10 | input: 11 | tuple val(meta), path(input) 12 | 13 | output: 14 | tuple val(meta), path("*.gz"), path("*.tbi"), optional: true, emit: gz_tbi 15 | tuple val(meta), path("*.gz"), path("*.csi"), optional: true, emit: gz_csi 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def args2 = task.ext.args2 ?: '' 24 | def prefix = task.ext.prefix ?: "${meta.id}" 25 | """ 26 | bgzip --threads ${task.cpus} -c $args $input > ${prefix}.${input.getExtension()}.gz 27 | tabix --threads ${task.cpus} $args2 ${prefix}.${input.getExtension()}.gz 28 | 29 | cat <<-END_VERSIONS > versions.yml 30 | "${task.process}": 31 | tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') 32 | END_VERSIONS 33 | """ 34 | 35 | stub: 36 | def prefix = task.ext.prefix ?: "${meta.id}" 37 | def args2 = task.ext.args2 ?: '' 38 | def index = args2.contains("-C ") || args2.contains("--csi") ? 
"csi" : "tbi" 39 | """ 40 | echo "" | gzip > ${prefix}.${input.getExtension()}.gz 41 | touch ${prefix}.${input.getExtension()}.gz.${index} 42 | 43 | cat <<-END_VERSIONS > versions.yml 44 | "${task.process}": 45 | tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') 46 | END_VERSIONS 47 | """ 48 | } 49 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/bgziptabix/meta.yml: -------------------------------------------------------------------------------- 1 | name: tabix_bgziptabix 2 | description: bgzip a sorted tab-delimited genome file and then create tabix index 3 | keywords: 4 | - bgzip 5 | - compress 6 | - index 7 | - tabix 8 | - vcf 9 | tools: 10 | - tabix: 11 | description: Generic indexer for TAB-delimited genome position files. 12 | homepage: https://www.htslib.org/doc/tabix.html 13 | documentation: https://www.htslib.org/doc/tabix.1.html 14 | doi: 10.1093/bioinformatics/btq671 15 | licence: ["MIT"] 16 | identifier: biotools:tabix 17 | input: 18 | - - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing sample information 22 | e.g. [ id:'test', single_end:false ] 23 | - input: 24 | type: file 25 | description: Sorted tab-delimited genome file 26 | output: 27 | - gz_tbi: 28 | - meta: 29 | type: map 30 | description: | 31 | Groovy Map containing sample information 32 | e.g. [ id:'test', single_end:false ] 33 | - "*.gz": 34 | type: file 35 | description: bgzipped tab-delimited genome file 36 | pattern: "*.gz" 37 | - "*.tbi": 38 | type: file 39 | description: tabix index file 40 | pattern: "*.tbi" 41 | - gz_csi: 42 | - meta: 43 | type: map 44 | description: | 45 | Groovy Map containing sample information 46 | e.g. 
[ id:'test', single_end:false ] 47 | - "*.gz": 48 | type: file 49 | description: bgzipped tab-delimited genome file 50 | pattern: "*.gz" 51 | - "*.csi": 52 | type: file 53 | description: csi index file 54 | pattern: "*.csi" 55 | - versions: 56 | - versions.yml: 57 | type: file 58 | description: File containing software versions 59 | pattern: "versions.yml" 60 | authors: 61 | - "@maxulysse" 62 | - "@DLBPointon" 63 | maintainers: 64 | - "@maxulysse" 65 | - "@DLBPointon" 66 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/bgziptabix/tests/tabix_csi.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_BGZIPTABIX { 3 | ext.args2 = '-p vcf --csi' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/bgziptabix/tests/tabix_tbi.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_BGZIPTABIX { 3 | ext.args2 = '-p vcf' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-side/tabix/tabix/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | 7 | dependencies: 8 | - bioconda::htslib=1.21 9 | - bioconda::tabix=1.11 10 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/tabix/main.nf: -------------------------------------------------------------------------------- 1 | process TABIX_TABIX { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/92/92859404d861ae01afb87e2b789aebc71c0ab546397af890c7df74e4ee22c8dd/data' : 8 | 'community.wave.seqera.io/library/htslib:1.21--ff8e28a189fbecaa' }" 9 | 10 | input: 11 | tuple val(meta), path(tab) 12 | 13 | output: 14 | tuple val(meta), path("*.tbi"), optional:true, emit: tbi 15 | tuple val(meta), path("*.csi"), optional:true, emit: csi 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | """ 24 | tabix \\ 25 | --threads $task.cpus \\ 26 | $args \\ 27 | $tab 28 | 29 | cat <<-END_VERSIONS > versions.yml 30 | "${task.process}": 31 | tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') 32 | END_VERSIONS 33 | """ 34 | 35 | stub: 36 | """ 37 | touch ${tab}.tbi 38 | touch ${tab}.csi 39 | 40 | cat <<-END_VERSIONS > versions.yml 41 | "${task.process}": 42 | tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') 43 | END_VERSIONS 44 | """ 45 | } 46 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/tabix/meta.yml: -------------------------------------------------------------------------------- 1 | name: tabix_tabix 2 | description: create tabix index from a sorted bgzip tab-delimited genome file 3 | keywords: 4 | - index 5 | - tabix 6 | - vcf 7 | tools: 8 | - tabix: 9 | description: Generic indexer for TAB-delimited genome position files. 10 | homepage: https://www.htslib.org/doc/tabix.html 11 | documentation: https://www.htslib.org/doc/tabix.1.html 12 | doi: 10.1093/bioinformatics/btq671 13 | licence: ["MIT"] 14 | identifier: biotools:tabix 15 | input: 16 | - - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | e.g. 
[ id:'test', single_end:false ] 21 | - tab: 22 | type: file 23 | description: TAB-delimited genome position file compressed with bgzip 24 | pattern: "*.{bed.gz,gff.gz,sam.gz,vcf.gz}" 25 | output: 26 | - tbi: 27 | - meta: 28 | type: map 29 | description: | 30 | Groovy Map containing sample information 31 | e.g. [ id:'test', single_end:false ] 32 | - "*.tbi": 33 | type: file 34 | description: tabix index file 35 | pattern: "*.{tbi}" 36 | - csi: 37 | - meta: 38 | type: map 39 | description: | 40 | Groovy Map containing sample information 41 | e.g. [ id:'test', single_end:false ] 42 | - "*.csi": 43 | type: file 44 | description: coordinate sorted index file 45 | pattern: "*.{csi}" 46 | - versions: 47 | - versions.yml: 48 | type: file 49 | description: File containing software versions 50 | pattern: "versions.yml" 51 | authors: 52 | - "@joseespinosa" 53 | - "@drpatelh" 54 | - "@maxulysse" 55 | maintainers: 56 | - "@joseespinosa" 57 | - "@drpatelh" 58 | - "@maxulysse" 59 | -------------------------------------------------------------------------------- /modules/nf-side/tabix/tabix/tests/tabix_bed.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_TABIX { 3 | ext.args = '-p bed' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-side/tabix/tabix/tests/tabix_gff.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_TABIX { 3 | ext.args = '-p gff' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-side/tabix/tabix/tests/tabix_vcf_csi.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_TABIX { 3 | ext.args = '-p vcf --csi' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- 
/modules/nf-side/tabix/tabix/tests/tabix_vcf_tbi.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_TABIX { 3 | ext.args = '-p vcf' 4 | } 5 | } -------------------------------------------------------------------------------- /nf-test.config: -------------------------------------------------------------------------------- 1 | config { 2 | // location for all nf-test tests 3 | testsDir "." 4 | 5 | // nf-test directory including temporary files for each test 6 | workDir System.getenv("NFT_WORKDIR") ?: ".nf-test" 7 | 8 | // location of an optional nextflow.config file specific for executing tests 9 | configFile "tests/nextflow.config" 10 | 11 | // ignore tests coming from the nf-core/modules repo 12 | ignore 'modules/nf-core/**/*', 'subworkflows/nf-core/**/*', 'modules/nf-side/**/*', 'subworkflows/nf-side/**/*' 13 | 14 | // run all test with defined profile(s) from the main nextflow.config 15 | profile "test" 16 | 17 | // list of filenames or patterns that should be trigger a full test run 18 | triggers 'conf/test.config', 'nextflow.config', 'nextflow_schema.json', 'nf-test.config', 'tests/.nftignore', 'tests/nextflow.config', '**/schema_references.json' 19 | 20 | // load the necessary plugins 21 | plugins { 22 | load "nft-utils@0.0.3" 23 | load "nft-vcf@1.0.7" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /subworkflows/nf-core/archive_extract/main.nf: -------------------------------------------------------------------------------- 1 | include { GUNZIP } from '../../../modules/nf-core/gunzip' 2 | include { UNTAR } from '../../../modules/nf-core/untar' 3 | include { UNZIP } from '../../../modules/nf-core/unzip' 4 | 5 | workflow ARCHIVE_EXTRACT { 6 | take: 7 | archive // Channel: [[meta], archive] 8 | 9 | main: 10 | versions = Channel.empty() 11 | 12 | archive_to_extract = archive.branch { _meta, archive_ -> 13 | tar: 
archive_.toString().endsWith('.tar.gz') 14 | gz: archive_.toString().endsWith('.gz') 15 | zip: archive_.toString().endsWith('.zip') 16 | non_assigned: true 17 | } 18 | 19 | // This is a confidence check 20 | not_extracted = archive_to_extract.non_assigned 21 | not_extracted.view { _meta, archive_ -> log.warn("Archive not in an expected format: " + archive_) } 22 | 23 | // Extract any archive with a recognized extension 24 | GUNZIP(archive_to_extract.gz) 25 | UNTAR(archive_to_extract.tar) 26 | UNZIP(archive_to_extract.zip) 27 | 28 | extracted = Channel 29 | .empty() 30 | .mix( 31 | GUNZIP.out.gunzip, 32 | UNTAR.out.untar, 33 | UNZIP.out.unzipped_archive, 34 | ) 35 | 36 | versions = versions.mix(GUNZIP.out.versions) 37 | versions = versions.mix(UNTAR.out.versions) 38 | versions = versions.mix(UNZIP.out.versions) 39 | 40 | emit: 41 | extracted // channel: [ meta, extracted_archive ] 42 | not_extracted // channel: [ meta, not_extracted_archive ] 43 | versions // channel: [ versions.yml ] 44 | } 45 | -------------------------------------------------------------------------------- /subworkflows/nf-core/archive_extract/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "archive_extract" 3 | description: | 4 | Extract archive(s) from any format 5 | Currently supported format are .gz, .tar.gz, .zip 6 | keywords: 7 | - archive 8 | - gzip 9 | - tar 10 | - zip 11 | components: 12 | - gunzip 13 | - untar 14 | - unzip 15 | input: 16 | - archive: 17 | description: Channel with archive to extract 18 | structure: 19 | - meta: 20 | type: map 21 | description: Metadata map 22 | - archive: 23 | type: file 24 | description: | 25 | Structure: [path(archive)] 26 | File containing the archive to extract 27 | pattern: "*{.tar.gz, .gz, .zip}" 28 | output: 29 | - extracted: 30 | description: Channel with extracted archive 
31 | structure: 32 | - meta: 33 | type: map 34 | description: Metadata map 35 | - "*": 36 | type: file 37 | description: | 38 | Structure: [path] 39 | Folder or file(s) extracted 40 | - not_extracted: 41 | description: Channel with any not extracted (ie not recognized) archive 42 | structure: 43 | - meta: 44 | type: map 45 | description: Metadata map 46 | - "*": 47 | type: file 48 | description: | 49 | Structure: [path] 50 | File(s) not extracted 51 | - versions: 52 | type: file 53 | description: File containing software versions 54 | pattern: "versions.yml" 55 | authors: 56 | - "@maxulysse" 57 | maintainers: 58 | - "@maxulysse" 59 | -------------------------------------------------------------------------------- /subworkflows/nf-core/archive_extract/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test ARCHIVE_EXTRACT" 4 | script "../main.nf" 5 | workflow "ARCHIVE_EXTRACT" 6 | 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "subworkflows/archive_extract" 10 | 11 | tag "gunzip" 12 | tag "untar" 13 | tag "unzip" 14 | 15 | test(".gz && .tar.gz && .zip") { 16 | 17 | when { 18 | params { 19 | } 20 | workflow { 21 | """ 22 | input[0] = Channel.of( 23 | [[id:'test_gz'], 24 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) 25 | ], 26 | [[id:'test_tar'], 27 | file(params.modules_testdata_base_path + 'generic/tar/hello.tar.gz', checkIfExists: true) 28 | ], 29 | [[id:'test_zip'], 30 | file(params.modules_testdata_base_path + 'genomics/homo_sapiens/genome/chr21/sequence/genome_strtablefile.zip', checkIfExists: true) 31 | ]) 32 | """ 33 | } 34 | } 35 | 36 | then { 37 | assert workflow.success 38 | assert snapshot(workflow.out).match() 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/meta.yml: 
-------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NEXTFLOW_PIPELINE" 3 | description: Subworkflow with functionality that may be useful for any Nextflow pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - print_version: 12 | type: boolean 13 | description: | 14 | Print the version of the pipeline and exit 15 | - dump_parameters: 16 | type: boolean 17 | description: | 18 | Dump the parameters of the pipeline to a JSON file 19 | - output_directory: 20 | type: directory 21 | description: Path to output dir to write JSON file to. 22 | pattern: "results/" 23 | - check_conda_channel: 24 | type: boolean 25 | description: | 26 | Check if the conda channel priority is correct. 27 | output: 28 | - dummy_emit: 29 | type: boolean 30 | description: | 31 | Dummy emit to make nf-core subworkflows lint happy 32 | authors: 33 | - "@adamrtalbot" 34 | - "@drpatelh" 35 | maintainers: 36 | - "@adamrtalbot" 37 | - "@drpatelh" 38 | - "@maxulysse" 39 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" 6 | config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" 7 | tag 'subworkflows' 8 | tag 'utils_nextflow_pipeline' 9 | tag 'subworkflows/utils_nextflow_pipeline' 10 | 11 | test("Test Function getWorkflowVersion") { 12 | 13 | function "getWorkflowVersion" 14 | 15 | then { 16 | assertAll( 17 | { assert function.success }, 18 | { assert snapshot(function.result).match() } 19 | ) 20 | } 21 | } 22 | 23 | test("Test Function 
dumpParametersToJSON") { 24 | 25 | function "dumpParametersToJSON" 26 | 27 | when { 28 | function { 29 | """ 30 | // define inputs of the function here. Example: 31 | input[0] = "$outputDir" 32 | """.stripIndent() 33 | } 34 | } 35 | 36 | then { 37 | assertAll( 38 | { assert function.success } 39 | ) 40 | } 41 | } 42 | 43 | test("Test Function checkCondaChannels") { 44 | 45 | function "checkCondaChannels" 46 | 47 | then { 48 | assertAll( 49 | { assert function.success }, 50 | { assert snapshot(function.result).match() } 51 | ) 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Test Function getWorkflowVersion": { 3 | "content": [ 4 | "v9.9.9" 5 | ], 6 | "meta": { 7 | "nf-test": "0.8.4", 8 | "nextflow": "23.10.1" 9 | }, 10 | "timestamp": "2024-02-28T12:02:05.308243" 11 | }, 12 | "Test Function checkCondaChannels": { 13 | "content": null, 14 | "meta": { 15 | "nf-test": "0.8.4", 16 | "nextflow": "23.10.1" 17 | }, 18 | "timestamp": "2024-02-28T12:02:12.425833" 19 | } 20 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: 
$schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFCORE_PIPELINE" 3 | description: Subworkflow with utility functions specific to the nf-core pipeline template 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - nextflow_cli_args: 12 | type: list 13 | description: | 14 | Nextflow CLI positional arguments 15 | output: 16 | - success: 17 | type: boolean 18 | description: | 19 | Dummy output to indicate success 20 | authors: 21 | - "@adamrtalbot" 22 | maintainers: 23 | - "@adamrtalbot" 24 | - "@maxulysse" 25 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NFCORE_PIPELINE" 4 | script "../main.nf" 5 | config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" 6 | workflow "UTILS_NFCORE_PIPELINE" 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "utils_nfcore_pipeline" 10 | tag "subworkflows/utils_nfcore_pipeline" 11 | 12 | test("Should run without failures") { 13 | 14 | when { 15 | workflow { 16 | """ 17 | input[0] = [] 18 | """ 19 | } 20 | } 21 | 22 | then { 23 | assertAll( 24 | { assert workflow.success }, 25 | { assert snapshot(workflow.out).match() } 26 | ) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | true 7 | ], 8 | "valid_config": [ 9 | true 10 | ] 11 | } 12 | ], 13 | "meta": { 14 | "nf-test": "0.8.4", 15 | "nextflow": "23.10.1" 16 | }, 17 | "timestamp": 
"2024-02-28T12:03:25.726491" 18 | } 19 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfschema_plugin/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Subworkflow that uses the nf-schema plugin to validate parameters and render the parameter summary 3 | // 4 | 5 | include { paramsSummaryLog } from 'plugin/nf-schema' 6 | include { validateParameters } from 'plugin/nf-schema' 7 | 8 | workflow UTILS_NFSCHEMA_PLUGIN { 9 | 10 | take: 11 | input_workflow // workflow: the workflow object used by nf-schema to get metadata from the workflow 12 | validate_params // boolean: validate the parameters 13 | parameters_schema // string: path to the parameters JSON schema. 14 | // this has to be the same as the schema given to `validation.parametersSchema` 15 | // when this input is empty it will automatically use the configured schema or 16 | // "${projectDir}/nextflow_schema.json" as default. This input should not be empty 17 | // for meta pipelines 18 | 19 | main: 20 | 21 | // 22 | // Print parameter summary to stdout. 
This will display the parameters 23 | // that differ from the default given in the JSON schema 24 | // 25 | if(parameters_schema) { 26 | log.info paramsSummaryLog(input_workflow, parameters_schema:parameters_schema) 27 | } else { 28 | log.info paramsSummaryLog(input_workflow) 29 | } 30 | 31 | // 32 | // Validate the parameters using nextflow_schema.json or the schema 33 | // given via the validation.parametersSchema configuration option 34 | // 35 | if(validate_params) { 36 | if(parameters_schema) { 37 | validateParameters(parameters_schema:parameters_schema) 38 | } else { 39 | validateParameters() 40 | } 41 | } 42 | 43 | emit: 44 | dummy_emit = true 45 | } 46 | 47 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfschema_plugin/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "utils_nfschema_plugin" 3 | description: Run nf-schema to validate parameters and create a summary of changed parameters 4 | keywords: 5 | - validation 6 | - JSON schema 7 | - plugin 8 | - parameters 9 | - summary 10 | components: [] 11 | input: 12 | - input_workflow: 13 | type: object 14 | description: | 15 | The workflow object of the used pipeline. 16 | This object contains meta data used to create the params summary log 17 | - validate_params: 18 | type: boolean 19 | description: Validate the parameters and error if invalid. 20 | - parameters_schema: 21 | type: string 22 | description: | 23 | Path to the parameters JSON schema. 24 | This has to be the same as the schema given to the `validation.parametersSchema` config 25 | option. When this input is empty it will automatically use the configured schema or 26 | "${projectDir}/nextflow_schema.json" as default. The schema should not be given in this way 27 | for meta pipelines. 
28 | output: 29 | - dummy_emit: 30 | type: boolean 31 | description: Dummy emit to make nf-core subworkflows lint happy 32 | authors: 33 | - "@nvnieuwk" 34 | maintainers: 35 | - "@nvnieuwk" 36 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "nf-schema@2.1.0" 3 | } 4 | 5 | validation { 6 | parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" 7 | monochromeLogs = true 8 | } -------------------------------------------------------------------------------- /subworkflows/nf-side/prepare_genome_dnaseq/nextflow.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Config file for defining DSL2 per module options 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Available keys to override module options: 6 | ext.args = Additional arguments appended to command in module. 7 | ext.args2 = Second set of arguments appended to command in module (multi-tool modules). 8 | ext.args3 = Third set of arguments appended to command in module (multi-tool modules). 9 | ext.prefix = File name prefix for output files. 
10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | withName: 'BUILD_INTERVALS' { 15 | ext.when = { meta.run_intervals } 16 | ext.args = { "-v FS='\t' -v OFS='\t' '{ print \$1, \"0\", \$2 }'" } 17 | ext.suffix = { "bed" } 18 | } 19 | withName: 'BWAMEM1_INDEX' { 20 | ext.when = { meta.run_bwamem1 } 21 | } 22 | withName: 'BWAMEM2_INDEX' { 23 | ext.when = { meta.run_bwamem2 } 24 | } 25 | withName: 'DRAGMAP_HASHTABLE' { 26 | ext.when = { meta.run_dragmap } 27 | } 28 | withName: 'GATK4_CREATESEQUENCEDICTIONARY' { 29 | ext.when = { meta.run_createsequencedictionary } 30 | } 31 | withName: 'MSISENSORPRO_SCAN' { 32 | ext.when = { meta.run_msisensorpro } 33 | } 34 | withName: 'SAMTOOLS_FAIDX' { 35 | ext.when = { meta.run_faidx } 36 | } 37 | withName: 'TABIX_BGZIPTABIX' { 38 | ext.when = { meta.run_bgziptabix } 39 | } 40 | withName: 'TABIX_TABIX' { 41 | ext.when = { meta.run_tabix } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /subworkflows/nf-side/prepare_genome_dnaseq/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName:BWAMEM2_INDEX { 3 | memory = { 6.GB } 4 | } 5 | withName: 'BUILD_INTERVALS' { 6 | ext.args = { "-v FS='\t' -v OFS='\t' '{ print \$1, \"0\", \$2 }'" } 7 | ext.suffix = { "bed" } 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-side/prepare_genome_rnaseq/nextflow.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Config file for defining DSL2 per module options 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Available keys to override module options: 6 | ext.args = Additional arguments appended to 
command in module. 7 | ext.args2 = Second set of arguments appended to command in module (multi-tool modules). 8 | ext.args3 = Third set of arguments appended to command in module (multi-tool modules). 9 | ext.prefix = File name prefix for output files. 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | withName: 'BOWTIE1_BUILD' { 15 | ext.when = { meta.run_bowtie1 } 16 | } 17 | withName: 'BOWTIE2_BUILD' { 18 | ext.when = { meta.run_bowtie2 } 19 | } 20 | withName: 'GFFREAD' { 21 | ext.args = '--keep-exon-attrs -F -T' 22 | ext.when = { meta.run_gffread } 23 | } 24 | withName: 'HISAT2_BUILD' { 25 | ext.when = { meta.run_hisat2 } 26 | } 27 | withName: 'HISAT2_EXTRACTSPLICESITES' { 28 | ext.when = { meta.run_hisat2 } 29 | } 30 | withName: 'KALLISTO_INDEX' { 31 | ext.args = { params.kallisto_make_unique ? '--make-unique' : '' } 32 | ext.when = { meta.run_kallisto } 33 | } 34 | withName: 'MAKE_TRANSCRIPTS_FASTA' { 35 | ext.when = { meta.run_rsem_make_transcript_fasta } 36 | } 37 | withName: 'RSEM_PREPAREREFERENCE_GENOME' { 38 | ext.args = '--star' 39 | ext.when = { meta.run_rsem } 40 | } 41 | withName: 'SALMON_INDEX' { 42 | ext.when = { meta.run_salmon } 43 | } 44 | withName: 'SAMTOOLS_FAIDX' { 45 | ext.when = { meta.run_faidx } 46 | } 47 | withName: 'STAR_GENOMEGENERATE' { 48 | ext.when = { meta.run_star } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /subworkflows/nf-side/prepare_genome_rnaseq/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'GFFREAD' { 3 | ext.args = '--keep-exon-attrs -F -T' 4 | } 5 | withName: 'KALLISTO_INDEX' { 6 | ext.args = { params.kallisto_make_unique ? 
'--make-unique' : '' } 7 | } 8 | withName: 'RSEM_PREPAREREFERENCE_GENOME' { 9 | ext.args = '--star' 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /subworkflows/nf-side/utils_references/README.md: -------------------------------------------------------------------------------- 1 | # Disclaimer 2 | 3 | This `utils_references/` folder contains for now two functions and a schema. 4 | This is really meant for a POC and should not be installed by anyone except @maxulysse. 5 | But that was the easiest way to share functions and a schema between three different pipelines and still showcase the logic. 6 | This might evolve in the future, possibly towards a proper plugin. 7 | 8 | If you do so, please be aware that: 9 | 10 | - @maxulysse has hacked the `main.nf` to test the functions and the schema 11 | - This is really meant to evolve in the future and can be deleted at any moment without prior notice. 12 | 13 | That being said, if you still want to use it or want to know more about it, please check the `#references` channel on the nf-core slack. 
14 | -------------------------------------------------------------------------------- /subworkflows/nf-side/utils_references/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "utils_references" 3 | description: Functionality for dealing with references that may be useful for any Nextflow pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - references 8 | components: [] 9 | input: [] 10 | output: [] 11 | authors: 12 | - "@maxulysse" 13 | maintainers: 14 | - "@maxulysse" 15 | -------------------------------------------------------------------------------- /subworkflows/nf-side/utils_references/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_REFERENCES" 4 | script "../main.nf" 5 | workflow "UTILS_REFERENCES" 6 | config './nextflow.config' 7 | 8 | test("references_file with params - references_value without params + replace basepath with s3") { 9 | 10 | when { 11 | params { 12 | igenomes_base = 's3://ngi-igenomes/igenomes/' 13 | } 14 | 15 | workflow { 16 | """ 17 | input[0] = 'https://raw.githubusercontent.com/nf-core/references-assets/main/genomes/Homo_sapiens/test/GRCh38_chr22.yml' 18 | input[1] = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/sarscov2/genome/genome.fasta' 19 | input[2] = null 20 | input[3] = 'fasta' 21 | input[4] = 'species' 22 | input[5] = 's3://nf-core-references/test_data/' 23 | input[6] = ['\${params.igenomes_base}', 's3://ngi-igenomes/igenomes/'] 24 | """ 25 | } 26 | } 27 | 28 | then { 29 | assert workflow.success 30 | assert snapshot(workflow.out).match() 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /subworkflows/nf-side/utils_references/tests/main.nf.test.snap: 
-------------------------------------------------------------------------------- 1 | { 2 | "references_file with params - references_value without params + replace basepath with s3": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "GRCh38_chr22" 9 | }, 10 | "/nf-core/test-datasets/modules/data/genomics/sarscov2/genome/genome.fasta" 11 | ] 12 | ], 13 | "1": [ 14 | [ 15 | { 16 | "id": "GRCh38_chr22" 17 | }, 18 | "Homo_sapiens" 19 | ] 20 | ], 21 | "references_file": [ 22 | [ 23 | { 24 | "id": "GRCh38_chr22" 25 | }, 26 | "/nf-core/test-datasets/modules/data/genomics/sarscov2/genome/genome.fasta" 27 | ] 28 | ], 29 | "references_value": [ 30 | [ 31 | { 32 | "id": "GRCh38_chr22" 33 | }, 34 | "Homo_sapiens" 35 | ] 36 | ] 37 | } 38 | ], 39 | "meta": { 40 | "nf-test": "0.9.2", 41 | "nextflow": "25.02.1" 42 | }, 43 | "timestamp": "2025-03-28T11:16:31.722406802" 44 | } 45 | } -------------------------------------------------------------------------------- /subworkflows/nf-side/utils_references/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'nf-schema@2.4.1' 3 | } 4 | -------------------------------------------------------------------------------- /tests/.nftignore: -------------------------------------------------------------------------------- 1 | **/RSEMIndex/**/Log.out 2 | **/STARIndex/**/Log.out 3 | **/SalmonIndex/**/ctable.bin 4 | **/SalmonIndex/**/pos.bin 5 | **/SalmonIndex/**/pre_indexing.log 6 | **/SalmonIndex/**/ref_indexing.log 7 | **/SalmonIndex/**/seq.bin 8 | **/dragmap/*/hash_table.cfg 9 | **/dragmap/*/hash_table.cfg.bin 10 | **/dragmap/*/hash_table_stats.txt 11 | **/kallisto 12 | .DS_Store 13 | index.json 14 | multiqc/multiqc_data/multiqc.log 15 | multiqc/multiqc_data/multiqc_data.json 16 | multiqc/multiqc_data/multiqc_general_stats.txt 17 | multiqc/multiqc_data/multiqc_software_versions.txt 18 | multiqc/multiqc_data/multiqc_sources.txt 19 | 
multiqc/multiqc_plots/{svg,pdf,png}/*.{svg,pdf,png} 20 | multiqc/multiqc_report.html 21 | pipeline_info/*.{html,json,txt,yml} 22 | -------------------------------------------------------------------------------- /tests/createsequencedictionary.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_pipeline { 2 | 3 | name "Build samtools references" 4 | script "../main.nf" 5 | tag "pipeline" 6 | 7 | test("-profile test --tools createsequencedictionary") { 8 | 9 | options "-output-dir $outputDir" 10 | 11 | when { 12 | params { 13 | outdir = "$outputDir" 14 | tools = 'createsequencedictionary' 15 | } 16 | } 17 | 18 | then { 19 | // stable_name: All files + folders in ${params.outdir}/ with a stable name 20 | def stable_name = getAllFilesFromDir(params.outdir, relative: true, includeDir: true, ignore: ['pipeline_info/*.{html,json,txt}']) 21 | // stable_path: All files in ${params.outdir}/ with stable content 22 | def stable_path = getAllFilesFromDir(params.outdir, ignoreFile: 'tests/.nftignore') 23 | assert workflow.success 24 | assertAll( 25 | { assert snapshot( 26 | // Number of successful tasks 27 | workflow.trace.succeeded().size(), 28 | // pipeline versions.yml file for multiqc from which Nextflow version is removed because we tests pipelines on multiple Nextflow versions 29 | removeNextflowVersion("$outputDir/pipeline_info/nf_core_references_software_mqc_versions.yml"), 30 | // All stable path name, with a relative path 31 | stable_name, 32 | // All files with stable contents 33 | stable_path 34 | ).match() } 35 | ) 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tests/createsequencedictionary.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "-profile test --tools createsequencedictionary": { 3 | "content": [ 4 | 2, 5 | { 6 | "GATK4_CREATESEQUENCEDICTIONARY": { 7 | "gatk4": "4.6.1.0" 8 | }, 9 | 
"Workflow": { 10 | "nf-core/references": "v1.0.0dev" 11 | } 12 | }, 13 | [ 14 | "Homo_sapiens", 15 | "Homo_sapiens/nf-core", 16 | "Homo_sapiens/nf-core/GRCh38_chr21", 17 | "Homo_sapiens/nf-core/GRCh38_chr21/Sequence", 18 | "Homo_sapiens/nf-core/GRCh38_chr21/Sequence/WholeGenomeFasta", 19 | "Homo_sapiens/nf-core/GRCh38_chr21/Sequence/WholeGenomeFasta/GRCh38_chr21.dict", 20 | "index.json", 21 | "multiqc", 22 | "multiqc/multiqc_data", 23 | "multiqc/multiqc_data/multiqc.log", 24 | "multiqc/multiqc_data/multiqc_citations.txt", 25 | "multiqc/multiqc_data/multiqc_data.json", 26 | "multiqc/multiqc_data/multiqc_software_versions.txt", 27 | "multiqc/multiqc_data/multiqc_sources.txt", 28 | "multiqc/multiqc_report.html", 29 | "pipeline_info", 30 | "pipeline_info/nf_core_references_software_mqc_versions.yml" 31 | ], 32 | [ 33 | "GRCh38_chr21.dict:md5,40482944b1a51af140a397e2544b6ed4", 34 | "multiqc_citations.txt:md5,4c806e63a283ec1b7e78cdae3a923d4f" 35 | ] 36 | ], 37 | "meta": { 38 | "nf-test": "0.9.2", 39 | "nextflow": "24.10.5" 40 | }, 41 | "timestamp": "2025-04-20T10:37:08.391506996" 42 | } 43 | } -------------------------------------------------------------------------------- /tests/default.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_pipeline { 2 | 3 | name "Build rnaseq references" 4 | script "../main.nf" 5 | tag "pipeline" 6 | 7 | test("-profile test --tools bowtie1,bowtie2,faidx,gffread,sizes,star") { 8 | 9 | options "-output-dir $outputDir" 10 | 11 | when { 12 | params { 13 | outdir = "$outputDir" 14 | tools = 'bowtie1,bowtie2,faidx,gffread,sizes,star' 15 | } 16 | } 17 | 18 | then { 19 | // stable_name: All files + folders in ${params.outdir}/ with a stable name 20 | def stable_name = getAllFilesFromDir(params.outdir, relative: true, includeDir: true, ignore: ['pipeline_info/*.{html,json,txt}']) 21 | // stable_path: All files in ${params.outdir}/ with stable content 22 | def stable_path = 
getAllFilesFromDir(params.outdir, ignoreFile: 'tests/.nftignore') 23 | assert workflow.success 24 | assertAll( 25 | { assert snapshot( 26 | // Number of successful tasks 27 | workflow.trace.succeeded().size(), 28 | // pipeline versions.yml file for multiqc from which Nextflow version is removed because we tests pipelines on multiple Nextflow versions 29 | removeNextflowVersion("$outputDir/pipeline_info/nf_core_references_software_mqc_versions.yml"), 30 | // All stable path name, with a relative path 31 | stable_name, 32 | // All files with stable contents 33 | stable_path 34 | ).match() } 35 | ) 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tests/nextflow.config: -------------------------------------------------------------------------------- 1 | /* 2 | ======================================================================================== 3 | Nextflow config file for running nf-test tests 4 | ======================================================================================== 5 | */ 6 | 7 | // Specify any additional parameters here 8 | // Or any resources requirements 9 | 10 | // Should resolve issue with accessing s3 from the runners 11 | aws.client.anonymous = true 12 | -------------------------------------------------------------------------------- /tests/wbcel235.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_pipeline { 2 | 3 | name "Build WBCEL235 references" 4 | script "../main.nf" 5 | tag "pipeline" 6 | 7 | test("-profile test --tools bwamem1,createsequencedictionary,faidx,intervals,rsem,star --input Caenorhabditis_elegans/NCBI/WBcel235.yml") { 8 | 9 | options "-output-dir $outputDir" 10 | 11 | when { 12 | params { 13 | input = 'https://raw.githubusercontent.com/nf-core/references-datasheets/latest/genomes_source/Caenorhabditis_elegans/NCBI/WBcel235.yml' 14 | outdir = "$outputDir" 15 | tools = 
'bwamem1,createsequencedictionary,faidx,intervals,rsem,star' 16 | } 17 | } 18 | 19 | then { 20 | // stable_name: All files + folders in ${params.outdir}/ with a stable name 21 | def stable_name = getAllFilesFromDir(params.outdir, relative: true, includeDir: true, ignore: ['pipeline_info/*.{html,json,txt}']) 22 | // stable_path: All files in ${params.outdir}/ with stable content 23 | def stable_path = getAllFilesFromDir(params.outdir, ignoreFile: 'tests/.nftignore') 24 | assert workflow.success 25 | assertAll( 26 | { assert snapshot( 27 | // Number of successful tasks 28 | workflow.trace.succeeded().size(), 29 | // pipeline versions.yml file for multiqc from which Nextflow version is removed because we tests pipelines on multiple Nextflow versions 30 | removeNextflowVersion("$outputDir/pipeline_info/nf_core_references_software_mqc_versions.yml"), 31 | // All stable path name, with a relative path 32 | stable_name, 33 | // All files with stable contents 34 | stable_path 35 | ).match() } 36 | ) 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /tower.yml: -------------------------------------------------------------------------------- 1 | reports: 2 | multiqc_report.html: 3 | display: "MultiQC HTML report" 4 | samplesheet.csv: 5 | display: "Auto-created samplesheet with collated metadata and FASTQ paths" 6 | --------------------------------------------------------------------------------