├── .devcontainer └── devcontainer.json ├── .gitattributes ├── .github ├── .dockstore.yml ├── CODEOWNERS ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ └── feature_request.yml ├── PULL_REQUEST_TEMPLATE.md ├── actions │ ├── get-shards │ │ └── action.yml │ └── nf-test │ │ └── action.yml └── workflows │ ├── awsfulltest.yml │ ├── awstest.yml │ ├── branch.yml │ ├── clean-up.yml │ ├── download_pipeline.yml │ ├── fix_linting.yml │ ├── linting.yml │ ├── linting_comment.yml │ ├── nf-test.yml │ ├── release-announcements.yml │ └── template-version-comment.yml ├── .gitignore ├── .gitpod.yml ├── .nf-core.yml ├── .pre-commit-config.yaml ├── .prettierignore ├── .prettierrc.yml ├── CHANGELOG.md ├── CITATIONS.md ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── assets ├── adaptivecard.json ├── email_template.html ├── email_template.txt ├── methods_description_template.yml ├── multiqc_config.yml ├── nf-core-funcscan_logo.png ├── nf-core-funcscan_logo_light.png ├── samplesheet.csv ├── schema_input.json ├── sendmail_template.txt └── slackreport.json ├── bin ├── ampcombi_download.py ├── comBGC.py └── merge_taxonomy.py ├── conf ├── base.config ├── modules.config ├── test.config ├── test_bakta.config ├── test_bgc_bakta.config ├── test_bgc_prokka.config ├── test_bgc_pyrodigal.config ├── test_full.config ├── test_minimal.config ├── test_preannotated.config ├── test_preannotated_bgc.config ├── test_prokka.config ├── test_taxonomy_bakta.config ├── test_taxonomy_prokka.config └── test_taxonomy_pyrodigal.config ├── docs ├── README.md ├── images │ ├── funcscan_icon.png │ ├── funcscan_icon.svg │ ├── funcscan_metro_workflow.png │ ├── funcscan_metro_workflow.svg │ ├── funcscan_metro_workflow_dark.png │ ├── funcscan_metro_workflow_dark.svg │ ├── funcscan_metro_workflow_vertical.png │ ├── funcscan_metro_workflow_vertical.svg │ ├── nf-core-funcscan_logo_dark.png │ ├── nf-core-funcscan_logo_flat_dark.png │ ├── nf-core-funcscan_logo_flat_dark.svg │ ├── nf-core-funcscan_logo_flat_light.png │ ├── nf-core-funcscan_logo_flat_light.svg │ └── nf-core-funcscan_logo_light.png ├── output.md └── usage.md ├── main.nf ├── modules.json ├── modules ├── local │ ├── amp_database_download.nf │ ├── combgc.nf │ ├── interproscan_download.nf │ ├── merge_taxonomy_ampcombi.nf │ ├── merge_taxonomy_combgc.nf │ └── merge_taxonomy_hamronization.nf └── nf-core │ ├── abricate │ └── run │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── ampcombi2 │ ├── cluster │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ ├── complete │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ └── parsetables │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── ampir │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── amplify │ └── predict │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── amrfinderplus │ ├── run │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ └── update │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── 
tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── antismash │ ├── antismash │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── nextflow.config │ └── antismashdownloaddatabases │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── nextflow.config │ ├── argnorm │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── argnorm_hamronized.config │ │ ├── argnorm_raw.config │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── bakta │ ├── bakta │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ ├── nextflow.config │ │ │ └── tags.yml │ └── baktadbdownload │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── deeparg │ ├── downloaddata │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ └── predict │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── deepbgc │ ├── download │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── tags.yml │ └── pipeline │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── fargene │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── gecco │ └── run │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── gunzip │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── hamronization │ ├── abricate │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ ├── amrfinderplus │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ ├── deeparg │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ ├── fargene │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ ├── rgi │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ └── summarize │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── hmmer │ └── hmmsearch │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── interproscan │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── macrel │ └── contigs │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── mmseqs │ ├── createdb │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ 
├── createtsv │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── cluster.nextflow.config │ │ │ ├── main.nf.test │ │ │ ├── main.nf.test.snap │ │ │ └── taxonomy.nextflow.config │ ├── databases │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ └── taxonomy │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── multiqc │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── nextflow.config │ ├── prodigal │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── prokka │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── pyrodigal │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ └── tags.yml │ ├── rgi │ ├── cardannotation │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ │ ├── main.nf.test │ │ │ └── main.nf.test.snap │ └── main │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ └── main.nf.test.snap │ ├── seqkit │ └── seq │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ ├── tabix │ └── bgzip │ │ ├── environment.yml │ │ ├── main.nf │ │ ├── meta.yml │ │ └── tests │ │ ├── bgzip_compress.config │ │ ├── main.nf.test │ │ ├── main.nf.test.snap │ │ ├── tags.yml │ │ └── vcf_none.config │ └── untar │ ├── environment.yml │ ├── main.nf │ ├── meta.yml │ └── tests │ ├── main.nf.test │ ├── main.nf.test.snap │ └── tags.yml ├── nextflow.config ├── nextflow_schema.json ├── nf-test.config ├── ro-crate-metadata.json ├── subworkflows ├── local │ ├── amp.nf │ ├── annotation.nf │ ├── arg.nf │ ├── bgc.nf │ ├── protein_annotation.nf │ ├── taxa_class.nf │ └── utils_nfcore_funcscan_pipeline │ │ └── main.nf └── nf-core │ ├── utils_nextflow_pipeline │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.function.nf.test │ │ ├── main.function.nf.test.snap │ │ ├── main.workflow.nf.test │ │ ├── nextflow.config │ │ └── tags.yml │ ├── utils_nfcore_pipeline │ ├── main.nf │ ├── meta.yml │ └── tests │ │ ├── main.function.nf.test │ │ ├── main.function.nf.test.snap │ │ ├── main.workflow.nf.test │ │ ├── main.workflow.nf.test.snap │ │ ├── nextflow.config │ │ └── tags.yml │ └── utils_nfschema_plugin │ ├── main.nf │ ├── meta.yml │ └── tests │ ├── main.nf.test │ ├── nextflow.config │ └── nextflow_schema.json ├── tests ├── .nftignore ├── default.nf.test ├── default.nf.test.snap ├── nextflow.config ├── test_bakta.nf.test ├── test_bakta.nf.test.snap ├── test_bgc_bakta.nf.test ├── test_bgc_bakta.nf.test.snap ├── test_bgc_prokka.nf.test ├── test_bgc_prokka.nf.test.snap ├── test_bgc_pyrodigal.nf.test ├── test_bgc_pyrodigal.nf.test.snap ├── test_full.nf.test.manualtesting ├── test_full.nf.test.snap.manualtesting ├── test_minimal.nf.test ├── test_preannotated.nf.test ├── test_preannotated.nf.test.snap ├── test_preannotated_bgc.nf.test ├── test_preannotated_bgc.nf.test.snap ├── test_prokka.nf.test ├── test_prokka.nf.test.snap ├── test_taxonomy_bakta.nf.test ├── test_taxonomy_bakta.nf.test.snap ├── test_taxonomy_prokka.nf.test ├── test_taxonomy_prokka.nf.test.snap ├── 
test_taxonomy_pyrodigal.nf.test └── test_taxonomy_pyrodigal.nf.test.snap ├── tower.yml └── workflows └── funcscan.nf /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nfcore", 3 | "image": "nfcore/gitpod:latest", 4 | "remoteUser": "gitpod", 5 | "runArgs": ["--privileged"], 6 | 7 | // Configure tool-specific properties. 8 | "customizations": { 9 | // Configure properties specific to VS Code. 10 | "vscode": { 11 | // Set *default* container specific settings.json values on container create. 12 | "settings": { 13 | "python.defaultInterpreterPath": "/opt/conda/bin/python" 14 | }, 15 | 16 | // Add the IDs of extensions you want installed when the container is created. 17 | "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.config linguist-language=nextflow 2 | *.nf.test linguist-language=nextflow 3 | modules/nf-core/** linguist-generated 4 | subworkflows/nf-core/** linguist-generated 5 | -------------------------------------------------------------------------------- /.github/.dockstore.yml: -------------------------------------------------------------------------------- 1 | # Dockstore config version, not pipeline version 2 | version: 1.2 3 | workflows: 4 | - subclass: nfl 5 | primaryDescriptorPath: /nextflow.config 6 | publish: True 7 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @jfy133 @jasmezz @Darcy220606 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Report something that is broken or incorrect 3 | labels: bug 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Before you post this issue, please check the documentation: 9 | 10 | - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) 11 | - [nf-core/funcscan pipeline documentation](https://nf-co.re/funcscan/usage) 12 | - type: textarea 13 | id: description 14 | attributes: 15 | label: Description of the bug 16 | description: A clear and concise description of what the bug is. 17 | validations: 18 | required: true 19 | 20 | - type: textarea 21 | id: command_used 22 | attributes: 23 | label: Command used and terminal output 24 | description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal. 25 | render: console 26 | placeholder: | 27 | $ nextflow run ... 28 | 29 | Some output where something broke 30 | 31 | - type: textarea 32 | id: files 33 | attributes: 34 | label: Relevant files 35 | description: | 36 | Please drag and drop the relevant files here. Create a `.zip` archive if the extension is not allowed. 37 | Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files. 38 | 39 | - type: textarea 40 | id: system 41 | attributes: 42 | label: System information 43 | description: | 44 | * Nextflow version _(eg. 23.04.0)_ 45 | * Hardware _(eg. 
HPC, Desktop, Cloud)_ 46 | * Executor _(eg. slurm, local, awsbatch)_ 47 | * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_ 48 | * OS _(eg. CentOS Linux, macOS, Linux Mint)_ 49 | * Version of nf-core/funcscan _(eg. 1.1, 1.5, 1.8.2)_ 50 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Join nf-core 3 | url: https://nf-co.re/join 4 | about: Please join the nf-core community here 5 | - name: "Slack #funcscan channel" 6 | url: https://nfcore.slack.com/channels/funcscan 7 | about: Discussion about the nf-core/funcscan pipeline 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for the nf-core/funcscan pipeline 3 | labels: enhancement 4 | body: 5 | - type: textarea 6 | id: description 7 | attributes: 8 | label: Description of feature 9 | description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered. 10 | validations: 11 | required: true 12 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 13 | 14 | ## PR checklist 15 | 16 | - [ ] This comment contains a description of changes (with reason). 17 | - [ ] If you've fixed a bug or added code that should be tested, add tests! 18 | - [ ] If you've added a new tool, have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/funcscan/tree/master/.github/CONTRIBUTING.md)? 19 | - [ ] If necessary, also make a PR on the nf-core/funcscan _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. 20 | - [ ] Make sure your code lints (`nf-core pipelines lint`). 21 | - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). 22 | - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). 23 | - [ ] Usage Documentation in `docs/usage.md` is updated. 24 | - [ ] Output Documentation in `docs/output.md` is updated. 25 | - [ ] `CHANGELOG.md` is updated. 26 | - [ ] `README.md` is updated (including new tool citations and authors/contributors). 27 | -------------------------------------------------------------------------------- /.github/workflows/awsfulltest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS full size tests 2 | # This workflow is triggered when a PR review is submitted on a PR against the main/master branch. 3 | # It can additionally be triggered manually with the GitHub Actions workflow dispatch button. 
4 | # It runs the -profile 'test_full' on AWS Batch 5 | 6 | on: 7 | workflow_dispatch: 8 | pull_request_review: 9 | types: [submitted] 10 | release: 11 | types: [published] 12 | 13 | jobs: 14 | run-platform: 15 | name: Run AWS full tests 16 | # run only if the PR review is an approval and the PR targets the master/main branch, or if the workflow was triggered manually or by a release 17 | if: github.repository == 'nf-core/funcscan' && github.event.review.state == 'approved' && (github.event.pull_request.base.ref == 'master' || github.event.pull_request.base.ref == 'main') || github.event_name == 'workflow_dispatch' || github.event_name == 'release' 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: Set revision variable 21 | id: revision 22 | run: | 23 | echo "revision=${{ (github.event_name == 'workflow_dispatch' || github.event_name == 'release') && github.sha || 'dev' }}" >> "$GITHUB_OUTPUT" 24 | 25 | - name: Launch workflow via Seqera Platform 26 | uses: seqeralabs/action-tower-launch@v2 27 | with: 28 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 29 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 30 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 31 | revision: ${{ steps.revision.outputs.revision }} 32 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/funcscan/work-${{ steps.revision.outputs.revision }} 33 | parameters: | 34 | { 35 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 36 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/funcscan/results-${{ steps.revision.outputs.revision }}" 37 | } 38 | profiles: test_full 39 | 40 | - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 41 | with: 42 | name: Seqera Platform debug log file 43 | path: | 44 | seqera_platform_action_*.log 45 | seqera_platform_action_*.json 46 | -------------------------------------------------------------------------------- /.github/workflows/awstest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS test 2 | # This workflow can be triggered manually with the GitHub Actions workflow dispatch button. 
3 | # It runs the -profile 'test' on AWS Batch 4 | 5 | on: 6 | workflow_dispatch: 7 | jobs: 8 | run-platform: 9 | name: Run AWS tests 10 | if: github.repository == 'nf-core/funcscan' 11 | runs-on: ubuntu-latest 12 | steps: 13 | # Launch workflow using Seqera Platform CLI tool action 14 | - name: Launch workflow via Seqera Platform 15 | uses: seqeralabs/action-tower-launch@v2 16 | with: 17 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 18 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 19 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 20 | revision: ${{ github.sha }} 21 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/funcscan/work-${{ github.sha }} 22 | parameters: | 23 | { 24 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/funcscan/results-test-${{ github.sha }}" 25 | } 26 | profiles: test 27 | 28 | - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 29 | with: 30 | name: Seqera Platform debug log file 31 | path: | 32 | seqera_platform_action_*.log 33 | seqera_platform_action_*.json 34 | -------------------------------------------------------------------------------- /.github/workflows/clean-up.yml: -------------------------------------------------------------------------------- 1 | name: "Close user-tagged issues and PRs" 2 | on: 3 | schedule: 4 | - cron: "0 0 * * 0" # Once a week 5 | 6 | jobs: 7 | clean-up: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9 14 | with: 15 | stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove the stale label or add a comment, otherwise this issue will be closed in 20 days." 16 | stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove the stale label or add a comment if it is still useful." 17 | close-issue-message: "This issue was closed because it was tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then remained stale for 20 days with no activity." 
18 | days-before-stale: 30 19 | days-before-close: 20 20 | days-before-pr-close: -1 21 | any-of-labels: "awaiting-changes,awaiting-feedback" 22 | exempt-issue-labels: "WIP" 23 | exempt-pr-labels: "WIP" 24 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 25 | -------------------------------------------------------------------------------- /.github/workflows/linting_comment.yml: -------------------------------------------------------------------------------- 1 | name: nf-core linting comment 2 | # This workflow is triggered after the linting action is complete 3 | # It posts an automated comment to the PR, even if the PR is coming from a fork 4 | 5 | on: 6 | workflow_run: 7 | workflows: ["nf-core linting"] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Download lint results 14 | uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11 15 | with: 16 | workflow: linting.yml 17 | workflow_conclusion: completed 18 | 19 | - name: Get PR number 20 | id: pr_number 21 | run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT 22 | 23 | - name: Post PR comment 24 | uses: marocchino/sticky-pull-request-comment@52423e01640425a022ef5fd42c6fb5f633a02728 # v2 25 | with: 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | number: ${{ steps.pr_number.outputs.pr_number }} 28 | path: linting-logs/lint_results.md 29 | -------------------------------------------------------------------------------- /.github/workflows/release-announcements.yml: -------------------------------------------------------------------------------- 1 | name: release-announcements 2 | # Automatic release toot and tweet announcements 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | toot: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: get topics and convert to hashtags 13 | id: get_topics 14 | run: | 15 | echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" | sed 's/-//g' >> $GITHUB_OUTPUT 16 | 17 | - uses: rzr/fediverse-action@master 18 | with: 19 | access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }} 20 | host: "mstdn.science" # custom host if not "mastodon.social" (default) 21 | # GitHub event payload 22 | # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release 23 | message: | 24 | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! 
37 | 38 | Please see the changelog: ${{ github.event.release.html_url }} 39 | env: 40 | BSKY_IDENTIFIER: ${{ secrets.BSKY_IDENTIFIER }} 41 | BSKY_PASSWORD: ${{ secrets.BSKY_PASSWORD }} 42 | # 43 | -------------------------------------------------------------------------------- /.github/workflows/template-version-comment.yml: -------------------------------------------------------------------------------- 1 | name: nf-core template version comment 2 | # This workflow is triggered on PRs to check if the pipeline template version matches the latest nf-core version. 3 | # It posts a comment to the PR, even if it comes from a fork. 4 | 5 | on: pull_request_target 6 | 7 | jobs: 8 | template_version: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Check out pipeline code 12 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 13 | with: 14 | ref: ${{ github.event.pull_request.head.sha }} 15 | 16 | - name: Read template version from .nf-core.yml 17 | uses: nichmor/minimal-read-yaml@1f7205277e25e156e1f63815781db80a6d490b8f # v0.0.2 18 | id: read_yml 19 | with: 20 | config: ${{ github.workspace }}/.nf-core.yml 21 | 22 | - name: Install nf-core 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }} 26 | 27 | - name: Check nf-core outdated 28 | id: nf_core_outdated 29 | run: echo "OUTPUT=$(pip list --outdated | grep nf-core)" >> ${GITHUB_ENV} 30 | 31 | - name: Post nf-core template version comment 32 | uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 33 | if: | 34 | contains(env.OUTPUT, 'nf-core') 35 | with: 36 | repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }} 37 | allow-repeats: false 38 | message: | 39 | > [!WARNING] 40 | > Newer version of the nf-core template is available. 41 | > 42 | > Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}. 43 | > Please update your pipeline to the latest version. 44 | > 45 | > For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync). 46 | # 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .nextflow* 2 | work/ 3 | data/ 4 | results/ 5 | .DS_Store 6 | testing/ 7 | testing* 8 | *.pyc 9 | null/ 10 | .nf-test* 11 | -------------------------------------------------------------------------------- /.gitpod.yml: -------------------------------------------------------------------------------- 1 | image: nfcore/gitpod:latest 2 | tasks: 3 | - name: Update Nextflow and setup pre-commit 4 | command: | 5 | pre-commit install --install-hooks 6 | nextflow self-update 7 | 8 | vscode: 9 | extensions: 10 | - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack 11 | -------------------------------------------------------------------------------- /.nf-core.yml: -------------------------------------------------------------------------------- 1 | lint: 2 | files_exist: 3 | - conf/igenomes.config 4 | - conf/igenomes_ignored.config 5 | nf_core_version: 3.3.2 6 | repository_type: pipeline 7 | template: 8 | author: Jasmin Frangenberg, Anan Ibrahim, Louisa Perelo, Moritz E. Beber, 9 | James A. 
Fellows Yates 10 | description: Pipeline for screening for functional components of assembled 11 | contigs 12 | force: false 13 | is_nfcore: true 14 | name: funcscan 15 | org: nf-core 16 | outdir: . 17 | skip_features: 18 | - igenomes 19 | - fastqc 20 | version: 3.0.0 21 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/mirrors-prettier 3 | rev: "v3.1.0" 4 | hooks: 5 | - id: prettier 6 | additional_dependencies: 7 | - prettier@3.6.2 8 | - repo: https://github.com/pre-commit/pre-commit-hooks 9 | rev: v5.0.0 10 | hooks: 11 | - id: trailing-whitespace 12 | args: [--markdown-linebreak-ext=md] 13 | exclude: | 14 | (?x)^( 15 | .*ro-crate-metadata.json$| 16 | modules/nf-core/.*| 17 | subworkflows/nf-core/.*| 18 | .*\.snap$ 19 | )$ 20 | - id: end-of-file-fixer 21 | exclude: | 22 | (?x)^( 23 | .*ro-crate-metadata.json$| 24 | modules/nf-core/.*| 25 | subworkflows/nf-core/.*| 26 | .*\.snap$ 27 | )$ 28 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | email_template.html 2 | adaptivecard.json 3 | slackreport.json 4 | .nextflow* 5 | work/ 6 | data/ 7 | results/ 8 | .DS_Store 9 | testing/ 10 | testing* 11 | *.pyc 12 | bin/ 13 | ro-crate-metadata.json 14 | tests/ 15 | -------------------------------------------------------------------------------- /.prettierrc.yml: -------------------------------------------------------------------------------- 1 | printWidth: 120 2 | tabWidth: 4 3 | overrides: 4 | - files: "*.{md,yml,yaml,html,css,scss,js,cff}" 5 | options: 6 | tabWidth: 2 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) The nf-core/funcscan team 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /assets/email_template.txt: -------------------------------------------------------------------------------- 1 | ---------------------------------------------------- 2 | ,--./,-. 
3 | ___ __ __ __ ___ /,-._.--~\\ 4 | |\\ | |__ __ / ` / \\ |__) |__ } { 5 | | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, 6 | `._,._,' 7 | nf-core/funcscan ${version} 8 | ---------------------------------------------------- 9 | Run Name: $runName 10 | 11 | <% if (success){ 12 | out << "## nf-core/funcscan execution completed successfully! ##" 13 | } else { 14 | out << """#################################################### 15 | ## nf-core/funcscan execution completed unsuccessfully! ## 16 | #################################################### 17 | The exit status of the task that caused the workflow execution to fail was: $exitStatus. 18 | The full error message was: 19 | 20 | ${errorReport} 21 | """ 22 | } %> 23 | 24 | 25 | The workflow was completed at $dateComplete (duration: $duration) 26 | 27 | The command used to launch the workflow was as follows: 28 | 29 | $commandLine 30 | 31 | 32 | 33 | Pipeline Configuration: 34 | ----------------------- 35 | <% out << summary.collect{ k,v -> " - $k: $v" }.join("\n") %> 36 | 37 | -- 38 | nf-core/funcscan 39 | https://github.com/nf-core/funcscan 40 | -------------------------------------------------------------------------------- /assets/multiqc_config.yml: -------------------------------------------------------------------------------- 1 | report_comment: > 2 | This report has been generated by the nf-core/funcscan analysis pipeline. For information about how 4 | to interpret these results, please see the documentation. 6 | report_section_order: 7 | "nf-core-funcscan-methods-description": 8 | order: -1000 9 | software_versions: 10 | order: -1001 11 | "nf-core-funcscan-summary": 12 | order: -1002 13 | 14 | run_modules: 15 | - prokka 16 | - custom_content 17 | 18 | table_columns_visible: 19 | Prokka: 20 | organism: false 21 | 22 | export_plots: true 23 | 24 | disable_version_detection: true 25 | 26 | custom_logo: "nf-core-funcscan_logo_flat_light.png" 27 | custom_logo_url: https://nf-co.re/funcscan 28 | custom_logo_title: "nf-core/funcscan" 29 | 30 | ## Tool specific configuration 31 | prokka_fn_snames: true 32 | -------------------------------------------------------------------------------- /assets/nf-core-funcscan_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/assets/nf-core-funcscan_logo.png -------------------------------------------------------------------------------- /assets/nf-core-funcscan_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/assets/nf-core-funcscan_logo_light.png -------------------------------------------------------------------------------- /assets/samplesheet.csv: -------------------------------------------------------------------------------- 1 | sample,fasta,protein,gbk 2 | sample_1,https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/wastewater_metagenome_contigs_1.fasta.gz,https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/wastewater_metagenome_contigs_prokka_1.faa,https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/wastewater_metagenome_contigs_prokka_1.gbk 3 | 
sample_2,https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/wastewater_metagenome_contigs_2.fasta.gz,https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/wastewater_metagenome_contigs_prokka_2.faa.gz,https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/wastewater_metagenome_contigs_prokka_2.gbk.gz 4 | sample_3,https://raw.githubusercontent.com/nf-core/test-datasets/funcscan/wastewater_metagenome_contigs.fasta 5 | -------------------------------------------------------------------------------- /assets/schema_input.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json-schema.org/draft/2020-12/schema", 3 | "$id": "https://raw.githubusercontent.com/nf-core/funcscan/master/assets/schema_input.json", 4 | "title": "nf-core/funcscan pipeline - params.input schema", 5 | "description": "Schema for the file provided with params.input", 6 | "type": "array", 7 | "items": { 8 | "type": "object", 9 | "properties": { 10 | "sample": { 11 | "type": "string", 12 | "pattern": "^\\S+$", 13 | "errorMessage": "Sample ID must be provided as a string and cannot contain spaces", 14 | "meta": ["id"] 15 | }, 16 | "fasta": { 17 | "type": "string", 18 | "format": "file-path", 19 | "exists": true, 20 | "pattern": "^\\S+\\.(fasta|fas|fna|fa)(\\.gz)?$", 21 | "errorMessage": "Fasta file for contigs must be provided, cannot contain spaces and must have extension `.fa`, `.fa.gz`, `.fas`, `.fas.gz`, `.fna`, `.fna.gz`, `.fasta`, or `.fasta.gz`" 22 | }, 23 | "protein": { 24 | "type": "string", 25 | "format": "file-path", 26 | "exists": true, 27 | "pattern": "^\\S+\\.(faa|fasta)(\\.gz)?$", 28 | "errorMessage": "Input file for peptide annotations has incorrect file format. File must end in `.fasta`, `.fasta.gz`, `.faa`, or `.faa.gz`" 29 | }, 30 | "gbk": { 31 | "type": "string", 32 | "format": "file-path", 33 | "exists": true, 34 | "pattern": "^\\S+\\.(gbk|gbff)(\\.gz)?$", 35 | "errorMessage": "Input file for feature annotations has incorrect file format. File must end in `.gbk`, `.gbk.gz`, `.gbff`, or `.gbff.gz`" 36 | } 37 | }, 38 | "required": ["sample", "fasta"], 39 | "dependentRequired": { 40 | "protein": ["gbk"], 41 | "gbk": ["protein"] 42 | } 43 | }, 44 | "uniqueItems": true 45 | } 46 | -------------------------------------------------------------------------------- /assets/sendmail_template.txt: -------------------------------------------------------------------------------- 1 | To: $email 2 | Subject: $subject 3 | Mime-Version: 1.0 4 | Content-Type: multipart/related;boundary="nfcoremimeboundary" 5 | 6 | --nfcoremimeboundary 7 | Content-Type: text/html; charset=utf-8 8 | 9 | $email_html 10 | 11 | --nfcoremimeboundary 12 | Content-Type: image/png;name="nf-core-funcscan_logo.png" 13 | Content-Transfer-Encoding: base64 14 | Content-ID: 15 | Content-Disposition: inline; filename="nf-core-funcscan_logo_light.png" 16 | 17 | <% out << new File("$projectDir/assets/nf-core-funcscan_logo_light.png"). 18 | bytes. 19 | encodeBase64(). 20 | toString(). 21 | tokenize( '\n' )*. 22 | toList()*. 23 | collate( 76 )*. 24 | collect { it.join() }. 25 | flatten(). 
26 | join( '\n' ) %> 27 | 28 | <% 29 | if (mqcFile){ 30 | def mqcFileObj = new File("$mqcFile") 31 | if (mqcFileObj.length() < mqcMaxSize){ 32 | out << """ 33 | --nfcoremimeboundary 34 | Content-Type: text/html; name=\"multiqc_report\" 35 | Content-Transfer-Encoding: base64 36 | Content-ID: 37 | Content-Disposition: attachment; filename=\"${mqcFileObj.getName()}\" 38 | 39 | ${mqcFileObj. 40 | bytes. 41 | encodeBase64(). 42 | toString(). 43 | tokenize( '\n' )*. 44 | toList()*. 45 | collate( 76 )*. 46 | collect { it.join() }. 47 | flatten(). 48 | join( '\n' )} 49 | """ 50 | }} 51 | %> 52 | 53 | --nfcoremimeboundary-- 54 | -------------------------------------------------------------------------------- /assets/slackreport.json: -------------------------------------------------------------------------------- 1 | { 2 | "attachments": [ 3 | { 4 | "fallback": "Plain-text summary of the attachment.", 5 | "color": "<% if (success) { %>good<% } else { %>danger<%} %>", 6 | "author_name": "nf-core/funcscan ${version} - ${runName}", 7 | "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", 8 | "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", 9 | "fields": [ 10 | { 11 | "title": "Command used to launch the workflow", 12 | "value": "```${commandLine}```", 13 | "short": false 14 | } 15 | <% 16 | if (!success) { %> 17 | , 18 | { 19 | "title": "Full error message", 20 | "value": "```${errorReport}```", 21 | "short": false 22 | }, 23 | { 24 | "title": "Pipeline configuration", 25 | "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? ("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", 26 | "short": false 27 | } 28 | <% } 29 | %> 30 | ], 31 | "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /conf/test.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 
6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h' 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'AMP/ARG Pyrodigal test profile' 23 | config_profile_description = 'Minimal test dataset to check pipeline function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv' 27 | 28 | annotation_tool = 'pyrodigal' 29 | 30 | run_arg_screening = true 31 | arg_fargene_hmmmodel = 'class_a,class_b_1_2' 32 | 33 | run_amp_screening = true 34 | amp_run_hmmsearch = true 35 | amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm' 36 | } 37 | -------------------------------------------------------------------------------- /conf/test_bakta.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_bakta, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h', 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'AMP/ARG Bakta test profile' 23 | config_profile_description = 'Minimal test dataset to check pipeline function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv' 27 | 28 | annotation_tool = 'bakta' 29 | annotation_bakta_db_downloadtype = 'light' 30 | 31 | run_amp_screening = true 32 | amp_run_hmmsearch = true 33 | amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm' 34 | 35 | run_arg_screening = true 36 | arg_skip_deeparg = true 37 | arg_fargene_hmmmodel = 'class_a,class_b_1_2' 38 | } 39 | -------------------------------------------------------------------------------- /conf/test_bgc_bakta.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 
6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_bgc_bakta, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h', 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'BGC Bakta test profile' 23 | config_profile_description = 'Minimal test dataset to check BGC workflow function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv' 27 | bgc_antismash_db = params.pipelines_testdata_base_path + 'funcscan/databases/antismash_trimmed_8_0_1.tar.gz' 28 | 29 | annotation_tool = 'bakta' 30 | annotation_bakta_db_downloadtype = "light" 31 | 32 | run_arg_screening = false 33 | run_amp_screening = false 34 | run_bgc_screening = true 35 | 36 | bgc_run_hmmsearch = true 37 | bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm' 38 | } 39 | -------------------------------------------------------------------------------- /conf/test_bgc_prokka.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_bgc_prokka, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h', 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'BGC Prokka test profile' 23 | config_profile_description = 'Minimal test dataset to check BGC workflow function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv' 27 | bgc_antismash_db = params.pipelines_testdata_base_path + 'funcscan/databases/antismash_trimmed_8_0_1.tar.gz' 28 | 29 | annotation_tool = 'prokka' 30 | 31 | run_arg_screening = false 32 | run_amp_screening = false 33 | run_bgc_screening = true 34 | 35 | bgc_run_hmmsearch = true 36 | bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm' 37 | } 38 | -------------------------------------------------------------------------------- /conf/test_bgc_pyrodigal.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 
6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_bgc_pyrodigal, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h', 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'BGC Pyrodigal test profile' 23 | config_profile_description = 'Minimal test dataset to check BGC workflow function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv' 27 | bgc_antismash_db = params.pipelines_testdata_base_path + 'funcscan/databases/antismash_trimmed_8_0_1.tar.gz' 28 | 29 | annotation_tool = 'pyrodigal' 30 | 31 | run_arg_screening = false 32 | run_amp_screening = false 33 | run_bgc_screening = true 34 | 35 | bgc_run_hmmsearch = true 36 | bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm' 37 | } 38 | -------------------------------------------------------------------------------- /conf/test_full.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running full-size tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a full size pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_full, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | params { 14 | config_profile_name = 'Full test profile' 15 | config_profile_description = 'Full test dataset to check pipeline function' 16 | 17 | // Input data for full size test 18 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_full.csv' 19 | 20 | // Database and annotation options 21 | save_annotations = true 22 | 23 | // AMP params 24 | run_amp_screening = true 25 | amp_run_hmmsearch = true 26 | amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm' 27 | amp_hmmsearch_savealignments = true 28 | amp_hmmsearch_savedomains = true 29 | amp_hmmsearch_savetargets = true 30 | amp_skip_amplify = true // takes too long 31 | 32 | // ARG params 33 | run_arg_screening = true 34 | arg_skip_deeparg = false 35 | 36 | // BGC params 37 | run_bgc_screening = true 38 | bgc_run_hmmsearch = true 39 | bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm' 40 | bgc_hmmsearch_savealignments = true 41 | bgc_hmmsearch_savetargets = true 42 | bgc_hmmsearch_savedomains = true 43 | bgc_skip_deepbgc = true // takes too long 44 | bgc_mincontiglength = 1000 45 | bgc_savefilteredcontigs = true 46 | bgc_antismash_contigminlength = 1000 47 | } 48 | -------------------------------------------------------------------------------- /conf/test_preannotated.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 
5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_preannotated, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h' 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'ARG/AMP test profile - preannotated input' 23 | config_profile_description = 'Minimal test dataset to check pipeline function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_preannotated.csv' 27 | 28 | annotation_tool = 'pyrodigal' 29 | 30 | run_arg_screening = true 31 | arg_fargene_hmmmodel = 'class_a,class_b_1_2' 32 | 33 | run_amp_screening = true 34 | amp_run_hmmsearch = true 35 | amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm' 36 | } 37 | -------------------------------------------------------------------------------- /conf/test_preannotated_bgc.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_preannotated_bgc, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h', 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'BGC test profile - preannotated input' 23 | config_profile_description = 'Minimal test dataset to check BGC workflow function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_preannotated.csv' 27 | bgc_antismash_db = params.pipelines_testdata_base_path + 'funcscan/databases/antismash_trimmed_8_0_1.tar.gz' 28 | 29 | annotation_tool = 'pyrodigal' 30 | 31 | run_arg_screening = false 32 | run_amp_screening = false 33 | run_bgc_screening = true 34 | 35 | bgc_run_hmmsearch = true 36 | bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm' 37 | } 38 | -------------------------------------------------------------------------------- /conf/test_prokka.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 
6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_prokka, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h' 18 | ] 19 | } 20 | 21 | params { 22 | config_profile_name = 'AMP/ARG Prokka test profile' 23 | config_profile_description = 'Minimal test dataset to check pipeline function' 24 | 25 | // Input data 26 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv' 27 | 28 | annotation_tool = 'prokka' 29 | 30 | run_arg_screening = true 31 | arg_fargene_hmmmodel = 'class_a,class_b_1_2' 32 | 33 | run_amp_screening = true 34 | amp_run_hmmsearch = true 35 | amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm' 36 | } 37 | -------------------------------------------------------------------------------- /conf/test_taxonomy_prokka.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_taxonomy_prokka, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h', 18 | ] 19 | withName: MMSEQS_DATABASES { 20 | memory = '14.GB' 21 | } 22 | } 23 | 24 | params { 25 | config_profile_name = 'Taxonomic classification test profile' 26 | config_profile_description = 'Minimal test dataset to check taxonomic classification workflow function' 27 | 28 | // Input data 29 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_reduced.csv' 30 | bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm' 31 | amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm' 32 | bgc_antismash_db = params.pipelines_testdata_base_path + 'funcscan/databases/antismash_trimmed_8_0_1.tar.gz' 33 | 34 | run_taxa_classification = true 35 | annotation_tool = 'prokka' 36 | 37 | run_arg_screening = true 38 | arg_skip_deeparg = true 39 | arg_skip_amrfinderplus = true 40 | 41 | run_amp_screening = true 42 | amp_run_hmmsearch = true 43 | 44 | run_bgc_screening = true 45 | bgc_mincontiglength = 1000 46 | bgc_savefilteredcontigs = true 47 | bgc_skip_deepbgc = true 48 | bgc_antismash_contigminlength = 1000 49 | bgc_run_hmmsearch = true 50 | } 51 | -------------------------------------------------------------------------------- /conf/test_taxonomy_pyrodigal.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 
6 | 7 | Use as follows: 8 | nextflow run nf-core/funcscan -profile test_taxonomy_pyrodigal, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | resourceLimits = [ 15 | cpus: 4, 16 | memory: '15.GB', 17 | time: '1.h', 18 | ] 19 | withName: MMSEQS_DATABASES { 20 | memory = '14.GB' 21 | } 22 | } 23 | 24 | params { 25 | config_profile_name = 'Taxonomic classification test profile' 26 | config_profile_description = 'Minimal test dataset to check taxonomic classification workflow function' 27 | 28 | // Input data 29 | input = params.pipelines_testdata_base_path + 'funcscan/samplesheet_hits.csv' 30 | bgc_hmmsearch_models = 'https://raw.githubusercontent.com/antismash/antismash/fd61de057e082fbf071732ac64b8b2e8883de32f/antismash/detection/hmm_detection/data/ToyB.hmm' 31 | amp_hmmsearch_models = params.pipelines_testdata_base_path + 'funcscan/hmms/mybacteriocin.hmm' 32 | bgc_antismash_db = params.pipelines_testdata_base_path + 'funcscan/databases/antismash_trimmed_8_0_1.tar.gz' 33 | 34 | run_taxa_classification = true 35 | annotation_tool = 'pyrodigal' 36 | 37 | run_arg_screening = true 38 | arg_skip_deeparg = true 39 | arg_skip_amrfinderplus = true 40 | 41 | run_amp_screening = true 42 | amp_run_hmmsearch = true 43 | 44 | run_bgc_screening = true 45 | bgc_mincontiglength = 1000 46 | bgc_savefilteredcontigs = true 47 | bgc_skip_deepbgc = true 48 | bgc_antismash_contigminlength = 1000 49 | bgc_run_hmmsearch = true 50 | } 51 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # nf-core/funcscan: Documentation 2 | 3 | The nf-core/funcscan documentation is split into the following pages: 4 | 5 | - [Usage](usage.md) 6 | - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. 7 | - [Output](output.md) 8 | - An overview of the different results produced by the pipeline and how to interpret them. 
9 | 10 | You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) 11 | -------------------------------------------------------------------------------- /docs/images/funcscan_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/funcscan_icon.png -------------------------------------------------------------------------------- /docs/images/funcscan_metro_workflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/funcscan_metro_workflow.png -------------------------------------------------------------------------------- /docs/images/funcscan_metro_workflow_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/funcscan_metro_workflow_dark.png -------------------------------------------------------------------------------- /docs/images/funcscan_metro_workflow_vertical.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/funcscan_metro_workflow_vertical.png -------------------------------------------------------------------------------- /docs/images/nf-core-funcscan_logo_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/nf-core-funcscan_logo_dark.png -------------------------------------------------------------------------------- /docs/images/nf-core-funcscan_logo_flat_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/nf-core-funcscan_logo_flat_dark.png -------------------------------------------------------------------------------- /docs/images/nf-core-funcscan_logo_flat_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/nf-core-funcscan_logo_flat_light.png -------------------------------------------------------------------------------- /docs/images/nf-core-funcscan_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/funcscan/HEAD/docs/images/nf-core-funcscan_logo_light.png -------------------------------------------------------------------------------- /modules/local/amp_database_download.nf: -------------------------------------------------------------------------------- 1 | process AMP_DATABASE_DOWNLOAD { 2 | label 'process_single' 3 | 4 | conda "bioconda::ampcombi=2.0.1" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
6 | 'https://depot.galaxyproject.org/singularity/ampcombi:2.0.1--pyhdfd78af_0': 7 | 'biocontainers/ampcombi:2.0.1--pyhdfd78af_0' }" 8 | 9 | input: 10 | val database_id 11 | 12 | output: 13 | path "amp_${database_id}_database" , emit: db 14 | path "versions.yml" , emit: versions 15 | 16 | when: 17 | task.ext.when == null || task.ext.when 18 | 19 | script: // This script is bundled with the pipeline, in nf-core/funcscan/bin/ 20 | """ 21 | ampcombi_download.py \\ 22 | --database_id $database_id \\ 23 | --threads ${task.cpus} 24 | 25 | cat <<-END_VERSIONS > versions.yml 26 | "${task.process}": 27 | ampcombi: \$(ampcombi --version | sed 's/ampcombi //') 28 | END_VERSIONS 29 | """ 30 | } 31 | -------------------------------------------------------------------------------- /modules/local/combgc.nf: -------------------------------------------------------------------------------- 1 | process COMBGC { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "conda-forge::python=3.11.0 conda-forge::biopython=1.80 conda-forge::pandas=1.5.2" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' : 8 | 'biocontainers/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' }" 9 | 10 | input: 11 | tuple val(meta), path(input_paths) 12 | 13 | output: 14 | tuple val(meta), path("${prefix}/combgc_summary.tsv") , emit: tsv 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: // This script is bundled with the pipeline, in nf-core/funcscan/bin/ 21 | prefix = task.ext.prefix ?: "${meta.id}" 22 | """ 23 | comBGC.py \\ 24 | -i $input_paths \\ 25 | -o $prefix 26 | 27 | cat <<-END_VERSIONS > versions.yml 28 | "${task.process}": 29 | comBGC: \$(comBGC.py --version | sed 's/comBGC //g') 30 | END_VERSIONS 31 | """ 32 | } 33 | -------------------------------------------------------------------------------- /modules/local/interproscan_download.nf: -------------------------------------------------------------------------------- 1 | process INTERPROSCAN_DATABASE { 2 | tag "interproscan_database_download" 3 | label 'process_long' 4 | 5 | conda "conda-forge::sed=4.7" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/curl:7.80.0' : 8 | 'biocontainers/curl:7.80.0' }" 9 | 10 | input: 11 | val database_url 12 | 13 | output: 14 | path("interproscan_db/*"), emit: db 15 | path "versions.yml", emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | """ 22 | mkdir -p interproscan_db/ 23 | 24 | filename=\$(basename ${database_url}) 25 | 26 | curl -L ${database_url} -o interproscan_db/\$filename 27 | tar -xzf interproscan_db/\$filename -C interproscan_db/ 28 | 29 | cat <<-END_VERSIONS > versions.yml 30 | "${task.process}": 31 | tar: \$(tar --version 2>&1 | sed -n '1s/tar (busybox) //p') 32 | curl: "\$(curl --version 2>&1 | sed -n '1s/^curl \\([0-9.]*\\).*/\\1/p')" 33 | END_VERSIONS 34 | """ 35 | } 36 | -------------------------------------------------------------------------------- /modules/local/merge_taxonomy_ampcombi.nf: -------------------------------------------------------------------------------- 1 | process MERGE_TAXONOMY_AMPCOMBI { 2 | label 'process_medium' 3 | 4 | conda "conda-forge::python=3.11.0 conda-forge::biopython=1.80 conda-forge::pandas=1.5.2" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' : 7 | 'biocontainers/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' }" 8 | 9 | input: 10 | path(ampcombi_df) 11 | path(taxa_list) 12 | 13 | output: 14 | path "ampcombi_complete_summary_taxonomy.tsv" , emit: tsv 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: // This script is bundled with the pipeline, in nf-core/funcscan/bin/ 21 | """ 22 | merge_taxonomy.py \\ 23 | ampcombi_taxa \\ 24 | --ampcombi $ampcombi_df \\ 25 | --taxonomy $taxa_list 26 | 27 | cat <<-END_VERSIONS > versions.yml 28 | "${task.process}": 29 | merge_taxonomy: \$(merge_taxonomy.py --version | sed 's/merge_taxonomy //g') 30 | END_VERSIONS 31 | """ 32 | } 33 | -------------------------------------------------------------------------------- /modules/local/merge_taxonomy_combgc.nf: -------------------------------------------------------------------------------- 1 | process MERGE_TAXONOMY_COMBGC { 2 | label 'process_medium' 3 | 4 | conda "conda-forge::python=3.11.0 conda-forge::biopython=1.80 conda-forge::pandas=1.5.2" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
6 | 'https://depot.galaxyproject.org/singularity/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' : 7 | 'biocontainers/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' }" 8 | 9 | input: 10 | path(combgc_df) 11 | path(taxa_list) 12 | 13 | output: 14 | path "combgc_complete_summary_taxonomy.tsv" , emit: tsv 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: // This script is bundled with the pipeline, in nf-core/funcscan/bin/ 21 | """ 22 | merge_taxonomy.py \\ 23 | combgc_taxa \\ 24 | --combgc $combgc_df \\ 25 | --taxonomy $taxa_list 26 | 27 | cat <<-END_VERSIONS > versions.yml 28 | "${task.process}": 29 | merge_taxonomy: \$(merge_taxonomy.py --version | sed 's/merge_taxonomy //g') 30 | END_VERSIONS 31 | """ 32 | } 33 | -------------------------------------------------------------------------------- /modules/local/merge_taxonomy_hamronization.nf: -------------------------------------------------------------------------------- 1 | process MERGE_TAXONOMY_HAMRONIZATION { 2 | label 'process_medium' 3 | 4 | conda "conda-forge::python=3.11.0 conda-forge::biopython=1.80 conda-forge::pandas=1.5.2" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' : 7 | 'biocontainers/mulled-v2-27978155697a3671f3ef9aead4b5c823a02cc0b7:548df772fe13c0232a7eab1bc1deb98b495a05ab-0' }" 8 | 9 | input: 10 | path(hamronization_df) 11 | path(taxa_list) 12 | 13 | output: 14 | path "hamronization_complete_summary_taxonomy.tsv" , emit: tsv 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: // This script is bundled with the pipeline, in nf-core/funcscan/bin/ 21 | """ 22 | merge_taxonomy.py \\ 23 | hamronization_taxa \\ 24 | --hamronization $hamronization_df \\ 25 | --taxonomy $taxa_list 26 | 27 | cat <<-END_VERSIONS > versions.yml 28 | "${task.process}": 29 | merge_taxonomy: \$(merge_taxonomy.py --version | sed 's/merge_taxonomy //g') 30 | END_VERSIONS 31 | """ 32 | } 33 | -------------------------------------------------------------------------------- /modules/nf-core/abricate/run/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::abricate=1.0.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/abricate/run/main.nf: -------------------------------------------------------------------------------- 1 | process ABRICATE_RUN { 2 | tag "${meta.id}" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 
'https://depot.galaxyproject.org/singularity/abricate%3A1.0.1--ha8f3691_1' 8 | : 'biocontainers/abricate:1.0.1--ha8f3691_1'}" 9 | 10 | input: 11 | tuple val(meta), path(assembly) 12 | path databasedir 13 | 14 | output: 15 | tuple val(meta), path("*.txt"), emit: report 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | def datadir = databasedir ? "--datadir ${databasedir}" : '' 25 | """ 26 | ## Symlink when necessary to rename the file to allow specifying the prefix variable inside report 27 | ## As the variable is what is used as the sample ID in the report file 28 | if [[ "${assembly}" != "${prefix}.fasta" ]]; then 29 | ln -s ${assembly} ${prefix}.fasta 30 | fi 31 | 32 | abricate \\ 33 | ${prefix}.fasta \\ 34 | ${args} \\ 35 | ${datadir} \\ 36 | --threads ${task.cpus} \\ 37 | > ${prefix}.txt 38 | 39 | cat <<-END_VERSIONS > versions.yml 40 | "${task.process}": 41 | abricate: \$(echo \$(abricate --version 2>&1) | sed 's/^.*abricate //' ) 42 | END_VERSIONS 43 | """ 44 | 45 | stub: 46 | def prefix = task.ext.prefix ?: "${meta.id}" 47 | """ 48 | touch ${prefix}.txt 49 | 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | abricate: \$(echo \$(abricate --version 2>&1) | sed 's/^.*abricate //' ) 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/abricate/run/meta.yml: -------------------------------------------------------------------------------- 1 | name: abricate_run 2 | description: Screen assemblies for antimicrobial resistance against multiple databases 3 | keywords: 4 | - bacteria 5 | - assembly 6 | - antimicrobial resistance 7 | tools: 8 | - abricate: 9 | description: Mass screening of contigs for antibiotic resistance genes 10 | homepage: https://github.com/tseemann/abricate 11 | documentation: https://github.com/tseemann/abricate 12 | tool_dev_url: https://github.com/tseemann/abricate 13 | licence: ["GPL v2"] 14 | identifier: biotools:ABRicate 15 | input: 16 | - - meta: 17 | type: map 18 | description: | 19 | Groovy Map containing sample information 20 | e.g. [ id:'test', single_end:false ] 21 | - assembly: 22 | type: file 23 | description: FASTA, GenBank or EMBL formatted file 24 | pattern: "*.{fa,fasta,fna,fa.gz,fasta.gz,fna.gz,gbk,gbk.gz,embl,embl.gz}" 25 | ontologies: 26 | - edam: "http://edamontology.org/format_1929" # FASTA 27 | - edam: "http://edamontology.org/format_1936" # GenBank 28 | - edam: "http://edamontology.org/format_2204" # EMBL format 29 | - databasedir: 30 | type: directory 31 | description: Optional location of local copy of database files, possibly with 32 | custom databases set up with `abricate --setupdb` 33 | pattern: "*/" 34 | output: 35 | report: 36 | - - meta: 37 | type: map 38 | description: | 39 | Groovy Map containing sample information 40 | e.g. 
[ id:'test', single_end:false ] 41 | - "*.txt": 42 | type: file 43 | description: Tab-delimited report of results 44 | pattern: "*.{txt}" 45 | ontologies: 46 | - edam: "http://edamontology.org/format_2330" # Textual format 47 | versions: 48 | - versions.yml: 49 | type: file 50 | description: File containing software versions 51 | pattern: "versions.yml" 52 | ontologies: 53 | - edam: "http://edamontology.org/format_3750" # YAML 54 | authors: 55 | - "@rpetit3" 56 | maintainers: 57 | - "@rpetit3" 58 | -------------------------------------------------------------------------------- /modules/nf-core/abricate/run/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process ABRICATE_RUN" 4 | script "../main.nf" 5 | process "ABRICATE_RUN" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "abricate" 9 | tag "abricate/run" 10 | 11 | test("bacteroides_fragilis - genome.fa.gz") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [ id:'test', single_end:false ], // meta map 18 | file(params.modules_testdata_base_path + 'genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz', checkIfExists: true) 19 | ] 20 | input[1] = [] 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll( 27 | { assert process.success }, 28 | { assert snapshot(process.out).match() } 29 | ) 30 | } 31 | } 32 | 33 | test("bacteroides_fragilis - genome.fa.gz - stub") { 34 | 35 | options "-stub" 36 | 37 | when { 38 | process { 39 | """ 40 | input[0] = [ 41 | [ id:'test', single_end:false ], // meta map 42 | file(params.modules_testdata_base_path + 'genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz', checkIfExists: true) 43 | ] 44 | input[1] = [] 45 | """ 46 | } 47 | } 48 | 49 | then { 50 | assertAll( 51 | { assert process.success }, 52 | { assert snapshot(process.out).match() } 53 | ) 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/cluster/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::ampcombi=2.0.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/cluster/main.nf: -------------------------------------------------------------------------------- 1 | process AMPCOMBI2_CLUSTER { 2 | tag 'ampcombi2' 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/ampcombi:2.0.1--pyhdfd78af_0': 8 | 'biocontainers/ampcombi:2.0.1--pyhdfd78af_0' }" 9 | 10 | input: 11 | path(summary_file) 12 | 13 | output: 14 | path("Ampcombi_summary_cluster.tsv") , emit: cluster_tsv 15 | path("Ampcombi_summary_cluster_representative_seq.tsv"), emit: rep_cluster_tsv 16 | path("Ampcombi_cluster.log") , emit: log, optional:true 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | """ 25 | ampcombi cluster \\ 26 | --ampcombi_summary ${summary_file} \\ 27 | $args \\ 28 | --threads ${task.cpus} 29 | 30 | cat <<-END_VERSIONS > versions.yml 31 | "${task.process}": 32 | ampcombi: \$(ampcombi --version | sed 's/ampcombi //') 33 | END_VERSIONS 34 | """ 35 | 36 | stub: 37 | def args = task.ext.args ?: '' 38 | """ 39 | touch Ampcombi_summary_cluster.tsv 40 | touch Ampcombi_summary_cluster_representative_seq.tsv 41 | touch Ampcombi_cluster.log 42 | 43 | cat <<-END_VERSIONS > versions.yml 44 | "${task.process}": 45 | ampcombi: \$(ampcombi --version | sed 's/ampcombi //') 46 | END_VERSIONS 47 | """ 48 | } 49 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/cluster/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process AMPCOMBI2_CLUSTER" 4 | script "../main.nf" 5 | process "AMPCOMBI2_CLUSTER" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "ampcombi2" 10 | tag "ampcombi2/cluster" 11 | tag "ampcombi2/complete" 12 | 13 | setup { 14 | run("AMPCOMBI2_COMPLETE") { 15 | script "../../../ampcombi2/complete/main.nf" 16 | process { 17 | """ 18 | input[0] = 19 | [ 20 | file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/ampcombi/ampcombi2/sample_1_ampcombi.tsv', checkIfExists: true), 21 | file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/ampcombi/ampcombi2/sample_2_ampcombi.tsv', checkIfExists: true) 22 | ] 23 | """ 24 | } 25 | } 26 | } 27 | 28 | test("ampcombi2_cluster - metagenome") { 29 | when { 30 | process { 31 | """ 32 | input[0] = AMPCOMBI2_COMPLETE.out.tsv 33 | """ 34 | } 35 | } 36 | 37 | then { 38 | assertAll( 39 | { assert process.success }, 40 | { assert snapshot( 41 | file(process.out.cluster_tsv[0]).readLines()[0].contains("Linear/Cyclic/Branched"), 42 | file(process.out.rep_cluster_tsv[0]).readLines()[0].contains("total_cluster_members"), 43 | process.out.versions).match() } 44 | ) 45 | } 46 | } 47 | 48 | test("ampcombi2_cluster - metagenome - stub") { 49 | options "-stub" 50 | when { 51 | process { 52 | """ 53 | input[0] = AMPCOMBI2_COMPLETE.out.tsv 54 | """ 55 | } 56 | } 57 | 58 | then { 59 | assertAll( 60 | { assert process.success }, 61 | { assert snapshot(process.out).match() } 62 | ) 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/cluster/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "ampcombi2_cluster - metagenome": { 3 | "content": [ 4 | true, 5 | true, 6 | [ 7 | "versions.yml:md5,b629089d44775078dce5e664a455422b" 8 | ] 9 | ], 10 | "meta": { 11 | "nf-test": "0.9.2", 12 | "nextflow": "24.10.2" 13 | }, 14 | "timestamp": "2024-12-03T07:57:01.869983435" 15 | }, 16 | "ampcombi2_cluster - metagenome - stub": { 17 | "content": [ 18 | { 19 | "0": [ 20 | 
"Ampcombi_summary_cluster.tsv:md5,d41d8cd98f00b204e9800998ecf8427e" 21 | ], 22 | "1": [ 23 | "Ampcombi_summary_cluster_representative_seq.tsv:md5,d41d8cd98f00b204e9800998ecf8427e" 24 | ], 25 | "2": [ 26 | "Ampcombi_cluster.log:md5,d41d8cd98f00b204e9800998ecf8427e" 27 | ], 28 | "3": [ 29 | "versions.yml:md5,b629089d44775078dce5e664a455422b" 30 | ], 31 | "cluster_tsv": [ 32 | "Ampcombi_summary_cluster.tsv:md5,d41d8cd98f00b204e9800998ecf8427e" 33 | ], 34 | "log": [ 35 | "Ampcombi_cluster.log:md5,d41d8cd98f00b204e9800998ecf8427e" 36 | ], 37 | "rep_cluster_tsv": [ 38 | "Ampcombi_summary_cluster_representative_seq.tsv:md5,d41d8cd98f00b204e9800998ecf8427e" 39 | ], 40 | "versions": [ 41 | "versions.yml:md5,b629089d44775078dce5e664a455422b" 42 | ] 43 | } 44 | ], 45 | "meta": { 46 | "nf-test": "0.9.2", 47 | "nextflow": "24.10.2" 48 | }, 49 | "timestamp": "2024-12-03T07:57:23.939137628" 50 | } 51 | } -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/cluster/tests/tags.yml: -------------------------------------------------------------------------------- 1 | ampcombi2/cluster: 2 | - "modules/nf-core/ampcombi2/cluster/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/complete/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::ampcombi=2.0.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/complete/main.nf: -------------------------------------------------------------------------------- 1 | process AMPCOMBI2_COMPLETE { 2 | tag "ampcombi2" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/ampcombi:2.0.1--pyhdfd78af_0': 8 | 'biocontainers/ampcombi:2.0.1--pyhdfd78af_0' }" 9 | 10 | input: 11 | path(summaries) 12 | 13 | output: 14 | path("Ampcombi_summary.tsv") , emit: tsv 15 | path("Ampcombi_complete.log"), emit: log, optional:true 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | """ 24 | ampcombi complete \\ 25 | --summaries_files '${summaries.collect{"$it"}.join("' '")}' \\ 26 | $args 27 | 28 | cat <<-END_VERSIONS > versions.yml 29 | "${task.process}": 30 | ampcombi: \$(ampcombi --version | sed 's/ampcombi //') 31 | END_VERSIONS 32 | """ 33 | 34 | stub: 35 | def args = task.ext.args ?: '' 36 | """ 37 | touch Ampcombi_summary.tsv 38 | 39 | cat <<-END_VERSIONS > versions.yml 40 | "${task.process}": 41 | ampcombi: \$(ampcombi --version | sed 's/ampcombi //') 42 | END_VERSIONS 43 | """ 44 | } 45 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/complete/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json 2 | name: "ampcombi2_complete" 3 | description: A submodule that merges all output summary tables from ampcombi/parsetables 4 | in one summary file. 
5 | keywords: 6 | - antimicrobial peptides 7 | - amps 8 | - parsing 9 | - reporting 10 | - align 11 | - macrel 12 | - amplify 13 | - hmmsearch 14 | - neubi 15 | - ampir 16 | - ampgram 17 | - amptransformer 18 | - DRAMP 19 | tools: 20 | - ampcombi2/complete: 21 | description: "This merges the per sample AMPcombi summaries generated by running 22 | 'ampcombi2/parsetables'." 23 | homepage: "https://github.com/Darcy220606/AMPcombi" 24 | documentation: "https://github.com/Darcy220606/AMPcombi" 25 | tool_dev_url: "https://github.com/Darcy220606/AMPcombi/tree/dev" 26 | licence: ["MIT"] 27 | identifier: "" 28 | 29 | input: 30 | - - summaries: 31 | type: list 32 | description: The path to the list of files corresponding to each sample as generated 33 | by ampcombi2/parsetables. 34 | pattern: "[*_ampcombi.tsv, *_ampcombi.tsv]" 35 | output: 36 | - tsv: 37 | - Ampcombi_summary.tsv: 38 | type: file 39 | description: A file containing the complete AMPcombi summaries from all processed 40 | samples. 41 | pattern: "*.tsv" 42 | - log: 43 | - Ampcombi_complete.log: 44 | type: file 45 | description: A log file that captures the standard output for the entire process 46 | in a log file. Can be activated by `--log`. 47 | pattern: "*.log" 48 | - versions: 49 | - versions.yml: 50 | type: file 51 | description: File containing software versions 52 | pattern: "versions.yml" 53 | authors: 54 | - "@darcy220606" 55 | maintainers: 56 | - "@darcy220606" 57 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/complete/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process AMPCOMBI2_COMPLETE" 4 | script "../main.nf" 5 | process "AMPCOMBI2_COMPLETE" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "ampcombi2" 10 | tag "ampcombi2/complete" 11 | 12 | test("ampcombi2_complete - contigs") { 13 | when { 14 | process { 15 | """ 16 | input[0] = 17 | [ 18 | file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/ampcombi/ampcombi2/sample_1_ampcombi.tsv', checkIfExists: true), 19 | file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/ampcombi/ampcombi2/sample_2_ampcombi.tsv', checkIfExists: true) 20 | ] 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll( 27 | { assert process.success }, 28 | { assert snapshot( 29 | file(process.out.tsv[0]).readLines()[0].contains("ampir"), 30 | process.out.versions).match() } 31 | ) 32 | } 33 | } 34 | 35 | test("ampcombi2_complete - contigs - stub") { 36 | options "-stub" 37 | when { 38 | process { 39 | """ 40 | input[0] = 41 | [ 42 | file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/ampcombi/ampcombi2/sample_1_ampcombi.tsv', checkIfExists: true), 43 | file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/ampcombi/ampcombi2/sample_2_ampcombi.tsv', checkIfExists: true) 44 | ] 45 | """ 46 | } 47 | } 48 | 49 | then { 50 | assertAll( 51 | { assert process.success }, 52 | { assert snapshot(process.out).match() } 53 | ) 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/complete/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "ampcombi2_complete - contigs - stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | "Ampcombi_summary.tsv:md5,d41d8cd98f00b204e9800998ecf8427e" 7 | ], 8 | "1": [ 9 | 10 | ], 11 | "2": [ 12 | 
"versions.yml:md5,bfba0046e0cfa7b0b6d79663823f94c0" 13 | ], 14 | "log": [ 15 | 16 | ], 17 | "tsv": [ 18 | "Ampcombi_summary.tsv:md5,d41d8cd98f00b204e9800998ecf8427e" 19 | ], 20 | "versions": [ 21 | "versions.yml:md5,bfba0046e0cfa7b0b6d79663823f94c0" 22 | ] 23 | } 24 | ], 25 | "meta": { 26 | "nf-test": "0.9.2", 27 | "nextflow": "24.10.2" 28 | }, 29 | "timestamp": "2024-12-03T07:57:53.385349848" 30 | }, 31 | "ampcombi2_complete - contigs": { 32 | "content": [ 33 | true, 34 | [ 35 | "versions.yml:md5,bfba0046e0cfa7b0b6d79663823f94c0" 36 | ] 37 | ], 38 | "meta": { 39 | "nf-test": "0.9.2", 40 | "nextflow": "24.10.2" 41 | }, 42 | "timestamp": "2024-12-03T07:57:40.263912946" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/complete/tests/tags.yml: -------------------------------------------------------------------------------- 1 | ampcombi2/complete: 2 | - "modules/nf-core/ampcombi2/complete/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/parsetables/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::ampcombi=2.0.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/parsetables/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: AMPCOMBI2_PARSETABLES { 4 | 5 | ext.args = [ 6 | "--aminoacid_length 2000", 7 | "--db_evalue 2000", 8 | "--ampir_file 'ampir.tsv'", 9 | "--amplify_file 'amplify.tsv'", 10 | "--macrel_file '.prediction'", 11 | "--neubi_file '.fasta'", 12 | "--hmmsearch_file 'candidates.txt'", 13 | "--ampgram_file '.tsv'", 14 | "--amptransformer_file '.txt'", 15 | "--log true", 16 | "--interproscan_filter 'nonsense'" 17 | ].join(' ') 18 | 19 | ext.prefix = "sample_1" 20 | 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /modules/nf-core/ampcombi2/parsetables/tests/tags.yml: -------------------------------------------------------------------------------- 1 | ampcombi2/parsetables: 2 | - "modules/nf-core/ampcombi2/parsetables/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/ampir/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::r-ampir=1.1.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/ampir/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process AMPIR" 4 | script "../main.nf" 5 | process "AMPIR" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "ampir" 10 | 11 | test("candidatus_portiera_aleyrodidarum proteome [fasta]") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [ id:'test', single_end:false ], // meta map 18 | file(params.modules_testdata_base_path + 
'genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/proteome.fasta', checkIfExists: true), 19 | ] 20 | input[1] = "precursor" // model 21 | input[2] = 10 // min_length 22 | input[3] = "0.7" // min_probability 23 | """ 24 | } 25 | } 26 | 27 | then { 28 | assertAll( 29 | { assert process.success }, 30 | { assert snapshot(process.out).match() } 31 | ) 32 | } 33 | 34 | } 35 | 36 | test("candidatus_portiera_aleyrodidarum proteome [fasta] - stub") { 37 | 38 | options "-stub" 39 | 40 | when { 41 | process { 42 | """ 43 | input[0] = [ 44 | [ id:'test', single_end:false ], // meta map 45 | file(params.modules_testdata_base_path + 'genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/proteome.fasta', checkIfExists: true), 46 | ] 47 | input[1] = "precursor" // model 48 | input[2] = 10 // min_length 49 | input[3] = "0.7" // min_probability 50 | """ 51 | } 52 | } 53 | 54 | then { 55 | assertAll( 56 | { assert process.success }, 57 | { assert snapshot(process.out.amps_faa.collect { file(it[1]).getName() } + 58 | process.out.amps_tsv.collect { file(it[1]).getName() } + 59 | process.out.versions).match() } 60 | ) 61 | } 62 | 63 | } 64 | 65 | } 66 | -------------------------------------------------------------------------------- /modules/nf-core/ampir/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "sarscov2 - bam - stub": { 3 | "content": [ 4 | [ 5 | "test.faa", 6 | "test.tsv", 7 | "versions.yml:md5,f8d5026ccdd8f72c7ac1b5e4670aab49" 8 | ] 9 | ], 10 | "timestamp": "2023-12-26T18:19:18.308141504" 11 | }, 12 | "sarscov2 - bam": { 13 | "content": [ 14 | { 15 | "0": [ 16 | [ 17 | { 18 | "id": "test", 19 | "single_end": false 20 | }, 21 | "test.faa:md5,0435609144022c55ac196db053f0df89" 22 | ] 23 | ], 24 | "1": [ 25 | [ 26 | { 27 | "id": "test", 28 | "single_end": false 29 | }, 30 | "test.tsv:md5,70a70e8698e8d367707f4b1833e3168c" 31 | ] 32 | ], 33 | "2": [ 34 | "versions.yml:md5,f8d5026ccdd8f72c7ac1b5e4670aab49" 35 | ], 36 | "amps_faa": [ 37 | [ 38 | { 39 | "id": "test", 40 | "single_end": false 41 | }, 42 | "test.faa:md5,0435609144022c55ac196db053f0df89" 43 | ] 44 | ], 45 | "amps_tsv": [ 46 | [ 47 | { 48 | "id": "test", 49 | "single_end": false 50 | }, 51 | "test.tsv:md5,70a70e8698e8d367707f4b1833e3168c" 52 | ] 53 | ], 54 | "versions": [ 55 | "versions.yml:md5,f8d5026ccdd8f72c7ac1b5e4670aab49" 56 | ] 57 | } 58 | ], 59 | "timestamp": "2023-12-26T18:18:57.151185866" 60 | } 61 | } -------------------------------------------------------------------------------- /modules/nf-core/ampir/tests/tags.yml: -------------------------------------------------------------------------------- 1 | ampir: 2 | - "modules/nf-core/ampir/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/amplify/predict/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::amplify=2.0.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/amplify/predict/main.nf: -------------------------------------------------------------------------------- 1 | process AMPLIFY_PREDICT { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | // WARN: Version information not provided by tool on CLI. 
Please update version string below when bumping container versions. 6 | conda "${moduleDir}/environment.yml" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 8 | 'https://depot.galaxyproject.org/singularity/amplify:2.0.0--py36hdfd78af_1': 9 | 'biocontainers/amplify:2.0.0--py36hdfd78af_1' }" 10 | 11 | input: 12 | tuple val(meta), path(faa) 13 | path(model_dir) 14 | 15 | output: 16 | tuple val(meta), path('*.tsv'), emit: tsv 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | def prefix = task.ext.prefix ?: "${meta.id}" 25 | def custom_model_dir = model_dir ? "-md ${model_dir}" : "" 26 | """ 27 | AMPlify \\ 28 | $args \\ 29 | ${custom_model_dir} \\ 30 | -s '${faa}' 31 | 32 | #rename output, because tool includes date and time in name 33 | mv *.tsv ${prefix}.tsv 34 | 35 | cat <<-END_VERSIONS > versions.yml 36 | "${task.process}": 37 | AMPlify: \$(AMPlify --help | grep 'AMPlify v' | sed -e "s/^.*AMPlify v//") 38 | END_VERSIONS 39 | """ 40 | 41 | stub: 42 | def prefix = task.ext.prefix ?: "${meta.id}" 43 | """ 44 | touch ${prefix}.tsv 45 | 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | AMPlify: \$(AMPlify --help | grep 'AMPlify v' | sed -e "s/^.*AMPlify v//") 49 | END_VERSIONS 50 | """ 51 | } 52 | -------------------------------------------------------------------------------- /modules/nf-core/amplify/predict/meta.yml: -------------------------------------------------------------------------------- 1 | name: "amplify_predict" 2 | description: AMPlify is an attentive deep learning model for antimicrobial peptide 3 | prediction. 4 | keywords: 5 | - antimicrobial peptides 6 | - AMPs 7 | - prediction 8 | - model 9 | tools: 10 | - "amplify": 11 | description: "Attentive deep learning model for antimicrobial peptide prediction" 12 | homepage: "https://github.com/bcgsc/AMPlify" 13 | documentation: "https://github.com/bcgsc/AMPlify" 14 | tool_dev_url: "https://github.com/bcgsc/AMPlify" 15 | doi: "10.1186/s12864-022-08310-4" 16 | licence: ["GPL v3"] 17 | identifier: biotools:amplify 18 | input: 19 | - - meta: 20 | type: map 21 | description: | 22 | Groovy Map containing sample information 23 | e.g. [ id:'test', single_end:false ] 24 | - faa: 25 | type: file 26 | description: amino acid sequences fasta 27 | pattern: "*.{fa,fa.gz,faa,faa.gz,fasta,fasta.gz}" 28 | - - model_dir: 29 | type: directory 30 | description: Directory of where models are stored (optional) 31 | output: 32 | - tsv: 33 | - meta: 34 | type: map 35 | description: | 36 | Groovy Map containing sample information 37 | e.g. 
[ id:'test', single_end:false ] 38 | - "*.tsv": 39 | type: file 40 | description: amino acid sequences with prediction (AMP, non-AMP) and probability 41 | scores 42 | pattern: "*.{tsv}" 43 | - versions: 44 | - versions.yml: 45 | type: file 46 | description: File containing software versions 47 | pattern: "versions.yml" 48 | authors: 49 | - "@louperelo" 50 | maintainers: 51 | - "@louperelo" 52 | -------------------------------------------------------------------------------- /modules/nf-core/amplify/predict/tests/tags.yml: -------------------------------------------------------------------------------- 1 | amplify/predict: 2 | - "modules/nf-core/amplify/predict/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/amrfinderplus/run/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::ncbi-amrfinderplus=4.0.23 8 | -------------------------------------------------------------------------------- /modules/nf-core/amrfinderplus/update/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::ncbi-amrfinderplus=4.0.23 8 | -------------------------------------------------------------------------------- /modules/nf-core/amrfinderplus/update/main.nf: -------------------------------------------------------------------------------- 1 | process AMRFINDERPLUS_UPDATE { 2 | tag "update" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/ncbi-amrfinderplus:4.0.23--hf69ffd2_0': 8 | 'biocontainers/ncbi-amrfinderplus:4.0.23--hf69ffd2_0' }" 9 | 10 | output: 11 | path "amrfinderdb.tar.gz", emit: db 12 | path "versions.yml" , emit: versions 13 | 14 | when: 15 | task.ext.when == null || task.ext.when 16 | 17 | script: 18 | """ 19 | amrfinder_update -d amrfinderdb 20 | tar czvf amrfinderdb.tar.gz -C amrfinderdb/\$(readlink amrfinderdb/latest) ./ 21 | 22 | cat <<-END_VERSIONS > versions.yml 23 | "${task.process}": 24 | amrfinderplus: \$(amrfinder --version) 25 | END_VERSIONS 26 | """ 27 | 28 | stub: 29 | """ 30 | touch amrfinderdb.tar 31 | gzip amrfinderdb.tar 32 | 33 | cat <<-END_VERSIONS > versions.yml 34 | "${task.process}": 35 | amrfinderplus: \$(amrfinder --version) 36 | END_VERSIONS 37 | """ 38 | } 39 | -------------------------------------------------------------------------------- /modules/nf-core/amrfinderplus/update/meta.yml: -------------------------------------------------------------------------------- 1 | name: amrfinderplus_update 2 | description: Identify antimicrobial resistance in gene or protein sequences 3 | keywords: 4 | - bacteria 5 | - fasta 6 | - antibiotic resistance 7 | tools: 8 | - amrfinderplus: 9 | description: AMRFinderPlus finds antimicrobial resistance and other genes in protein 10 | or nucleotide sequences. 
11 | homepage: https://github.com/ncbi/amr/wiki 12 | documentation: https://github.com/ncbi/amr/wiki 13 | tool_dev_url: https://github.com/ncbi/amr 14 | doi: "10.1038/s41598-021-91456-0" 15 | licence: ["Public Domain"] 16 | identifier: biotools:amrfinderplus 17 | # this module does not have any input. 18 | output: 19 | db: 20 | - amrfinderdb.tar.gz: 21 | type: file 22 | description: The latest AMRFinder+ database in a compressed tarball 23 | pattern: "*.tar.gz" 24 | ontologies: 25 | - edam: "http://edamontology.org/format_3981" # TAR format 26 | - edam: http://edamontology.org/format_3989 # GZIP format 27 | versions: 28 | - versions.yml: 29 | type: file 30 | description: File containing software versions 31 | pattern: "versions.yml" 32 | ontologies: 33 | - edam: "http://edamontology.org/format_3750" # YAML 34 | authors: 35 | - "@rpetit3" 36 | maintainers: 37 | - "@rpetit3" 38 | -------------------------------------------------------------------------------- /modules/nf-core/amrfinderplus/update/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process AMRFINDERPLUS_UPDATE" 4 | script "../main.nf" 5 | process "AMRFINDERPLUS_UPDATE" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "amrfinderplus" 10 | tag "amrfinderplus/update" 11 | 12 | test("amrfinderplus/update") { 13 | 14 | when { 15 | process { 16 | """ 17 | """ 18 | } 19 | } 20 | 21 | then { 22 | assertAll( 23 | { assert process.success }, 24 | { assert snapshot( 25 | process.out.db.collect { file(it).getName() } + 26 | process.out.versions 27 | ).match() 28 | } 29 | ) 30 | } 31 | } 32 | 33 | test("amrfinderplus/update - stub") { 34 | 35 | options "-stub" 36 | 37 | when { 38 | process { 39 | """ 40 | """ 41 | } 42 | } 43 | 44 | then { 45 | assertAll( 46 | { assert process.success }, 47 | { assert snapshot(process.out).match() } 48 | ) 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /modules/nf-core/amrfinderplus/update/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "amrfinderplus/update - stub": { 3 | "content": [ 4 | { 5 | "0": [ 6 | "amrfinderdb.tar.gz:md5,d41d8cd98f00b204e9800998ecf8427e" 7 | ], 8 | "1": [ 9 | "versions.yml:md5,33558f79669c2b83f556fb4142fa2cc6" 10 | ], 11 | "db": [ 12 | "amrfinderdb.tar.gz:md5,d41d8cd98f00b204e9800998ecf8427e" 13 | ], 14 | "versions": [ 15 | "versions.yml:md5,33558f79669c2b83f556fb4142fa2cc6" 16 | ] 17 | } 18 | ], 19 | "meta": { 20 | "nf-test": "0.9.2", 21 | "nextflow": "25.04.6" 22 | }, 23 | "timestamp": "2025-10-04T04:37:11.092721327" 24 | }, 25 | "amrfinderplus/update": { 26 | "content": [ 27 | [ 28 | "amrfinderdb.tar.gz", 29 | "versions.yml:md5,33558f79669c2b83f556fb4142fa2cc6" 30 | ] 31 | ], 32 | "meta": { 33 | "nf-test": "0.9.2", 34 | "nextflow": "25.04.6" 35 | }, 36 | "timestamp": "2025-10-04T04:37:02.101449104" 37 | } 38 | } -------------------------------------------------------------------------------- /modules/nf-core/antismash/antismash/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - "bioconda::antismash=8.0.1" 8 | --------------------------------------------------------------------------------
/modules/nf-core/antismash/antismash/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: ANTISMASH_ANTISMASH { 3 | memory = 7.GB 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/antismash/antismashdownloaddatabases/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - "bioconda::antismash=8.0.1" 8 | -------------------------------------------------------------------------------- /modules/nf-core/antismash/antismashdownloaddatabases/main.nf: -------------------------------------------------------------------------------- 1 | process ANTISMASH_ANTISMASHDOWNLOADDATABASES { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "nf-core/antismash:8.0.1--pyhdfd78af_0" 6 | 7 | output: 8 | path "antismash_db", emit: database 9 | path "versions.yml", emit: versions 10 | 11 | when: 12 | task.ext.when == null || task.ext.when 13 | 14 | script: 15 | def args = task.ext.args ?: '' 16 | """ 17 | download-antismash-databases \\ 18 | --database-dir antismash_db \\ 19 | ${args} 20 | 21 | cat <<-END_VERSIONS > versions.yml 22 | "${task.process}": 23 | antismash: \$(echo \$(antismash --version) | sed 's/antiSMASH //;s/-.*//g') 24 | END_VERSIONS 25 | """ 26 | 27 | stub: 28 | def args = task.ext.args ?: '' 29 | """ 30 | echo "download-antismash-databases --database-dir antismash_db ${args}" 31 | 32 | mkdir antismash_db 33 | mkdir antismash_db/as-js 34 | mkdir antismash_db/clusterblast 35 | mkdir antismash_db/clustercompare 36 | mkdir antismash_db/comparippson 37 | mkdir antismash_db/knownclusterblast 38 | mkdir antismash_db/mite 39 | mkdir antismash_db/nrps_pks 40 | mkdir antismash_db/pfam 41 | mkdir antismash_db/resfam 42 | mkdir antismash_db/tigrfam 43 | 44 | cat <<-END_VERSIONS > versions.yml 45 | "${task.process}": 46 | antismash: \$(echo \$(antismash --version) | sed 's/antiSMASH //;s/-.*//g') 47 | END_VERSIONS 48 | """ 49 | } 50 | -------------------------------------------------------------------------------- /modules/nf-core/antismash/antismashdownloaddatabases/meta.yml: -------------------------------------------------------------------------------- 1 | name: antismash_antismashdownloaddatabases 2 | description: antiSMASH allows the rapid genome-wide identification, annotation and 3 | analysis of secondary metabolite biosynthesis gene clusters. This module downloads 4 | the antiSMASH databases for conda and docker/singularity runs. 
5 | keywords: 6 | - secondary metabolites 7 | - BGC 8 | - biosynthetic gene cluster 9 | - genome mining 10 | - NRPS 11 | - RiPP 12 | - antibiotics 13 | - prokaryotes 14 | - bacteria 15 | - eukaryotes 16 | - fungi 17 | - antismash 18 | - database 19 | tools: 20 | - antismash: 21 | description: antiSMASH - the antibiotics and Secondary Metabolite Analysis SHell 22 | homepage: https://docs.antismash.secondarymetabolites.org 23 | documentation: https://docs.antismash.secondarymetabolites.org 24 | tool_dev_url: https://github.com/antismash/antismash 25 | doi: "10.1093/nar/gkab335" 26 | licence: ["AGPL v3"] 27 | identifier: biotools:antismash 28 | input: [] 29 | output: 30 | - database: 31 | - antismash_db: 32 | type: directory 33 | description: Download directory for antiSMASH databases 34 | pattern: "antismash_db" 35 | - versions: 36 | - versions.yml: 37 | type: file 38 | description: File containing software versions 39 | pattern: "versions.yml" 40 | authors: 41 | - "@jasmezz" 42 | maintainers: 43 | - "@jasmezz" 44 | - "@jfy133" 45 | -------------------------------------------------------------------------------- /modules/nf-core/antismash/antismashdownloaddatabases/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process ANTISMASH_ANTISMASHDOWNLOADDATABASES" 4 | script "../main.nf" 5 | process "ANTISMASH_ANTISMASHDOWNLOADDATABASES" 6 | config './nextflow.config' 7 | 8 | tag "modules" 9 | tag "modules_nfcore" 10 | tag "antismash" 11 | tag "antismash/antismashdownloaddatabases" 12 | 13 | test("antismash/downloaddatabases") { 14 | 15 | when { 16 | process { 17 | """ 18 | """ 19 | } 20 | } 21 | 22 | then { 23 | assertAll( 24 | { assert process.success }, 25 | { assert snapshot( 26 | file(process.out.database.get(0)).list().sort(), 27 | path(process.out.versions[0]).yaml, 28 | file(process.out.versions[0]).name, 29 | ).match() 30 | } 31 | ) 32 | } 33 | } 34 | 35 | test("antismash/downloaddatabases - stub") { 36 | 37 | options "-stub" 38 | 39 | when { 40 | process { 41 | """ 42 | """ 43 | } 44 | } 45 | 46 | then { 47 | assertAll( 48 | { assert process.success }, 49 | { assert snapshot( 50 | file(process.out.database.get(0)).list().sort(), 51 | file(process.out.versions[0]).name, 52 | ).match() 53 | } 54 | ) 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /modules/nf-core/antismash/antismashdownloaddatabases/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "antismash/downloaddatabases - stub": { 3 | "content": [ 4 | [ 5 | "as-js", 6 | "clusterblast", 7 | "clustercompare", 8 | "comparippson", 9 | "knownclusterblast", 10 | "mite", 11 | "nrps_pks", 12 | "pfam", 13 | "resfam", 14 | "tigrfam" 15 | ], 16 | "versions.yml" 17 | ], 18 | "meta": { 19 | "nf-test": "0.9.2", 20 | "nextflow": "25.04.2" 21 | }, 22 | "timestamp": "2025-05-22T07:52:56.373189968" 23 | }, 24 | "antismash/downloaddatabases": { 25 | "content": [ 26 | [ 27 | "as-js", 28 | "clusterblast", 29 | "clustercompare", 30 | "comparippson", 31 | "knownclusterblast", 32 | "mite", 33 | "nrps_pks", 34 | "pfam", 35 | "resfam", 36 | "tigrfam" 37 | ], 38 | { 39 | "ANTISMASH_ANTISMASHDOWNLOADDATABASES": { 40 | "antismash": "8.0.1" 41 | } 42 | }, 43 | "versions.yml" 44 | ], 45 | "meta": { 46 | "nf-test": "0.9.2", 47 | "nextflow": "25.04.2" 48 | }, 49 | "timestamp": "2025-06-03T08:25:39.61984576" 50 | } 51 | } 
-------------------------------------------------------------------------------- /modules/nf-core/antismash/antismashdownloaddatabases/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: ANTISMASH_ANTISMASHDOWNLOADDATABASES { 3 | memory = 7.GB 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/argnorm/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::argnorm=0.5.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/argnorm/meta.yml: -------------------------------------------------------------------------------- 1 | name: "argnorm" 2 | description: Normalize antibiotic resistance genes (ARGs) using the ARO ontology (developed 3 | by CARD). 4 | keywords: 5 | - amr 6 | - antimicrobial resistance 7 | - arg 8 | - antimicrobial resistance genes 9 | - genomics 10 | - metagenomics 11 | - normalization 12 | - drug categorization 13 | tools: 14 | - "argnorm": 15 | description: "Normalize antibiotic resistance genes (ARGs) using the ARO ontology 16 | (developed by CARD)." 17 | homepage: "https://argnorm.readthedocs.io/en/latest/" 18 | documentation: "https://argnorm.readthedocs.io/en/latest/" 19 | tool_dev_url: "https://github.com/BigDataBiology/argNorm" 20 | licence: ["MIT"] 21 | identifier: biotools:argnorm 22 | 23 | input: 24 | - - meta: 25 | type: map 26 | description: | 27 | Groovy Map containing sample information 28 | e.g. `[ id:'sample1', single_end:false ]` 29 | - input_tsv: 30 | type: file 31 | description: ARG annotation output 32 | pattern: "*.tsv" 33 | - - tool: 34 | type: string 35 | description: ARG annotation tool used 36 | pattern: "argsoap|abricate|deeparg|resfinder|amrfinderplus" 37 | - - db: 38 | type: string 39 | description: Database used for ARG annotation 40 | pattern: "sarg|ncbi|resfinder|deeparg|megares|argannot|resfinderfg" 41 | output: 42 | - tsv: 43 | - meta: 44 | type: map 45 | description: | 46 | Groovy Map containing sample information 47 | e.g. 
`[ id:'sample1', single_end:false ]` 48 | - "*.tsv": 49 | type: file 50 | description: Normalized argNorm output 51 | pattern: "*.tsv" 52 | - versions: 53 | - versions.yml: 54 | type: file 55 | description: File containing software versions 56 | pattern: "versions.yml" 57 | authors: 58 | - "@Vedanth-Ramji" 59 | maintainers: 60 | - "@Vedanth-Ramji" 61 | -------------------------------------------------------------------------------- /modules/nf-core/argnorm/tests/argnorm_hamronized.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: ARGNORM { 3 | ext.args = '--hamronized' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/argnorm/tests/argnorm_raw.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: ARGNORM { 3 | ext.args = '' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/argnorm/tests/tags.yml: -------------------------------------------------------------------------------- 1 | argnorm: 2 | - "modules/nf-core/argnorm/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/bakta/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::bakta=1.10.4 8 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/bakta/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: 'BAKTA_BAKTADBDOWNLOAD' { 4 | ext.args = "--type light" 5 | } 6 | 7 | withName: 'BAKTA_BAKTA' { 8 | memory = 7.GB 9 | } 10 | 11 | } 12 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/bakta/tests/tags.yml: -------------------------------------------------------------------------------- 1 | bakta/bakta: 2 | - "modules/nf-core/bakta/bakta/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/baktadbdownload/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::bakta=1.10.4 8 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/baktadbdownload/main.nf: -------------------------------------------------------------------------------- 1 | process BAKTA_BAKTADBDOWNLOAD { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
6 | 'https://depot.galaxyproject.org/singularity/bakta:1.10.4--pyhdfd78af_0' : 7 | 'biocontainers/bakta:1.10.4--pyhdfd78af_0' }" 8 | 9 | output: 10 | path "db*" , emit: db 11 | path "versions.yml" , emit: versions 12 | 13 | when: 14 | task.ext.when == null || task.ext.when 15 | 16 | script: 17 | def args = task.ext.args ?: '' 18 | """ 19 | bakta_db \\ 20 | download \\ 21 | $args 22 | 23 | cat <<-END_VERSIONS > versions.yml 24 | "${task.process}": 25 | bakta: \$(echo \$(bakta_db --version) 2>&1 | cut -f '2' -d ' ') 26 | END_VERSIONS 27 | """ 28 | 29 | stub: 30 | def args = task.ext.args ?: '' 31 | """ 32 | echo "bakta_db \\ 33 | download \\ 34 | $args" 35 | 36 | mkdir db 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | bakta: \$(echo \$(bakta_db --version) 2>&1 | cut -f '2' -d ' ') 41 | END_VERSIONS 42 | """ 43 | } 44 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/baktadbdownload/meta.yml: -------------------------------------------------------------------------------- 1 | name: "bakta_baktadbdownload" 2 | description: Downloads BAKTA database from Zenodo 3 | keywords: 4 | - bakta 5 | - annotation 6 | - fasta 7 | - bacteria 8 | - database 9 | - download 10 | tools: 11 | - bakta: 12 | description: Rapid & standardized annotation of bacterial genomes, MAGs & plasmids 13 | homepage: https://github.com/oschwengers/bakta 14 | documentation: https://github.com/oschwengers/bakta 15 | tool_dev_url: https://github.com/oschwengers/bakta 16 | doi: "10.1099/mgen.0.000685" 17 | licence: ["GPL v3"] 18 | identifier: biotools:bakta 19 | output: 20 | - db: 21 | - db*: 22 | type: directory 23 | description: BAKTA database directory 24 | pattern: "db*/" 25 | - versions: 26 | - versions.yml: 27 | type: file 28 | description: File containing software versions 29 | pattern: "versions.yml" 30 | authors: 31 | - "@jfy133" 32 | - "@jasmezz" 33 | maintainers: 34 | - "@jfy133" 35 | - "@jasmezz" 36 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/baktadbdownload/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process BAKTA_BAKTADBDOWNLOAD" 4 | script "../main.nf" 5 | process "BAKTA_BAKTADBDOWNLOAD" 6 | config "./nextflow.config" 7 | 8 | tag "modules" 9 | tag "modules_nfcore" 10 | tag "bakta" 11 | tag "bakta/baktadbdownload" 12 | 13 | test("Bakta database download") { 14 | 15 | when { 16 | process { 17 | """ 18 | """ 19 | } 20 | } 21 | 22 | then { 23 | assertAll( 24 | { assert process.success }, 25 | { assert path(process.out.db.get(0)).exists() }, 26 | { assert snapshot(process.out.versions).match() } 27 | ) 28 | } 29 | 30 | } 31 | 32 | test("Bakta database download - stub") { 33 | 34 | options "-stub" 35 | 36 | when { 37 | process { 38 | """ 39 | """ 40 | } 41 | } 42 | 43 | then { 44 | assertAll( 45 | { assert process.success }, 46 | { assert snapshot( 47 | process.out.db + 48 | process.out.versions 49 | ).match() } 50 | ) 51 | } 52 | 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/baktadbdownload/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Bakta database download": { 3 | "content": [ 4 | [ 5 | "versions.yml:md5,29d6ec77dc88492b2c53141e6541c289" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.9.2", 10 | "nextflow": "24.10.4" 11 | }, 12 | 
"timestamp": "2025-01-25T12:30:51.853371" 13 | }, 14 | "Bakta database download - stub": { 15 | "content": [ 16 | [ 17 | [ 18 | 19 | ], 20 | "versions.yml:md5,29d6ec77dc88492b2c53141e6541c289" 21 | ] 22 | ], 23 | "meta": { 24 | "nf-test": "0.9.2", 25 | "nextflow": "24.10.4" 26 | }, 27 | "timestamp": "2025-01-25T12:31:08.390845" 28 | } 29 | } -------------------------------------------------------------------------------- /modules/nf-core/bakta/baktadbdownload/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: 'BAKTA_BAKTADBDOWNLOAD' { 4 | ext.args = "--type light" 5 | } 6 | 7 | } 8 | -------------------------------------------------------------------------------- /modules/nf-core/bakta/baktadbdownload/tests/tags.yml: -------------------------------------------------------------------------------- 1 | bakta/baktadbdownload: 2 | - "modules/nf-core/bakta/baktadbdownload/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/deeparg/downloaddata/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::deeparg=1.0.4 8 | -------------------------------------------------------------------------------- /modules/nf-core/deeparg/downloaddata/meta.yml: -------------------------------------------------------------------------------- 1 | name: deeparg_downloaddata 2 | description: A deep learning based approach to predict Antibiotic Resistance Genes 3 | (ARGs) from metagenomes 4 | keywords: 5 | - download 6 | - database 7 | - deeparg 8 | - antimicrobial resistance genes 9 | - deep learning 10 | - prediction 11 | tools: 12 | - deeparg: 13 | description: A deep learning based approach to predict Antibiotic Resistance Genes 14 | (ARGs) from metagenomes 15 | homepage: https://github.com/gaarangoa/deeparg 16 | documentation: https://github.com/gaarangoa/deeparg 17 | tool_dev_url: https://github.com/gaarangoa/deeparg 18 | doi: "10.1186/s40168-018-0401-z" 19 | licence: ["MIT"] 20 | identifier: "" 21 | # No input required for download module. 22 | output: 23 | - db: 24 | - db/: 25 | type: directory 26 | description: Directory containing database required for deepARG. 
27 | pattern: "db/" 28 | - versions: 29 | - versions.yml: 30 | type: file 31 | description: File containing software versions 32 | pattern: "versions.yml" 33 | authors: 34 | - "@jfy133" 35 | maintainers: 36 | - "@jfy133" 37 | -------------------------------------------------------------------------------- /modules/nf-core/deeparg/downloaddata/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process DEEPARG_DOWNLOADDATA" 4 | script "../main.nf" 5 | process "DEEPARG_DOWNLOADDATA" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "deeparg" 10 | tag "deeparg/downloaddata" 11 | 12 | test("downloaddata") { 13 | 14 | 15 | when { 16 | process { 17 | """ 18 | // No input required 19 | """ 20 | } 21 | } 22 | 23 | then { 24 | assertAll( 25 | { assert process.success }, 26 | { assert snapshot ( 27 | file(process.out.db.get(0)).list().sort(), 28 | process.out.versions, 29 | ).match() } 30 | ) 31 | } 32 | 33 | } 34 | 35 | test("downloaddata - stub") { 36 | 37 | options "-stub" 38 | 39 | when { 40 | process { 41 | """ 42 | // No input required 43 | """ 44 | } 45 | } 46 | 47 | then { 48 | assertAll( 49 | { assert process.success }, 50 | { assert snapshot(process.out).match() } 51 | ) 52 | } 53 | 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /modules/nf-core/deeparg/downloaddata/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "downloaddata": { 3 | "content": [ 4 | [ 5 | "LICENSE:md5,f244898ceed024da6d64a1b97746edb1", 6 | "README.md:md5,6c0450350c2d52c0f9b5d81c3d22ea7b", 7 | "__MACOSX", 8 | "bin", 9 | "database", 10 | "deeparg", 11 | "deeparg.gz", 12 | "gg13", 13 | "model", 14 | "scripts" 15 | ], 16 | [ 17 | "versions.yml:md5,30e73617295a9f10ac7781bfe8ba617f" 18 | ] 19 | ], 20 | "meta": { 21 | "nf-test": "0.8.4", 22 | "nextflow": "23.10.1" 23 | }, 24 | "timestamp": "2024-02-07T16:40:43.022804921" 25 | }, 26 | "downloaddata - stub": { 27 | "content": [ 28 | { 29 | "0": [ 30 | [ 31 | 32 | ] 33 | ], 34 | "1": [ 35 | "versions.yml:md5,30e73617295a9f10ac7781bfe8ba617f" 36 | ], 37 | "db": [ 38 | [ 39 | 40 | ] 41 | ], 42 | "versions": [ 43 | "versions.yml:md5,30e73617295a9f10ac7781bfe8ba617f" 44 | ] 45 | } 46 | ], 47 | "meta": { 48 | "nf-test": "0.8.4", 49 | "nextflow": "23.10.1" 50 | }, 51 | "timestamp": "2024-02-07T16:40:47.261220647" 52 | } 53 | } -------------------------------------------------------------------------------- /modules/nf-core/deeparg/downloaddata/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deeparg/downloaddata: 2 | - "modules/nf-core/deeparg/downloaddata/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/deeparg/predict/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::deeparg=1.0.4 8 | -------------------------------------------------------------------------------- /modules/nf-core/deeparg/predict/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deeparg/predict: 2 | - "modules/nf-core/deeparg/predict/**" 3 | 
-------------------------------------------------------------------------------- /modules/nf-core/deepbgc/download/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::deepbgc=0.1.31 8 | -------------------------------------------------------------------------------- /modules/nf-core/deepbgc/download/main.nf: -------------------------------------------------------------------------------- 1 | process DEEPBGC_DOWNLOAD { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 6 | ? 'https://depot.galaxyproject.org/singularity/deepbgc:0.1.31--pyhca03a8a_0' 7 | : 'biocontainers/deepbgc:0.1.31--pyhca03a8a_0'}" 8 | 9 | output: 10 | path "deepbgc_db/", emit: db 11 | path "versions.yml", emit: versions 12 | 13 | when: 14 | task.ext.when == null || task.ext.when 15 | 16 | script: 17 | def args = task.ext.args ?: '' 18 | 19 | """ 20 | export DEEPBGC_DOWNLOADS_DIR='./deepbgc_db' 21 | 22 | deepbgc \\ 23 | download 24 | 25 | cat <<-END_VERSIONS > versions.yml 26 | "${task.process}": 27 | deepbgc: \$(echo \$(deepbgc info 2>&1 | grep 'version' | cut -d " " -f3) ) 28 | END_VERSIONS 29 | """ 30 | 31 | stub: 32 | """ 33 | mkdir -p deepbgc_db 34 | 35 | cat <<-END_VERSIONS > versions.yml 36 | "${task.process}": 37 | deepbgc: \$(echo \$(deepbgc info 2>&1 | grep 'version' | cut -d " " -f3) ) 38 | END_VERSIONS 39 | """ 40 | } 41 | -------------------------------------------------------------------------------- /modules/nf-core/deepbgc/download/meta.yml: -------------------------------------------------------------------------------- 1 | name: "deepbgc_download" 2 | description: Database download module for DeepBGC which detects BGCs in bacterial 3 | and fungal genomes using deep learning. 
4 | keywords: 5 | - database 6 | - download 7 | - BGC 8 | - biosynthetic gene cluster 9 | - deep learning 10 | - neural network 11 | - random forest 12 | - genomes 13 | - bacteria 14 | - fungi 15 | tools: 16 | - "deepbgc": 17 | description: "DeepBGC - Biosynthetic Gene Cluster detection and classification" 18 | homepage: "https://github.com/Merck/deepbgc" 19 | documentation: "https://github.com/Merck/deepbgc" 20 | tool_dev_url: "https://github.com/Merck/deepbgc" 21 | doi: "10.1093/nar/gkz654" 22 | licence: ["MIT"] 23 | identifier: biotools:DeepBGC 24 | output: 25 | - db: 26 | - deepbgc_db/: 27 | type: directory 28 | description: Directory containing the DeepBGC database 29 | pattern: "deepbgc_db/" 30 | - versions: 31 | - versions.yml: 32 | type: file 33 | description: File containing software versions 34 | pattern: "versions.yml" 35 | authors: 36 | - "@louperelo" 37 | maintainers: 38 | - "@louperelo" 39 | -------------------------------------------------------------------------------- /modules/nf-core/deepbgc/download/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process DEEPBGC_DOWNLOAD" 4 | script "../main.nf" 5 | process "DEEPBGC_DOWNLOAD" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "deepbgc" 10 | tag "deepbgc/download" 11 | 12 | test("deepbgc download db") { 13 | 14 | when { 15 | process { 16 | """ 17 | """ 18 | } 19 | } 20 | 21 | then { 22 | assertAll( 23 | { assert process.success }, 24 | { assert snapshot(process.out).match() } 25 | ) 26 | } 27 | } 28 | 29 | test("deepbgc download db - stub") { 30 | 31 | options "-stub" 32 | 33 | when { 34 | process { 35 | """ 36 | """ 37 | } 38 | } 39 | 40 | then { 41 | assertAll( 42 | { assert process.success }, 43 | { assert snapshot(process.out).match() } 44 | ) 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /modules/nf-core/deepbgc/download/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deepbgc/download: 2 | - "modules/nf-core/deepbgc/download/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/deepbgc/pipeline/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::deepbgc=0.1.31 8 | -------------------------------------------------------------------------------- /modules/nf-core/deepbgc/pipeline/tests/tags.yml: -------------------------------------------------------------------------------- 1 | deepbgc/pipeline: 2 | - "modules/nf-core/deepbgc/pipeline/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/fargene/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::fargene=0.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/fargene/tests/tags.yml: -------------------------------------------------------------------------------- 1 | fargene: 2 | - 
"modules/nf-core/fargene/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/gecco/run/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::gecco=0.9.10 8 | -------------------------------------------------------------------------------- /modules/nf-core/gecco/run/main.nf: -------------------------------------------------------------------------------- 1 | process GECCO_RUN { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/gecco:0.9.10--pyhdfd78af_0': 8 | 'biocontainers/gecco:0.9.10--pyhdfd78af_0' }" 9 | 10 | input: 11 | tuple val(meta), path(input), path(hmm) 12 | path model_dir 13 | 14 | output: 15 | tuple val(meta), path("*.genes.tsv") , optional: true, emit: genes 16 | tuple val(meta), path("*.features.tsv") , emit: features 17 | tuple val(meta), path("*.clusters.tsv") , optional: true, emit: clusters 18 | tuple val(meta), path("*_cluster_*.gbk"), optional: true, emit: gbk 19 | tuple val(meta), path("*.json") , optional: true, emit: json 20 | 21 | path "versions.yml" , emit: versions 22 | 23 | when: 24 | task.ext.when == null || task.ext.when 25 | 26 | script: 27 | def args = task.ext.args ?: '' 28 | def prefix = task.ext.prefix ?: "${meta.id}" 29 | def custom_model = model_dir ? "--model ${model_dir}" : "" 30 | def custom_hmm = hmm ? "--hmm ${hmm}" : "" 31 | """ 32 | gecco \\ 33 | run \\ 34 | $args \\ 35 | -j $task.cpus \\ 36 | -o ./ \\ 37 | -g ${input} \\ 38 | $custom_model \\ 39 | $custom_hmm 40 | 41 | for i in \$(find -name '${input.baseName}*' -type f); do 42 | mv \$i \${i/${input.baseName}/${prefix}}; 43 | done 44 | 45 | 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | gecco: \$(echo \$(gecco --version) | cut -f 2 -d ' ' ) 49 | END_VERSIONS 50 | """ 51 | 52 | stub: 53 | def prefix = task.ext.prefix ?: "${meta.id}" 54 | """ 55 | touch ${prefix}.genes.tsv 56 | touch ${prefix}.features.tsv 57 | touch ${prefix}.clusters.tsv 58 | touch NC_018507.1_cluster_1.gbk 59 | 60 | cat <<-END_VERSIONS > versions.yml 61 | "${task.process}": 62 | gecco: \$(echo \$(gecco --version) | cut -f 2 -d ' ' ) 63 | END_VERSIONS 64 | """ 65 | } 66 | -------------------------------------------------------------------------------- /modules/nf-core/gecco/run/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process GECCO_RUN" 4 | script "../main.nf" 5 | process "GECCO_RUN" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "gecco" 10 | tag "gecco/run" 11 | 12 | test("gecco/run - candidatus_portiera_aleyrodidarum - genome_fasta") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = [ 18 | [ id:'test', single_end:false ], // meta map 19 | file(params.modules_testdata_base_path + 'genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.fasta', checkIfExists: true), 20 | [] 21 | ] 22 | input[1] = [] 23 | """ 24 | } 25 | } 26 | 27 | then { 28 | assertAll( 29 | { assert process.success }, 30 | { assert snapshot( 31 | process.out.genes + 32 | process.out.features + 33 | 
process.out.clusters + 34 | process.out.versions 35 | ).match() }, 36 | { assert path(process.out.gbk.get(0).get(1)).text.contains("MVKNDIDILILGGGCTGLSLAYYLSFLPNTVRIFLIENKFIYNND") } 37 | ) 38 | } 39 | 40 | } 41 | 42 | test("gecco/run - candidatus_portiera_aleyrodidarum - genome_fasta - stub") { 43 | 44 | options "-stub" 45 | 46 | when { 47 | process { 48 | """ 49 | input[0] = [ 50 | [ id:'test', single_end:false ], // meta map 51 | file(params.modules_testdata_base_path + 'genomics/prokaryotes/candidatus_portiera_aleyrodidarum/genome/genome.fasta', checkIfExists: true), 52 | [] 53 | ] 54 | input[1] = [] 55 | """ 56 | } 57 | } 58 | 59 | then { 60 | assertAll( 61 | { assert process.success }, 62 | { assert snapshot(process.out).match() } 63 | ) 64 | } 65 | 66 | } 67 | 68 | } 69 | -------------------------------------------------------------------------------- /modules/nf-core/gecco/run/tests/tags.yml: -------------------------------------------------------------------------------- 1 | gecco/run: 2 | - "modules/nf-core/gecco/run/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::coreutils=9.5 8 | - conda-forge::grep=3.11 9 | - conda-forge::gzip=1.13 10 | - conda-forge::lbzip2=2.5 11 | - conda-forge::sed=4.8 12 | - conda-forge::tar=1.34 13 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/main.nf: -------------------------------------------------------------------------------- 1 | process GUNZIP { 2 | tag "${archive}" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 
'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/52/52ccce28d2ab928ab862e25aae26314d69c8e38bd41ca9431c67ef05221348aa/data' 8 | : 'community.wave.seqera.io/library/coreutils_grep_gzip_lbzip2_pruned:838ba80435a629f8'}" 9 | 10 | input: 11 | tuple val(meta), path(archive) 12 | 13 | output: 14 | tuple val(meta), path("${gunzip}"), emit: gunzip 15 | path "versions.yml", emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def extension = (archive.toString() - '.gz').tokenize('.')[-1] 23 | def name = archive.toString() - '.gz' - ".${extension}" 24 | def prefix = task.ext.prefix ?: name 25 | gunzip = prefix + ".${extension}" 26 | """ 27 | # Not calling gunzip itself because it creates files 28 | # with the original group ownership rather than the 29 | # default one for that user / the work directory 30 | gzip \\ 31 | -cd \\ 32 | ${args} \\ 33 | ${archive} \\ 34 | > ${gunzip} 35 | 36 | cat <<-END_VERSIONS > versions.yml 37 | "${task.process}": 38 | gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') 39 | END_VERSIONS 40 | """ 41 | 42 | stub: 43 | def args = task.ext.args ?: '' 44 | def extension = (archive.toString() - '.gz').tokenize('.')[-1] 45 | def name = archive.toString() - '.gz' - ".${extension}" 46 | def prefix = task.ext.prefix ?: name 47 | gunzip = prefix + ".${extension}" 48 | """ 49 | touch ${gunzip} 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | gunzip: \$(echo \$(gunzip --version 2>&1) | sed 's/^.*(gzip) //; s/ Copyright.*\$//') 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/meta.yml: -------------------------------------------------------------------------------- 1 | name: gunzip 2 | description: Compresses and decompresses files. 3 | keywords: 4 | - gunzip 5 | - compression 6 | - decompression 7 | tools: 8 | - gunzip: 9 | description: | 10 | gzip is a file format and a software application used for file compression and decompression. 11 | documentation: https://www.gnu.org/software/gzip/manual/gzip.html 12 | licence: ["GPL-3.0-or-later"] 13 | identifier: "" 14 | input: 15 | - - meta: 16 | type: map 17 | description: | 18 | Optional groovy Map containing meta information 19 | e.g. 
[ id:'test', single_end:false ] 20 | - archive: 21 | type: file 22 | description: File to be compressed/uncompressed 23 | pattern: "*.*" 24 | output: 25 | - gunzip: 26 | - meta: 27 | type: map 28 | description: | 29 | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] 30 | - ${gunzip}: 31 | type: file 32 | description: Compressed/uncompressed file 33 | pattern: "*.*" 34 | - versions: 35 | - versions.yml: 36 | type: file 37 | description: File containing software versions 38 | pattern: "versions.yml" 39 | authors: 40 | - "@joseespinosa" 41 | - "@drpatelh" 42 | - "@jfy133" 43 | maintainers: 44 | - "@joseespinosa" 45 | - "@drpatelh" 46 | - "@jfy133" 47 | - "@gallvp" 48 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: GUNZIP { 3 | ext.prefix = { "${meta.id}.xyz" } 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/gunzip/tests/tags.yml: -------------------------------------------------------------------------------- 1 | gunzip: 2 | - modules/nf-core/gunzip/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/abricate/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hamronization=1.1.9 8 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/abricate/main.nf: -------------------------------------------------------------------------------- 1 | process HAMRONIZATION_ABRICATE { 2 | tag "${meta.id}" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 
'https://depot.galaxyproject.org/singularity/hamronization:1.1.9--pyhdfd78af_0' 8 | : 'biocontainers/hamronization:1.1.9--pyhdfd78af_0'}" 9 | 10 | input: 11 | tuple val(meta), path(report) 12 | val format 13 | val software_version 14 | val reference_db_version 15 | 16 | output: 17 | tuple val(meta), path("*.json"), optional: true, emit: json 18 | tuple val(meta), path("*.tsv"), optional: true, emit: tsv 19 | path "versions.yml", emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | hamronize \\ 29 | abricate \\ 30 | ${report} \\ 31 | ${args} \\ 32 | --format ${format} \\ 33 | --analysis_software_version ${software_version} \\ 34 | --reference_database_version ${reference_db_version} \\ 35 | > ${prefix}.${format} 36 | 37 | cat <<-END_VERSIONS > versions.yml 38 | "${task.process}": 39 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 40 | END_VERSIONS 41 | """ 42 | 43 | stub: 44 | def prefix = task.ext.prefix ?: "${meta.id}" 45 | """ 46 | touch ${prefix}.${format} 47 | 48 | cat <<-END_VERSIONS > versions.yml 49 | "${task.process}": 50 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 51 | END_VERSIONS 52 | """ 53 | } 54 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/abricate/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process HAMRONIZATION_ABRICATE" 4 | script "../main.nf" 5 | process "HAMRONIZATION_ABRICATE" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "hamronization" 10 | tag "hamronization/abricate" 11 | 12 | test("hamronization/abricate - bacteroides_fragilis - genome_abricate_tsv") { 13 | 14 | when { 15 | process { 16 | """ 17 | input[0] = [ [ id:"test" ], file(params.modules_testdata_base_path + 'genomics/prokaryotes/bacteroides_fragilis/hamronization/genome.abricate.tsv', checkIfExists: true) ] 18 | input[1] = 'tsv' 19 | input[2] = '1.0.1' 20 | input[3] = '2021-Mar-27' 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | assertAll( 27 | { assert process.success }, 28 | { assert snapshot(process.out).match() } 29 | ) 30 | } 31 | } 32 | 33 | test("hamronization/abricate - bacteroides_fragilis - genome_abricate_tsv - stub") { 34 | 35 | options "-stub" 36 | 37 | when { 38 | process { 39 | """ 40 | input[0] = [ [ id:"test" ], file(params.modules_testdata_base_path + 'genomics/prokaryotes/bacteroides_fragilis/hamronization/genome.abricate.tsv', checkIfExists: true) ] 41 | input[1] = 'tsv' 42 | input[2] = '1.0.1' 43 | input[3] = '2021-Mar-27' 44 | """ 45 | } 46 | } 47 | 48 | then { 49 | assertAll( 50 | { assert process.success }, 51 | { assert snapshot(process.out).match() } 52 | ) 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/amrfinderplus/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hamronization=1.1.9 8 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/amrfinderplus/main.nf: 
-------------------------------------------------------------------------------- 1 | process HAMRONIZATION_AMRFINDERPLUS { 2 | tag "${meta.id}" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 'https://depot.galaxyproject.org/singularity/hamronization:1.1.9--pyhdfd78af_0' 8 | : 'biocontainers/hamronization:1.1.9--pyhdfd78af_0'}" 9 | 10 | input: 11 | tuple val(meta), path(report) 12 | val format 13 | val software_version 14 | val reference_db_version 15 | 16 | output: 17 | tuple val(meta), path("*.json"), optional: true, emit: json 18 | tuple val(meta), path("*.tsv"), optional: true, emit: tsv 19 | path "versions.yml", emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | hamronize \\ 29 | amrfinderplus \\ 30 | ${report} \\ 31 | ${args} \\ 32 | --format ${format} \\ 33 | --analysis_software_version ${software_version} \\ 34 | --reference_database_version ${reference_db_version} \\ 35 | --input_file_name ${prefix} \\ 36 | > ${prefix}.${format} 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | def prefix = task.ext.prefix ?: "${meta.id}" 46 | """ 47 | touch ${prefix}.${format} 48 | 49 | cat <<-END_VERSIONS > versions.yml 50 | "${task.process}": 51 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 52 | END_VERSIONS 53 | """ 54 | } 55 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/deeparg/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hamronization=1.1.9 8 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/deeparg/main.nf: -------------------------------------------------------------------------------- 1 | process HAMRONIZATION_DEEPARG { 2 | tag "${meta.id}" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 
'https://depot.galaxyproject.org/singularity/hamronization:1.1.9--pyhdfd78af_0' 8 | : 'biocontainers/hamronization:1.1.9--pyhdfd78af_0'}" 9 | 10 | input: 11 | tuple val(meta), path(report) 12 | val format 13 | val software_version 14 | val reference_db_version 15 | 16 | output: 17 | tuple val(meta), path("*.json"), optional: true, emit: json 18 | tuple val(meta), path("*.tsv"), optional: true, emit: tsv 19 | path "versions.yml", emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | hamronize \\ 29 | deeparg \\ 30 | ${report} \\ 31 | ${args} \\ 32 | --format ${format} \\ 33 | --analysis_software_version ${software_version} \\ 34 | --reference_database_version ${reference_db_version} \\ 35 | --input_file_name ${prefix} \\ 36 | > ${prefix}.${format} 37 | 38 | 39 | cat <<-END_VERSIONS > versions.yml 40 | "${task.process}": 41 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 42 | END_VERSIONS 43 | """ 44 | 45 | stub: 46 | def prefix = task.ext.prefix ?: "${meta.id}" 47 | """ 48 | touch ${prefix}.${format} 49 | 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/fargene/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hamronization=1.1.9 8 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/fargene/main.nf: -------------------------------------------------------------------------------- 1 | process HAMRONIZATION_FARGENE { 2 | tag "${meta.id}" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 
'https://depot.galaxyproject.org/singularity/hamronization:1.1.9--pyhdfd78af_0' 8 | : 'biocontainers/hamronization:1.1.9--pyhdfd78af_0'}" 9 | 10 | input: 11 | tuple val(meta), path(report) 12 | val format 13 | val software_version 14 | val reference_db_version 15 | 16 | output: 17 | tuple val(meta), path("*.json"), optional: true, emit: json 18 | tuple val(meta), path("*.tsv"), optional: true, emit: tsv 19 | path "versions.yml", emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | hamronize \\ 29 | fargene \\ 30 | ${report} \\ 31 | ${args} \\ 32 | --format ${format} \\ 33 | --analysis_software_version ${software_version} \\ 34 | --reference_database_version ${reference_db_version} \\ 35 | --input_file_name ${prefix} \\ 36 | > ${prefix}.${format} 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | def prefix = task.ext.prefix ?: "${meta.id}" 46 | """ 47 | echo "stub" > ${prefix}.${format} 48 | 49 | cat <<-END_VERSIONS > versions.yml 50 | "${task.process}": 51 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 52 | END_VERSIONS 53 | """ 54 | } 55 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/fargene/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "versions": { 3 | "content": [ 4 | [ 5 | "versions.yml:md5,47191a10b21a94d1bc74adc90004da28" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.9.2", 10 | "nextflow": "25.04.7" 11 | }, 12 | "timestamp": "2025-10-04T06:45:22.733458727" 13 | }, 14 | "hamronization/fargene - stub": { 15 | "content": [ 16 | { 17 | "0": [ 18 | 19 | ], 20 | "1": [ 21 | [ 22 | { 23 | "id": "test" 24 | }, 25 | "test.tsv:md5,f50b84b1db4b83ba62ec1deacc69c260" 26 | ] 27 | ], 28 | "2": [ 29 | "versions.yml:md5,47191a10b21a94d1bc74adc90004da28" 30 | ], 31 | "json": [ 32 | 33 | ], 34 | "tsv": [ 35 | [ 36 | { 37 | "id": "test" 38 | }, 39 | "test.tsv:md5,f50b84b1db4b83ba62ec1deacc69c260" 40 | ] 41 | ], 42 | "versions": [ 43 | "versions.yml:md5,47191a10b21a94d1bc74adc90004da28" 44 | ] 45 | } 46 | ], 47 | "meta": { 48 | "nf-test": "0.9.2", 49 | "nextflow": "25.04.7" 50 | }, 51 | "timestamp": "2025-10-04T06:45:29.001707963" 52 | } 53 | } -------------------------------------------------------------------------------- /modules/nf-core/hamronization/rgi/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hamronization=1.1.9 8 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/rgi/main.nf: -------------------------------------------------------------------------------- 1 | process HAMRONIZATION_RGI { 2 | tag "${meta.id}" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 7 | ? 
'https://depot.galaxyproject.org/singularity/hamronization:1.1.9--pyhdfd78af_0' 8 | : 'biocontainers/hamronization:1.1.9--pyhdfd78af_0'}" 9 | 10 | input: 11 | tuple val(meta), path(report) 12 | val format 13 | val software_version 14 | val reference_db_version 15 | 16 | output: 17 | tuple val(meta), path("*.json"), optional: true, emit: json 18 | tuple val(meta), path("*.tsv"), optional: true, emit: tsv 19 | path "versions.yml", emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | hamronize \\ 29 | rgi \\ 30 | ${report} \\ 31 | ${args} \\ 32 | --format ${format} \\ 33 | --analysis_software_version ${software_version} \\ 34 | --reference_database_version ${reference_db_version} \\ 35 | --input_file_name ${prefix} \\ 36 | > ${prefix}.${format} 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | def prefix = task.ext.prefix ?: "${meta.id}" 46 | """ 47 | touch ${prefix}.${format} 48 | 49 | cat <<-END_VERSIONS > versions.yml 50 | "${task.process}": 51 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 52 | END_VERSIONS 53 | """ 54 | } 55 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/summarize/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hamronization=1.1.9 8 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/summarize/main.nf: -------------------------------------------------------------------------------- 1 | process HAMRONIZATION_SUMMARIZE { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 6 | ? 'https://depot.galaxyproject.org/singularity/hamronization:1.1.9--pyhdfd78af_0' 7 | : 'biocontainers/hamronization:1.1.9--pyhdfd78af_0'}" 8 | 9 | input: 10 | path reports 11 | val format 12 | 13 | output: 14 | path ("hamronization_combined_report.json"), optional: true, emit: json 15 | path ("hamronization_combined_report.tsv"), optional: true, emit: tsv 16 | path ("hamronization_combined_report.html"), optional: true, emit: html 17 | path "versions.yml", emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | def outformat = format == 'interactive' ? 'html' : format 25 | """ 26 | hamronize \\ 27 | summarize \\ 28 | ${reports.join(' ')} \\ 29 | -t ${format} \\ 30 | ${args} \\ 31 | -o hamronization_combined_report.${outformat} 32 | 33 | cat <<-END_VERSIONS > versions.yml 34 | "${task.process}": 35 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 36 | END_VERSIONS 37 | """ 38 | 39 | stub: 40 | def outformat = format == 'interactive' ? 
'html' : format 41 | """ 42 | touch hamronization_combined_report.${outformat} 43 | 44 | cat <<-END_VERSIONS > versions.yml 45 | "${task.process}": 46 | hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) 47 | END_VERSIONS 48 | """ 49 | } 50 | -------------------------------------------------------------------------------- /modules/nf-core/hamronization/summarize/meta.yml: -------------------------------------------------------------------------------- 1 | name: hamronization_summarize 2 | description: Tool to summarize and combine all hAMRonization reports into a single 3 | file 4 | keywords: 5 | - amr 6 | - antimicrobial resistance 7 | - reporting 8 | tools: 9 | - hamronization: 10 | description: Tool to convert and summarize AMR gene detection outputs using the 11 | hAMRonization specification 12 | homepage: https://github.com/pha4ge/hAMRonization/ 13 | documentation: https://github.com/pha4ge/hAMRonization/ 14 | tool_dev_url: https://github.com/pha4ge/hAMRonization 15 | licence: ["GNU Lesser General Public v3 (LGPL v3)"] 16 | identifier: biotools:hamronization 17 | input: 18 | - reports: 19 | type: file 20 | description: List of multiple hAMRonization reports in either JSON or TSV format 21 | pattern: "*.{json,tsv}" 22 | ontologies: 23 | - edam: http://edamontology.org/format_3464 # JSON 24 | - edam: http://edamontology.org/format_3475 # TSV 25 | - format: 26 | type: string 27 | description: Type of final combined report file to be produced 28 | pattern: "tsv|json|interactive" 29 | output: 30 | json: 31 | - hamronization_combined_report.json: 32 | type: file 33 | description: hAMRonised summary in JSON format 34 | pattern: "*.json" 35 | ontologies: 36 | - edam: http://edamontology.org/format_3464 # JSON 37 | tsv: 38 | - hamronization_combined_report.tsv: 39 | type: file 40 | description: hAMRonised summary in TSV format 41 | pattern: "*.tsv" 42 | ontologies: 43 | - edam: http://edamontology.org/format_3475 # TSV 44 | html: 45 | - hamronization_combined_report.html: 46 | type: file 47 | description: hAMRonised summary in HTML format 48 | pattern: "*.html" 49 | ontologies: [] 50 | versions: 51 | - versions.yml: 52 | type: file 53 | description: File containing software versions 54 | pattern: "versions.yml" 55 | ontologies: 56 | - edam: http://edamontology.org/format_3750 # YAML 57 | authors: 58 | - "@jfy133" 59 | maintainers: 60 | - "@jfy133" 61 | -------------------------------------------------------------------------------- /modules/nf-core/hmmer/hmmsearch/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::hmmer=3.4 8 | -------------------------------------------------------------------------------- /modules/nf-core/hmmer/hmmsearch/tests/tags.yml: -------------------------------------------------------------------------------- 1 | hmmer/hmmsearch: 2 | - "modules/nf-core/hmmer/hmmsearch/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/interproscan/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - 
bioconda::interproscan=5.59_91.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/interproscan/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: INTERPROSCAN { 3 | ext.args = '-appl Coils' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/interproscan/tests/tags.yml: -------------------------------------------------------------------------------- 1 | interproscan: 2 | - modules/nf-core/interproscan/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/macrel/contigs/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::macrel=1.4.0 8 | -------------------------------------------------------------------------------- /modules/nf-core/macrel/contigs/main.nf: -------------------------------------------------------------------------------- 1 | process MACREL_CONTIGS { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/macrel:1.4.0--pyh7e72e81_0': 8 | 'biocontainers/macrel:1.4.0--pyh7e72e81_0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | 13 | output: 14 | tuple val(meta), path("*/*.smorfs.faa.gz") , emit: smorfs 15 | tuple val(meta), path("*/*.all_orfs.faa.gz") , emit: all_orfs 16 | tuple val(meta), path("*/*.prediction.gz") , emit: amp_prediction 17 | tuple val(meta), path("*/*.md") , emit: readme_file 18 | tuple val(meta), path("*/*_log.txt") , emit: log_file 19 | path "versions.yml" , emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | macrel contigs \\ 29 | $args \\ 30 | --fasta $fasta \\ 31 | --output ${prefix}/ \\ 32 | --tag ${prefix} \\ 33 | --log-file ${prefix}/${prefix}_log.txt \\ 34 | --threads $task.cpus 35 | 36 | gzip --no-name ${prefix}/*.faa 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | macrel: \$(echo \$(macrel --version | sed 's/macrel //g')) 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | def prefix = task.ext.prefix ?: "${meta.id}" 46 | """ 47 | mkdir ${prefix} 48 | 49 | touch ${prefix}/${prefix}_log.txt 50 | echo | gzip > ${prefix}/${prefix}.smorfs.faa.gz 51 | echo | gzip > ${prefix}/${prefix}.all_orfs.faa.gz 52 | echo | gzip > ${prefix}/${prefix}.prediction.gz 53 | touch ${prefix}/${prefix}.md 54 | 55 | 56 | cat <<-END_VERSIONS > versions.yml 57 | "${task.process}": 58 | macrel: \$(echo \$(macrel --version | sed 's/macrel //g')) 59 | END_VERSIONS 60 | """ 61 | } 62 | -------------------------------------------------------------------------------- /modules/nf-core/macrel/contigs/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_process { 3 | 4 | name "Test Process MACREL_CONTIGS" 5 | script "../main.nf" 6 | process "MACREL_CONTIGS" 7 | 8 | tag "modules" 9 | tag "modules_nfcore" 10 | tag "macrel" 11 | tag "macrel/contigs" 12 | 13 | 
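    // Note: the first test below snapshots only the first decompressed line of all_orfs and only the
    // file name of the log, while the remaining outputs are snapshotted in full.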
test("test-macrel-contigs") { 14 | 15 | when { 16 | process { 17 | """ 18 | input[0] = [ 19 | [ id:'test', single_end:false ], // meta map 20 | file(params.modules_testdata_base_path + 'genomics/prokaryotes/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz', checkIfExists: true) 21 | ] 22 | 23 | """ 24 | } 25 | } 26 | 27 | then { 28 | assertAll( 29 | { assert process.success }, 30 | { assert snapshot( 31 | process.out.smorfs, 32 | path(process.out.all_orfs[0][1]).linesGzip[0], 33 | process.out.amp_prediction, 34 | process.out.readme_file, 35 | file(process.out.log_file[0][1]).name, 36 | process.out.versions 37 | ).match() 38 | } 39 | ) 40 | } 41 | } 42 | 43 | test("test-macrel-contigs-stub") { 44 | options '-stub' 45 | 46 | when { 47 | process { 48 | """ 49 | input[0] = [ 50 | [ id:'test', single_end:false ], // meta map 51 | file(params.modules_testdata_base_path + 'genomics/prokaryotes/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz', checkIfExists: true) 52 | ] 53 | 54 | """ 55 | } 56 | } 57 | 58 | then { 59 | assertAll( 60 | { assert process.success }, 61 | { assert snapshot(process.out).match() } 62 | ) 63 | } 64 | } 65 | 66 | } 67 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/createdb/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::mmseqs2=17.b804f 8 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/createdb/main.nf: -------------------------------------------------------------------------------- 1 | process MMSEQS_CREATEDB { 2 | tag "$meta.id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/mmseqs2:17.b804f--hd6d6fdc_1': 8 | 'biocontainers/mmseqs2:17.b804f--hd6d6fdc_1' }" 9 | 10 | input: 11 | tuple val(meta), path(sequence) 12 | 13 | output: 14 | tuple val(meta), path("${prefix}/"), emit: db 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | prefix = task.ext.prefix ?: "${meta.id}" 23 | def is_compressed = sequence.getExtension() == "gz" ? true : false 24 | def sequence_name = is_compressed ? 
sequence.getBaseName() : sequence 25 | """ 26 | if [ "${is_compressed}" == "true" ]; then 27 | gzip -c -d ${sequence} > ${sequence_name} 28 | fi 29 | 30 | mkdir -p ${prefix} 31 | 32 | mmseqs \\ 33 | createdb \\ 34 | ${sequence_name} \\ 35 | ${prefix}/${prefix} \\ 36 | $args 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | mmseqs: \$(mmseqs | grep 'Version' | sed 's/MMseqs2 Version: //') 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | def args = task.ext.args ?: '' 46 | prefix = task.ext.prefix ?: "${meta.id}" 47 | """ 48 | mkdir -p ${prefix} 49 | 50 | touch ${prefix}/${prefix} 51 | touch ${prefix}/${prefix}.dbtype 52 | touch ${prefix}/${prefix}.index 53 | touch ${prefix}/${prefix}.lookup 54 | touch ${prefix}/${prefix}.source 55 | touch ${prefix}/${prefix}_h 56 | touch ${prefix}/${prefix}_h.dbtype 57 | touch ${prefix}/${prefix}_h.index 58 | 59 | cat <<-END_VERSIONS > versions.yml 60 | "${task.process}": 61 | mmseqs: \$(mmseqs | grep 'Version' | sed 's/MMseqs2 Version: //') 62 | END_VERSIONS 63 | """ 64 | } 65 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/createdb/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json 2 | name: "mmseqs_createdb" 3 | description: Create an MMseqs database from an existing FASTA/Q file 4 | keywords: 5 | - protein sequence 6 | - databases 7 | - clustering 8 | - searching 9 | - indexing 10 | - mmseqs2 11 | tools: 12 | - "mmseqs": 13 | description: "MMseqs2: ultra fast and sensitive sequence search and clustering 14 | suite" 15 | homepage: "https://github.com/soedinglab/MMseqs2" 16 | documentation: "https://mmseqs.com/latest/userguide.pdf" 17 | tool_dev_url: "https://github.com/soedinglab/MMseqs2" 18 | doi: "10.1093/bioinformatics/btw006" 19 | licence: ["GPL v3"] 20 | identifier: biotools:mmseqs 21 | input: 22 | - - meta: 23 | type: map 24 | description: | 25 | Groovy Map containing sample information 26 | e.g. `[ id:'test', single_end:false ]` 27 | - sequence: 28 | type: file 29 | description: Input sequences in FASTA/Q (zipped or unzipped) format to parse 30 | into an mmseqs database 31 | pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz,fastq,fastq.gz,fq,fq.gz}" 32 | output: 33 | - db: 34 | - meta: 35 | type: map 36 | description: | 37 | Groovy Map containing sample information 38 | e.g. 
`[ id:'test', single_end:false ]` 39 | - ${prefix}/: 40 | type: directory 41 | description: The created MMseqs2 database 42 | - versions: 43 | - versions.yml: 44 | type: file 45 | description: File containing software versions 46 | pattern: "versions.yml" 47 | authors: 48 | - "@Joon-Klaps" 49 | maintainers: 50 | - "@Joon-Klaps" 51 | - "@vagkaratzas" 52 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/createdb/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process MMSEQS_CREATEDB" 4 | script "../main.nf" 5 | process "MMSEQS_CREATEDB" 6 | tag "modules" 7 | tag "modules_nfcore" 8 | tag "mmseqs" 9 | tag "mmseqs/createdb" 10 | 11 | test("Should build an mmseqs db from a contigs fasta file") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = [ 17 | [ id:'test', single_end:false ], // meta map 18 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fasta/contigs.fasta', checkIfExists: true) 19 | ] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot( 28 | process.out.db, 29 | process.out.versions 30 | ).match() 31 | } 32 | ) 33 | } 34 | 35 | } 36 | 37 | test("Should build an mmseqs db from a zipped amino acid sequence file") { 38 | 39 | when { 40 | process { 41 | """ 42 | 43 | input[0] = [ 44 | [ id:'test' ], 45 | file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/proteome.fasta.gz', checkIfExists: true) 46 | ] 47 | """ 48 | } 49 | } 50 | 51 | then { 52 | assertAll( 53 | { assert process.success }, 54 | { assert snapshot( 55 | process.out.db, 56 | process.out.versions 57 | ).match() 58 | } 59 | ) 60 | } 61 | 62 | } 63 | 64 | } 65 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/createtsv/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::mmseqs2=17.b804f 8 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/createtsv/tests/cluster.nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: MMSEQS_CREATETSV { 4 | ext.args2 = '*_clu.dbtype' 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/createtsv/tests/taxonomy.nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | 3 | withName: MMSEQS_TAXONOMY { 4 | ext.args = '--search-type 2' 5 | } 6 | 7 | } 8 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/databases/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::mmseqs2=17.b804f 8 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/databases/main.nf: 
-------------------------------------------------------------------------------- 1 | process MMSEQS_DATABASES { 2 | tag "${database}" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/mmseqs2:17.b804f--hd6d6fdc_1': 8 | 'biocontainers/mmseqs2:17.b804f--hd6d6fdc_1' }" 9 | 10 | input: 11 | val database 12 | 13 | output: 14 | path "${prefix}/" , emit: database 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | prefix = task.ext.prefix ?: 'mmseqs_database' 23 | """ 24 | mkdir ${prefix}/ 25 | 26 | mmseqs databases \\ 27 | ${database} \\ 28 | ${prefix}/database \\ 29 | tmp/ \\ 30 | --threads ${task.cpus} \\ 31 | ${args} 32 | 33 | cat <<-END_VERSIONS > versions.yml 34 | "${task.process}": 35 | mmseqs: \$(mmseqs | grep 'Version' | sed 's/MMseqs2 Version: //') 36 | END_VERSIONS 37 | """ 38 | 39 | stub: 40 | prefix = task.ext.prefix ?: 'mmseqs_database' 41 | """ 42 | mkdir ${prefix}/ 43 | 44 | touch ${prefix}/database 45 | touch ${prefix}/database.dbtype 46 | touch ${prefix}/database_h 47 | touch ${prefix}/database_h.dbtype 48 | touch ${prefix}/database_h.index 49 | touch ${prefix}/database.index 50 | touch ${prefix}/database.lookup 51 | touch ${prefix}/database_mapping 52 | touch ${prefix}/database.source 53 | touch ${prefix}/database_taxonomy 54 | touch ${prefix}/database.version 55 | 56 | cat <<-END_VERSIONS > versions.yml 57 | "${task.process}": 58 | mmseqs: \$(mmseqs | grep 'Version' | sed 's/MMseqs2 Version: //') 59 | END_VERSIONS 60 | """ 61 | } 62 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/databases/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json 2 | name: "mmseqs_databases" 3 | description: Download an mmseqs-formatted database 4 | keywords: 5 | - database 6 | - indexing 7 | - clustering 8 | - searching 9 | tools: 10 | - "mmseqs": 11 | description: "MMseqs2: ultra fast and sensitive sequence search and clustering 12 | suite" 13 | homepage: "https://github.com/soedinglab/MMseqs2" 14 | documentation: "https://mmseqs.com/latest/userguide.pdf" 15 | tool_dev_url: "https://github.com/soedinglab/MMseqs2" 16 | doi: "10.1093/bioinformatics/btw006" 17 | licence: ["GPL v3"] 18 | identifier: biotools:mmseqs 19 | input: 20 | - - database: 21 | type: string 22 | description: Database available through the mmseqs2 databases interface - see 23 | https://github.com/soedinglab/MMseqs2/wiki#downloading-databases for details 24 | output: 25 | - database: 26 | - ${prefix}/: 27 | type: directory 28 | description: Directory containing processed mmseqs database 29 | - versions: 30 | - versions.yml: 31 | type: file 32 | description: File containing software versions 33 | pattern: "versions.yml" 34 | authors: 35 | - "@prototaxites" 36 | maintainers: 37 | - "@prototaxites" 38 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/databases/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_process { 3 | 4 | name "Test Process MMSEQS_DATABASES" 5 | script "../main.nf" 6 | process "MMSEQS_DATABASES" 
7 | 8 | tag "modules" 9 | tag "modules_nfcore" 10 | tag "mmseqs" 11 | tag "mmseqs/databases" 12 | 13 | test("test-mmseqs-databases") { 14 | 15 | when { 16 | process { 17 | """ 18 | input[0] = "SILVA" 19 | 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot( 28 | file(process.out.database[0]).listFiles().collect { it.name }.toSorted(), // unstable 29 | process.out.versions 30 | ).match() 31 | } 32 | ) 33 | } 34 | } 35 | 36 | test("test-mmseqs-databases-stub") { 37 | options '-stub' 38 | when { 39 | process { 40 | """ 41 | input[0] = "SILVA" 42 | 43 | """ 44 | } 45 | } 46 | 47 | then { 48 | assertAll( 49 | { assert process.success }, 50 | { assert snapshot(process.out).match() } 51 | ) 52 | } 53 | } 54 | 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/taxonomy/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::mmseqs2=17.b804f 8 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/taxonomy/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json 2 | name: "mmseqs_taxonomy" 3 | description: Computes the lowest common ancestor by searching for homologs of the 4 | query sequences in the target database. 5 | keywords: 6 | - protein sequence 7 | - nucleotide sequence 8 | - databases 9 | - taxonomy 10 | - homologs 11 | - mmseqs2 12 | tools: 13 | - "mmseqs": 14 | description: "MMseqs2: ultra fast and sensitive sequence search and clustering 15 | suite" 16 | homepage: "https://github.com/soedinglab/MMseqs2" 17 | documentation: "https://mmseqs.com/latest/userguide.pdf" 18 | tool_dev_url: "https://github.com/soedinglab/MMseqs2" 19 | doi: "10.1093/bioinformatics/btw006" 20 | licence: ["GPL v3"] 21 | identifier: biotools:mmseqs 22 | input: 23 | - - meta: 24 | type: map 25 | description: | 26 | Groovy Map containing sample information 27 | e.g. `[ id:'test', single_end:false ]` 28 | - db_query: 29 | type: directory 30 | description: An MMseqs2 database with query data 31 | - - db_target: 32 | type: directory 33 | description: An MMseqs2 database with target data including the taxonomy classification 34 | output: 35 | - db_taxonomy: 36 | - meta: 37 | type: map 38 | description: | 39 | Groovy Map containing sample information 40 | e.g. 
`[ id:'test', single_end:false ]` 41 | - ${prefix}_taxonomy: 42 | type: directory 43 | description: An MMseqs2 database containing the taxonomic classification of the query sequences 44 | - versions: 45 | - versions.yml: 46 | type: file 47 | description: File containing software versions 48 | pattern: "versions.yml" 49 | authors: 50 | - "@darcy220606" 51 | maintainers: 52 | - "@darcy220606" 53 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/taxonomy/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: MMSEQS_TAXONOMY { 3 | ext.args = '--search-type 2' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/mmseqs/taxonomy/tests/tags.yml: -------------------------------------------------------------------------------- 1 | mmseqs/taxonomy: 2 | - "modules/nf-core/mmseqs/taxonomy/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::multiqc=1.29 8 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/main.nf: -------------------------------------------------------------------------------- 1 | process MULTIQC { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/multiqc:1.29--pyhdfd78af_0' : 7 | 'biocontainers/multiqc:1.29--pyhdfd78af_0' }" 8 | 9 | input: 10 | path multiqc_files, stageAs: "?/*" 11 | path(multiqc_config) 12 | path(extra_multiqc_config) 13 | path(multiqc_logo) 14 | path(replace_names) 15 | path(sample_names) 16 | 17 | output: 18 | path "*multiqc_report.html", emit: report 19 | path "*_data" , emit: data 20 | path "*_plots" , optional:true, emit: plots 21 | path "versions.yml" , emit: versions 22 | 23 | when: 24 | task.ext.when == null || task.ext.when 25 | 26 | script: 27 | def args = task.ext.args ?: '' 28 | def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : '' 29 | def config = multiqc_config ? "--config $multiqc_config" : '' 30 | def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' 31 | def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : '' 32 | def replace = replace_names ? "--replace-names ${replace_names}" : '' 33 | def samples = sample_names ? "--sample-names ${sample_names}" : '' 34 | """ 35 | multiqc \\ 36 | --force \\ 37 | $args \\ 38 | $config \\ 39 | $prefix \\ 40 | $extra_config \\ 41 | $logo \\ 42 | $replace \\ 43 | $samples \\ 44 | .
45 | 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 49 | END_VERSIONS 50 | """ 51 | 52 | stub: 53 | """ 54 | mkdir multiqc_data 55 | mkdir multiqc_plots 56 | touch multiqc_report.html 57 | 58 | cat <<-END_VERSIONS > versions.yml 59 | "${task.process}": 60 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 61 | END_VERSIONS 62 | """ 63 | } 64 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "multiqc_versions_single": { 3 | "content": [ 4 | [ 5 | "versions.yml:md5,c1fe644a37468f6dae548d98bc72c2c1" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.9.2", 10 | "nextflow": "25.04.2" 11 | }, 12 | "timestamp": "2025-05-22T11:50:41.182332996" 13 | }, 14 | "multiqc_stub": { 15 | "content": [ 16 | [ 17 | "multiqc_report.html", 18 | "multiqc_data", 19 | "multiqc_plots", 20 | "versions.yml:md5,c1fe644a37468f6dae548d98bc72c2c1" 21 | ] 22 | ], 23 | "meta": { 24 | "nf-test": "0.9.2", 25 | "nextflow": "25.04.2" 26 | }, 27 | "timestamp": "2025-05-22T11:51:22.448739369" 28 | }, 29 | "multiqc_versions_config": { 30 | "content": [ 31 | [ 32 | "versions.yml:md5,c1fe644a37468f6dae548d98bc72c2c1" 33 | ] 34 | ], 35 | "meta": { 36 | "nf-test": "0.9.2", 37 | "nextflow": "25.04.2" 38 | }, 39 | "timestamp": "2025-05-22T11:51:06.198928424" 40 | } 41 | } -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'MULTIQC' { 3 | ext.prefix = null 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/prodigal/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::prodigal=2.6.3 8 | - conda-forge::pigz=2.6 9 | -------------------------------------------------------------------------------- /modules/nf-core/prodigal/tests/tags.yml: -------------------------------------------------------------------------------- 1 | prodigal: 2 | - "modules/nf-core/prodigal/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/prokka/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::prokka=1.14.6 8 | - conda-forge::openjdk=8.0.412 9 | -------------------------------------------------------------------------------- /modules/nf-core/prokka/tests/tags.yml: -------------------------------------------------------------------------------- 1 | prokka: 2 | - "modules/nf-core/prokka/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/pyrodigal/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: 
$schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::pyrodigal=3.6.3 8 | - conda-forge::pigz=2.8 9 | -------------------------------------------------------------------------------- /modules/nf-core/pyrodigal/main.nf: -------------------------------------------------------------------------------- 1 | process PYRODIGAL { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/mulled-v2-2fe9a8ce513c91df34b43a6610df94c3a2eb3bd0:da1134ad604a59a6f439bdcc3f6df690eba47e9a-0': 8 | 'biocontainers/mulled-v2-2fe9a8ce513c91df34b43a6610df94c3a2eb3bd0:da1134ad604a59a6f439bdcc3f6df690eba47e9a-0' }" 9 | 10 | input: 11 | tuple val(meta), path(fasta) 12 | val(output_format) 13 | 14 | output: 15 | tuple val(meta), path("*.${output_format}.gz") , emit: annotations 16 | tuple val(meta), path("*.fna.gz") , emit: fna 17 | tuple val(meta), path("*.faa.gz") , emit: faa 18 | tuple val(meta), path("*.score.gz") , emit: score 19 | path "versions.yml" , emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def prefix = task.ext.prefix ?: "${meta.id}" 27 | """ 28 | pigz -cdf ${fasta} > pigz_fasta.fna 29 | 30 | pyrodigal \\ 31 | -j ${task.cpus} \\ 32 | $args \\ 33 | -i pigz_fasta.fna \\ 34 | -f $output_format \\ 35 | -o "${prefix}.${output_format}" \\ 36 | -d ${prefix}.fna \\ 37 | -a ${prefix}.faa \\ 38 | -s ${prefix}.score 39 | 40 | pigz -nmf ${prefix}* 41 | 42 | cat <<-END_VERSIONS > versions.yml 43 | "${task.process}": 44 | pyrodigal: \$(echo \$(pyrodigal --version 2>&1 | sed 's/pyrodigal v//')) 45 | END_VERSIONS 46 | """ 47 | stub: 48 | def args = task.ext.args ?: '' 49 | def prefix = task.ext.prefix ?: "${meta.id}" 50 | """ 51 | touch ${prefix}.${output_format}.gz 52 | touch ${prefix}.fna.gz 53 | touch ${prefix}.faa.gz 54 | touch ${prefix}.score.gz 55 | touch versions.yml 56 | 57 | cat <<-END_VERSIONS > versions.yml 58 | "${task.process}": 59 | pyrodigal: \$(echo \$(pyrodigal --version 2>&1 | sed 's/pyrodigal v//')) 60 | END_VERSIONS 61 | """ 62 | } 63 | -------------------------------------------------------------------------------- /modules/nf-core/pyrodigal/tests/tags.yml: -------------------------------------------------------------------------------- 1 | pyrodigal: 2 | - "modules/nf-core/pyrodigal/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/rgi/cardannotation/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::rgi=6.0.5 8 | -------------------------------------------------------------------------------- /modules/nf-core/rgi/cardannotation/main.nf: -------------------------------------------------------------------------------- 1 | process RGI_CARDANNOTATION { 2 | label 'process_medium' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container 6 | ? 
'https://depot.galaxyproject.org/singularity/rgi:6.0.5--pyh05cac1d_0' 7 | : 'biocontainers/rgi:6.0.5--pyh05cac1d_0'}" 8 | 9 | input: 10 | path card 11 | 12 | output: 13 | path ("card_database_processed"), emit: db 14 | env RGI_VERSION, emit: tool_version 15 | env DB_VERSION, emit: db_version 16 | path "versions.yml", emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | 24 | """ 25 | rgi card_annotation \\ 26 | -i ${card}/card.json \\ 27 | ${args} 28 | 29 | DB_VERSION=\$(ls card_database_*_all.fasta | sed "s/card_database_v\\([0-9].*[0-9]\\).*/\\1/") 30 | 31 | mkdir card_database_processed 32 | mv card*.fasta card_database_processed 33 | cp ${card}/* card_database_processed 34 | 35 | RGI_VERSION=\$(rgi main --version) 36 | 37 | cat <<-END_VERSIONS > versions.yml 38 | "${task.process}": 39 | rgi: \$(echo \$RGI_VERSION) 40 | rgi-database: \$(echo \$DB_VERSION) 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | """ 46 | touch card.fasta 47 | touch card_all.fasta 48 | 49 | mkdir card_database_processed 50 | mv card*.fasta card_database_processed 51 | 52 | RGI_VERSION=\$(rgi main --version) 53 | DB_VERSION=stub_version 54 | 55 | cat <<-END_VERSIONS > versions.yml 56 | "${task.process}": 57 | rgi: \$(echo \$RGI_VERSION) 58 | rgi-database: \$(echo \$DB_VERSION) 59 | END_VERSIONS 60 | """ 61 | } 62 | -------------------------------------------------------------------------------- /modules/nf-core/rgi/cardannotation/meta.yml: -------------------------------------------------------------------------------- 1 | name: rgi_cardannotation 2 | description: Preprocess the CARD database for RGI to predict antibiotic resistance 3 | from protein or nucleotide data 4 | keywords: 5 | - bacteria 6 | - fasta 7 | - antibiotic resistance 8 | tools: 9 | - rgi: 10 | description: This module preprocesses the downloaded Comprehensive Antibiotic 11 | Resistance Database (CARD) which can then be used as input for RGI. 
12 | homepage: https://card.mcmaster.ca 13 | documentation: https://github.com/arpcard/rgi 14 | tool_dev_url: https://github.com/arpcard/rgi 15 | doi: "10.1093/nar/gkz935" 16 | licence: ["https://card.mcmaster.ca/about"] 17 | identifier: "" 18 | input: 19 | - card: 20 | type: directory 21 | description: Directory containing the CARD database 22 | pattern: "*/" 23 | output: 24 | db: 25 | - card_database_processed: 26 | type: directory 27 | description: Directory containing the processed CARD database files 28 | pattern: "*/" 29 | tool_version: 30 | - RGI_VERSION: 31 | type: string 32 | description: The version of the tool in string format (useful for downstream 33 | tools such as hAMRronization) 34 | db_version: 35 | - DB_VERSION: 36 | type: string 37 | description: The version of the used database in string format (useful for downstream 38 | tools such as hAMRronization) 39 | versions: 40 | - versions.yml: 41 | type: file 42 | description: File containing software versions 43 | pattern: "versions.yml" 44 | ontologies: 45 | - edam: http://edamontology.org/format_3750 # YAML 46 | authors: 47 | - "@rpetit3" 48 | - "@jfy133" 49 | - "@jasmezz" 50 | maintainers: 51 | - "@rpetit3" 52 | - "@jfy133" 53 | - "@jasmezz" 54 | -------------------------------------------------------------------------------- /modules/nf-core/rgi/cardannotation/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process RGI_CARDANNOTATION" 4 | script "../main.nf" 5 | process "RGI_CARDANNOTATION" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "rgi" 10 | tag "rgi/cardannotation" 11 | tag "untar" 12 | 13 | setup { 14 | run("UNTAR") { 15 | script "modules/nf-core/untar/main.nf" 16 | process { 17 | """ 18 | file('https://card.mcmaster.ca/latest/data', checkIfExists: true).copyTo('data.tar.gz') 19 | 20 | input[0] = [ 21 | [ ], 22 | file("data.tar.gz") 23 | ] 24 | """ 25 | } 26 | } 27 | } 28 | 29 | test("rgi/cardannotation") { 30 | 31 | when { 32 | process { 33 | """ 34 | input[0] = UNTAR.out.untar.map{ it[1] } 35 | """ 36 | } 37 | } 38 | 39 | then { 40 | assertAll( 41 | { assert process.success }, 42 | { assert snapshot(process.out).match() } 43 | ) 44 | } 45 | } 46 | 47 | test("rgi/cardannotation - stub") { 48 | 49 | options "-stub" 50 | 51 | when { 52 | process { 53 | """ 54 | input[0] = UNTAR.out.untar.map{ it[1] } 55 | """ 56 | } 57 | } 58 | 59 | then { 60 | assertAll( 61 | { assert process.success }, 62 | { assert snapshot(process.out).match() } 63 | ) 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /modules/nf-core/rgi/main/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::rgi=6.0.5 8 | -------------------------------------------------------------------------------- /modules/nf-core/seqkit/seq/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - bioconda::seqkit=2.9.0 8 | -------------------------------------------------------------------------------- 
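The two RGI components listed above are meant to be chained: rgi/cardannotation turns the raw CARD download into a processed database directory, and rgi/main (only its environment.yml appears in this listing) consumes that directory to screen contigs for resistance genes. The following wiring sketch is illustrative only: the RGI_MAIN input signature and the params.card_tarball parameter are assumptions, and the UNTAR staging mirrors the rgi/cardannotation test above, which saves the CARD archive locally as a .tar.gz before unpacking.

include { UNTAR              } from './modules/nf-core/untar/main'
include { RGI_CARDANNOTATION } from './modules/nf-core/rgi/cardannotation/main'
include { RGI_MAIN           } from './modules/nf-core/rgi/main/main'

workflow ARG_RGI_SKETCH {
    take:
    contigs // channel: [ val(meta), path(fasta) ]

    main:
    // CARD archive fetched beforehand from https://card.mcmaster.ca/latest/data
    // and saved as a tarball; params.card_tarball is a hypothetical parameter
    ch_card = Channel.of([ [:], file(params.card_tarball) ])
    UNTAR(ch_card)

    // Preprocess CARD once, extracting the directory from the [ meta, dir ]
    // tuple the same way the rgi/cardannotation test above does
    RGI_CARDANNOTATION(UNTAR.out.untar.map { it[1] })

    // Assumed signature: RGI_MAIN( tuple(meta, fasta), processed_card_db )
    RGI_MAIN(contigs, RGI_CARDANNOTATION.out.db)
}

Annotating CARD once and broadcasting the processed directory to every sample avoids re-running the preprocessing step per task.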
/modules/nf-core/seqkit/seq/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json 2 | name: "seqkit_seq" 3 | description: Transforms sequences (extract ID, filter by length, remove gaps, reverse 4 | complement...) 5 | keywords: 6 | - genomics 7 | - fasta 8 | - fastq 9 | - transform 10 | - filter 11 | - gaps 12 | - complement 13 | tools: 14 | - "seqkit": 15 | description: "A cross-platform and ultrafast toolkit for FASTA/Q file manipulation" 16 | homepage: "https://bioinf.shenwei.me/seqkit/" 17 | documentation: "https://bioinf.shenwei.me/seqkit/usage/" 18 | tool_dev_url: "https://github.com/shenwei356/seqkit" 19 | doi: "10.1371/journal.pone.0163962" 20 | licence: ["MIT"] 21 | identifier: biotools:seqkit 22 | input: 23 | - - meta: 24 | type: map 25 | description: | 26 | Groovy Map containing sample information 27 | e.g. `[ id:'sample1' ]` 28 | - fastx: 29 | type: file 30 | description: Input fasta/fastq file 31 | pattern: "*.{fsa,fas,fa,fasta,fastq,fq,fsa.gz,fas.gz,fa.gz,fasta.gz,fastq.gz,fq.gz}" 32 | output: 33 | - fastx: 34 | - meta: 35 | type: map 36 | description: | 37 | Groovy Map containing sample information 38 | e.g. `[ id:'sample1' ]` 39 | - ${prefix}.*: 40 | type: file 41 | description: Output fasta/fastq file 42 | pattern: "*.{fasta,fasta.gz,fastq,fastq.gz}" 43 | - versions: 44 | - versions.yml: 45 | type: file 46 | description: File containing software versions 47 | pattern: "versions.yml" 48 | authors: 49 | - "@GallVp" 50 | maintainers: 51 | - "@GallVp" 52 | -------------------------------------------------------------------------------- /modules/nf-core/seqkit/seq/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | ext.args2 = '-n' 3 | } 4 | -------------------------------------------------------------------------------- /modules/nf-core/seqkit/seq/tests/tags.yml: -------------------------------------------------------------------------------- 1 | seqkit/seq: 2 | - "modules/nf-core/seqkit/seq/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/tabix/bgzip/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | 7 | dependencies: 8 | - bioconda::htslib=1.20 9 | - bioconda::tabix=1.11 10 | -------------------------------------------------------------------------------- /modules/nf-core/tabix/bgzip/main.nf: -------------------------------------------------------------------------------- 1 | process TABIX_BGZIP { 2 | tag "$meta.id" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/htslib:1.20--h5efdd21_2' : 8 | 'biocontainers/htslib:1.20--h5efdd21_2' }" 9 | 10 | input: 11 | tuple val(meta), path(input) 12 | 13 | output: 14 | tuple val(meta), path("${output}") , emit: output 15 | tuple val(meta), path("${output}.gzi"), emit: gzi, optional: true 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | prefix = task.ext.prefix ?: "${meta.id}" 24 | in_bgzip = ["gz", "bgz", "bgzf"].contains(input.getExtension()) 25 | extension = in_bgzip ? input.getBaseName().tokenize(".")[-1] : input.getExtension() 26 | output = in_bgzip ? "${prefix}.${extension}" : "${prefix}.${extension}.gz" 27 | command = in_bgzip ? '-d' : '' 28 | // Name the index according to $prefix, unless a name has been requested 29 | if ((args.matches("(^| )-i\\b") || args.matches("(^| )--index(\$| )")) && !args.matches("(^| )-I\\b") && !args.matches("(^| )--index-name\\b")) { 30 | args = args + " -I ${output}.gzi" 31 | } 32 | """ 33 | bgzip $command -c $args -@${task.cpus} $input > ${output} 34 | 35 | cat <<-END_VERSIONS > versions.yml 36 | "${task.process}": 37 | tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') 38 | END_VERSIONS 39 | """ 40 | 41 | stub: 42 | prefix = task.ext.prefix ?: "${meta.id}" 43 | in_bgzip = ["gz", "bgz", "bgzf"].contains(input.getExtension()) 44 | output = in_bgzip ? input.getBaseName() : "${prefix}.${input.getExtension()}.gz" 45 | 46 | """ 47 | echo "" | gzip > ${output} 48 | touch ${output}.gzi 49 | 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/tabix/bgzip/meta.yml: -------------------------------------------------------------------------------- 1 | name: tabix_bgzip 2 | description: Compresses/decompresses files 3 | keywords: 4 | - compress 5 | - decompress 6 | - bgzip 7 | - tabix 8 | tools: 9 | - bgzip: 10 | description: | 11 | Bgzip compresses or decompresses files in a similar manner to, and compatible with, gzip. 12 | homepage: https://www.htslib.org/doc/tabix.html 13 | documentation: http://www.htslib.org/doc/bgzip.html 14 | doi: 10.1093/bioinformatics/btp352 15 | licence: ["MIT"] 16 | identifier: biotools:tabix 17 | input: 18 | - - meta: 19 | type: map 20 | description: | 21 | Groovy Map containing sample information 22 | e.g. [ id:'test', single_end:false ] 23 | - input: 24 | type: file 25 | description: file to compress or to decompress 26 | output: 27 | - output: 28 | - meta: 29 | type: map 30 | description: | 31 | Groovy Map containing sample information 32 | e.g. [ id:'test', single_end:false ] 33 | - ${output}: 34 | type: file 35 | description: Output compressed/decompressed file 36 | pattern: "*." 37 | - gzi: 38 | - meta: 39 | type: map 40 | description: | 41 | Groovy Map containing sample information 42 | e.g. 
[ id:'test', single_end:false ] 43 | - ${output}.gzi: 44 | type: file 45 | description: Optional gzip index file for compressed inputs 46 | pattern: "*.gzi" 47 | - versions: 48 | - versions.yml: 49 | type: file 50 | description: File containing software versions 51 | pattern: "versions.yml" 52 | authors: 53 | - "@joseespinosa" 54 | - "@drpatelh" 55 | - "@maxulysse" 56 | - "@nvnieuwk" 57 | maintainers: 58 | - "@joseespinosa" 59 | - "@drpatelh" 60 | - "@maxulysse" 61 | - "@nvnieuwk" 62 | -------------------------------------------------------------------------------- /modules/nf-core/tabix/bgzip/tests/bgzip_compress.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_BGZIP { 3 | ext.args = ' -i' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/tabix/bgzip/tests/tags.yml: -------------------------------------------------------------------------------- 1 | tabix/bgzip: 2 | - "modules/nf-core/tabix/bgzip/**" 3 | -------------------------------------------------------------------------------- /modules/nf-core/tabix/bgzip/tests/vcf_none.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: TABIX_BGZIP { 3 | ext.args = '' 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /modules/nf-core/untar/environment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | dependencies: 7 | - conda-forge::coreutils=9.5 8 | - conda-forge::grep=3.11 9 | - conda-forge::gzip=1.13 10 | - conda-forge::lbzip2=2.5 11 | - conda-forge::sed=4.8 12 | - conda-forge::tar=1.34 13 | -------------------------------------------------------------------------------- /modules/nf-core/untar/meta.yml: -------------------------------------------------------------------------------- 1 | name: untar 2 | description: Extract files. 3 | keywords: 4 | - untar 5 | - uncompress 6 | - extract 7 | tools: 8 | - untar: 9 | description: | 10 | Extract tar.gz files. 11 | documentation: https://www.gnu.org/software/tar/manual/ 12 | licence: ["GPL-3.0-or-later"] 13 | identifier: "" 14 | input: 15 | - - meta: 16 | type: map 17 | description: | 18 | Groovy Map containing sample information 19 | e.g. [ id:'test', single_end:false ] 20 | - archive: 21 | type: file 22 | description: File to be untarred 23 | pattern: "*.{tar}.{gz}" 24 | output: 25 | - untar: 26 | - meta: 27 | type: map 28 | description: | 29 | Groovy Map containing sample information 30 | e.g. [ id:'test', single_end:false ] 31 | pattern: "*/" 32 | - ${prefix}: 33 | type: directory 34 | description: | 35 | Directory containing the extracted
36 | archive contents 37 | pattern: "*/" 38 | - versions: 39 | - versions.yml: 40 | type: file 41 | description: File containing software versions 42 | pattern: "versions.yml" 43 | authors: 44 | - "@joseespinosa" 45 | - "@drpatelh" 46 | - "@matthdsm" 47 | - "@jfy133" 48 | maintainers: 49 | - "@joseespinosa" 50 | - "@drpatelh" 51 | - "@matthdsm" 52 | - "@jfy133" 53 | -------------------------------------------------------------------------------- /modules/nf-core/untar/tests/tags.yml: -------------------------------------------------------------------------------- 1 | untar: 2 | - modules/nf-core/untar/** 3 | -------------------------------------------------------------------------------- /nf-test.config: -------------------------------------------------------------------------------- 1 | config { 2 | // location for all nf-test tests 3 | testsDir "." 4 | 5 | // nf-test directory including temporary files for each test 6 | workDir System.getenv("NFT_WORKDIR") ?: ".nf-test" 7 | 8 | // location of an optional nextflow.config file specific for executing tests 9 | configFile "tests/nextflow.config" 10 | 11 | // ignore tests coming from the nf-core/modules repo 12 | ignore 'modules/nf-core/**/tests/*', 'subworkflows/nf-core/**/tests/*' 13 | 14 | // run all tests with the defined profile(s) from the main nextflow.config 15 | profile "test" 16 | 17 | // list of filenames or patterns that should trigger a full test run 18 | triggers 'nextflow.config', 'nf-test.config', 'conf/test.config', 'tests/nextflow.config', 'tests/.nftignore' 19 | 20 | // load the necessary plugins 21 | plugins { 22 | load "nft-utils@0.0.3" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NEXTFLOW_PIPELINE" 3 | description: Subworkflow with functionality that may be useful for any Nextflow pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - print_version: 12 | type: boolean 13 | description: | 14 | Print the version of the pipeline and exit 15 | - dump_parameters: 16 | type: boolean 17 | description: | 18 | Dump the parameters of the pipeline to a JSON file 19 | - output_directory: 20 | type: directory 21 | description: Path to output dir to write JSON file to. 22 | pattern: "results/" 23 | - check_conda_channel: 24 | type: boolean 25 | description: | 26 | Check if the conda channel priority is correct.
27 | output: 28 | - dummy_emit: 29 | type: boolean 30 | description: | 31 | Dummy emit to make nf-core subworkflows lint happy 32 | authors: 33 | - "@adamrtalbot" 34 | - "@drpatelh" 35 | maintainers: 36 | - "@adamrtalbot" 37 | - "@drpatelh" 38 | - "@maxulysse" 39 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" 6 | config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" 7 | tag 'subworkflows' 8 | tag 'utils_nextflow_pipeline' 9 | tag 'subworkflows/utils_nextflow_pipeline' 10 | 11 | test("Test Function getWorkflowVersion") { 12 | 13 | function "getWorkflowVersion" 14 | 15 | then { 16 | assertAll( 17 | { assert function.success }, 18 | { assert snapshot(function.result).match() } 19 | ) 20 | } 21 | } 22 | 23 | test("Test Function dumpParametersToJSON") { 24 | 25 | function "dumpParametersToJSON" 26 | 27 | when { 28 | function { 29 | """ 30 | // define inputs of the function here. Example: 31 | input[0] = "$outputDir" 32 | """.stripIndent() 33 | } 34 | } 35 | 36 | then { 37 | assertAll( 38 | { assert function.success } 39 | ) 40 | } 41 | } 42 | 43 | test("Test Function checkCondaChannels") { 44 | 45 | function "checkCondaChannels" 46 | 47 | then { 48 | assertAll( 49 | { assert function.success }, 50 | { assert snapshot(function.result).match() } 51 | ) 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Test Function getWorkflowVersion": { 3 | "content": [ 4 | "v9.9.9" 5 | ], 6 | "meta": { 7 | "nf-test": "0.8.4", 8 | "nextflow": "23.10.1" 9 | }, 10 | "timestamp": "2024-02-28T12:02:05.308243" 11 | }, 12 | "Test Function checkCondaChannels": { 13 | "content": null, 14 | "meta": { 15 | "nf-test": "0.8.4", 16 | "nextflow": "23.10.1" 17 | }, 18 | "timestamp": "2024-02-28T12:02:12.425833" 19 | } 20 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nextflow_pipeline: 2 | - subworkflows/nf-core/utils_nextflow_pipeline/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFCORE_PIPELINE" 3 | description: Subworkflow with utility functions specific to the nf-core 
pipeline template 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - nextflow_cli_args: 12 | type: list 13 | description: | 14 | Nextflow CLI positional arguments 15 | output: 16 | - success: 17 | type: boolean 18 | description: | 19 | Dummy output to indicate success 20 | authors: 21 | - "@adamrtalbot" 22 | maintainers: 23 | - "@adamrtalbot" 24 | - "@maxulysse" 25 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NFCORE_PIPELINE" 4 | script "../main.nf" 5 | config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" 6 | workflow "UTILS_NFCORE_PIPELINE" 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "utils_nfcore_pipeline" 10 | tag "subworkflows/utils_nfcore_pipeline" 11 | 12 | test("Should run without failures") { 13 | 14 | when { 15 | workflow { 16 | """ 17 | input[0] = [] 18 | """ 19 | } 20 | } 21 | 22 | then { 23 | assertAll( 24 | { assert workflow.success }, 25 | { assert snapshot(workflow.out).match() } 26 | ) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | true 7 | ], 8 | "valid_config": [ 9 | true 10 | ] 11 | } 12 | ], 13 | "meta": { 14 | "nf-test": "0.8.4", 15 | "nextflow": "23.10.1" 16 | }, 17 | "timestamp": "2024-02-28T12:03:25.726491" 18 | } 19 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nfcore_pipeline: 2 | - subworkflows/nf-core/utils_nfcore_pipeline/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfschema_plugin/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Subworkflow that uses the nf-schema plugin to validate parameters and render the parameter summary 3 | // 4 | 5 | include { paramsSummaryLog } from 'plugin/nf-schema' 6 | include { validateParameters } from 'plugin/nf-schema' 7 | 8 | workflow UTILS_NFSCHEMA_PLUGIN { 9 | 10 | take: 11 | input_workflow // workflow: the workflow object used by nf-schema to get metadata from the workflow 12 | validate_params // boolean: validate the parameters 13 | parameters_schema // string: path to the parameters JSON schema. 
14 | // this has to be the same as the schema given to `validation.parametersSchema` 15 | // when this input is empty it will automatically use the configured schema or 16 | // "${projectDir}/nextflow_schema.json" as default. This input should not be empty 17 | // for meta pipelines 18 | 19 | main: 20 | 21 | // 22 | // Print parameter summary to stdout. This will display the parameters 23 | // that differ from the default given in the JSON schema 24 | // 25 | if(parameters_schema) { 26 | log.info paramsSummaryLog(input_workflow, parameters_schema:parameters_schema) 27 | } else { 28 | log.info paramsSummaryLog(input_workflow) 29 | } 30 | 31 | // 32 | // Validate the parameters using nextflow_schema.json or the schema 33 | // given via the validation.parametersSchema configuration option 34 | // 35 | if(validate_params) { 36 | if(parameters_schema) { 37 | validateParameters(parameters_schema:parameters_schema) 38 | } else { 39 | validateParameters() 40 | } 41 | } 42 | 43 | emit: 44 | dummy_emit = true 45 | } 46 | 47 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfschema_plugin/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "utils_nfschema_plugin" 3 | description: Run nf-schema to validate parameters and create a summary of changed parameters 4 | keywords: 5 | - validation 6 | - JSON schema 7 | - plugin 8 | - parameters 9 | - summary 10 | components: [] 11 | input: 12 | - input_workflow: 13 | type: object 14 | description: | 15 | The workflow object of the used pipeline. 16 | This object contains meta data used to create the params summary log 17 | - validate_params: 18 | type: boolean 19 | description: Validate the parameters and error if invalid. 20 | - parameters_schema: 21 | type: string 22 | description: | 23 | Path to the parameters JSON schema. 24 | This has to be the same as the schema given to the `validation.parametersSchema` config 25 | option. When this input is empty it will automatically use the configured schema or 26 | "${projectDir}/nextflow_schema.json" as default. The schema should not be given in this way 27 | for meta pipelines. 
28 | output: 29 | - dummy_emit: 30 | type: boolean 31 | description: Dummy emit to make nf-core subworkflows lint happy 32 | authors: 33 | - "@nvnieuwk" 34 | maintainers: 35 | - "@nvnieuwk" 36 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "nf-schema@2.4.2" 3 | } 4 | 5 | validation { 6 | parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json" 7 | monochromeLogs = true 8 | } -------------------------------------------------------------------------------- /tests/.nftignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | multiqc/multiqc_data/BETA-multiqc.parquet 3 | multiqc/multiqc_data/multiqc.log 4 | multiqc/multiqc_data/multiqc_data.json 5 | multiqc/multiqc_data/multiqc_sources.txt 6 | multiqc/multiqc_data/multiqc_software_versions.txt 7 | multiqc/multiqc_plots/{svg,pdf,png}/*.{svg,pdf,png} 8 | multiqc/multiqc_report.html 9 | pipeline_info/*.{html,json,txt,yml} 10 | -------------------------------------------------------------------------------- /tests/nextflow.config: -------------------------------------------------------------------------------- 1 | /* 2 | ======================================================================================== 3 | Nextflow config file for running nf-test tests 4 | ======================================================================================== 5 | */ 6 | 7 | // Or any resource requirements 8 | params { 9 | modules_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/' 10 | pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/refs/heads/funcscan' 11 | } 12 | 13 | aws.client.anonymous = true // fixes S3 access issues on self-hosted runners 14 | -------------------------------------------------------------------------------- /tests/test_bgc_bakta.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "antismash_css": { 3 | "content": [ 4 | [ 5 | "bacteria.css:md5,e5b4d3ceaa91b03f6393d9b3d5f072e7" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.9.2", 10 | "nextflow": "25.04.6" 11 | }, 12 | "timestamp": "2025-07-08T14:33:24.89234599" 13 | }, 14 | "deepbgc_bgc_gbk": { 15 | "content": [ 16 | "sample_2.bgc.gbk:md5,d41d8cd98f00b204e9800998ecf8427e" 17 | ], 18 | "meta": { 19 | "nf-test": "0.9.0", 20 | "nextflow": "24.04.3" 21 | }, 22 | "timestamp": "2024-07-24T10:32:18.378687548" 23 | }, 24 | "gecco": { 25 | "content": [ 26 | "sample_2.genes.tsv:md5,66e3724c7e7da102bf58acd564211e8b", 27 | "sample_2.features.tsv:md5,2ef146213836ca80d3079776f17c7cb2" 28 | ], 29 | "meta": { 30 | "nf-test": "0.9.0", 31 | "nextflow": "24.04.3" 32 | }, 33 | "timestamp": "2024-07-24T10:32:18.404694725" 34 | } 35 | } -------------------------------------------------------------------------------- /tests/test_bgc_prokka.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "antismash_css": { 3 | "content": [ 4 | [ 5 | "bacteria.css:md5,e5b4d3ceaa91b03f6393d9b3d5f072e7" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.9.2", 10 | "nextflow": "25.04.6" 11 | }, 12 | "timestamp": "2025-07-08T14:19:44.456092051" 13 | }, 14 | "deepbgc_bgc_gbk": { 15 | "content": [ 16 |
"sample_2.bgc.gbk:md5,d41d8cd98f00b204e9800998ecf8427e" 17 | ], 18 | "meta": { 19 | "nf-test": "0.9.0", 20 | "nextflow": "24.04.3" 21 | }, 22 | "timestamp": "2024-07-24T10:39:33.920624113" 23 | }, 24 | "gecco": { 25 | "content": [ 26 | "sample_2.genes.tsv:md5,050b82ca462430ecc0635acb2e297531", 27 | "sample_2.features.tsv:md5,79354868ee3de6fdc419195b8fa8edb6" 28 | ], 29 | "meta": { 30 | "nf-test": "0.9.0", 31 | "nextflow": "24.04.3" 32 | }, 33 | "timestamp": "2024-07-24T10:39:33.944935473" 34 | } 35 | } -------------------------------------------------------------------------------- /tests/test_bgc_pyrodigal.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "antismash_css": { 3 | "content": [ 4 | [ 5 | "bacteria.css:md5,e5b4d3ceaa91b03f6393d9b3d5f072e7" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.9.2", 10 | "nextflow": "25.04.6" 11 | }, 12 | "timestamp": "2025-07-08T14:06:24.158167102" 13 | }, 14 | "deepbgc_bgc_gbk": { 15 | "content": [ 16 | "sample_2.bgc.gbk:md5,d41d8cd98f00b204e9800998ecf8427e" 17 | ], 18 | "meta": { 19 | "nf-test": "0.9.0", 20 | "nextflow": "24.04.3" 21 | }, 22 | "timestamp": "2024-07-24T10:45:44.435766452" 23 | }, 24 | "gecco": { 25 | "content": [ 26 | "sample_2.genes.tsv:md5,66e3724c7e7da102bf58acd564211e8b", 27 | "sample_2.features.tsv:md5,2ef146213836ca80d3079776f17c7cb2" 28 | ], 29 | "meta": { 30 | "nf-test": "0.9.0", 31 | "nextflow": "24.04.3" 32 | }, 33 | "timestamp": "2024-07-24T10:45:25.732866237" 34 | } 35 | } -------------------------------------------------------------------------------- /tests/test_minimal.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_pipeline { 2 | 3 | name "Test pipeline: NFCORE_FUNCSCAN" 4 | script "main.nf" 5 | tag "pipeline" 6 | tag "nfcore_funcscan" 7 | tag "test_minimal" 8 | profile "test_minimal" 9 | 10 | test("-profile test_minimal") { 11 | 12 | when { 13 | params { 14 | outdir = "$outputDir" 15 | } 16 | } 17 | 18 | then { 19 | assertAll( 20 | { assert workflow.success }, 21 | { assert new File("$outputDir/pipeline_info/nf_core_funcscan_software_mqc_versions.yml").exists() }, 22 | { assert new File("$outputDir/multiqc/multiqc_report.html").exists() }, 23 | ) 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /tests/test_preannotated_bgc.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "antismash_css": { 3 | "content": [ 4 | [ 5 | "bacteria.css:md5,e5b4d3ceaa91b03f6393d9b3d5f072e7" 6 | ], 7 | [ 8 | "bacteria.css:md5,e5b4d3ceaa91b03f6393d9b3d5f072e7" 9 | ], 10 | [ 11 | "bacteria.css:md5,e5b4d3ceaa91b03f6393d9b3d5f072e7" 12 | ] 13 | ], 14 | "meta": { 15 | "nf-test": "0.9.2", 16 | "nextflow": "25.04.6" 17 | }, 18 | "timestamp": "2025-07-08T14:38:53.077348266" 19 | }, 20 | "deepbgc_bgc_gbk": { 21 | "content": [ 22 | "sample_1.bgc.gbk:md5,e50e429959e9c4bf0c4b97d9dcd54a08", 23 | "sample_2.bgc.gbk:md5,effe3cfc91772eb4e4b50ac46f13a941", 24 | "sample_3.bgc.gbk:md5,41920a93524a1bb32ae1003d69327642" 25 | ], 26 | "meta": { 27 | "nf-test": "0.9.0", 28 | "nextflow": "24.04.3" 29 | }, 30 | "timestamp": "2024-07-24T11:06:00.388012579" 31 | }, 32 | "gecco": { 33 | "content": [ 34 | "sample_1.genes.tsv:md5,804af8236a7148baf8919e3acf30947d", 35 | "sample_1.features.tsv:md5,a84d59fd63e2593dc5872b4f9bb268b2", 36 | "sample_2.genes.tsv:md5,5a2b20c5c1cd821a2af405229c4c0f78", 37 | 
"sample_2.features.tsv:md5,579a27490188f5bc47a4deb4d1d1b8dc", 38 | "sample_3.genes.tsv:md5,6874723404b3326f0f73e59f03b96837", 39 | "sample_3.features.tsv:md5,490f98655089b3c73f88b93347cca465" 40 | ], 41 | "meta": { 42 | "nf-test": "0.9.0", 43 | "nextflow": "24.04.3" 44 | }, 45 | "timestamp": "2024-07-24T10:49:00.44526019" 46 | } 47 | } -------------------------------------------------------------------------------- /tower.yml: -------------------------------------------------------------------------------- 1 | reports: 2 | multiqc_report.html: 3 | display: "MultiQC HTML report" 4 | samplesheet.csv: 5 | display: "Auto-created samplesheet with collated metadata and FASTQ paths" 6 | --------------------------------------------------------------------------------