├── .devcontainer
│   └── devcontainer.json
├── .editorconfig
├── .gitattributes
├── .github
│   ├── .dockstore.yml
│   ├── CONTRIBUTING.md
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   └── feature_request.yml
│   ├── PULL_REQUEST_TEMPLATE.md
│   └── workflows
│       ├── awsfulltest.yml
│       ├── awstest.yml
│       ├── branch.yml
│       ├── ci.yml
│       ├── clean-up.yml
│       ├── download_pipeline.yml
│       ├── fix-linting.yml
│       ├── linting.yml
│       ├── linting_comment.yml
│       └── release-announcements.yml
├── .gitignore
├── .gitpod.yml
├── .nf-core.yml
├── .pre-commit-config.yaml
├── .prettierignore
├── .prettierrc.yml
├── CHANGELOG.md
├── CITATIONS.md
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── assets
│   ├── adaptivecard.json
│   ├── email_template.html
│   ├── email_template.txt
│   ├── methods_description_template.yml
│   ├── multiqc_config.yml
│   ├── nf-core-seqinspector_logo_light.png
│   ├── samplesheet.csv
│   ├── schema_input.json
│   ├── sendmail_template.txt
│   └── slackreport.json
├── conf
│   ├── base.config
│   ├── igenomes.config
│   ├── modules.config
│   ├── test.config
│   └── test_full.config
├── docs
│   ├── README.md
│   ├── images
│   │   ├── mqc_fastqc_adapter.png
│   │   ├── mqc_fastqc_counts.png
│   │   ├── mqc_fastqc_quality.png
│   │   ├── nf-core-seqinspector_logo_dark.png
│   │   └── nf-core-seqinspector_logo_light.png
│   ├── output.md
│   └── usage.md
├── main.nf
├── modules.json
├── modules
│   └── nf-core
│       ├── fastqc
│       │   ├── environment.yml
│       │   ├── main.nf
│       │   ├── meta.yml
│       │   └── tests
│       │       ├── main.nf.test
│       │       ├── main.nf.test.snap
│       │       └── tags.yml
│       └── multiqc
│           ├── environment.yml
│           ├── main.nf
│           ├── meta.yml
│           └── tests
│               ├── main.nf.test
│               ├── main.nf.test.snap
│               └── tags.yml
├── nextflow.config
├── nextflow_schema.json
├── pyproject.toml
├── subworkflows
│   ├── local
│   │   └── utils_nfcore_seqinspector_pipeline
│   │       └── main.nf
│   └── nf-core
│       ├── utils_nextflow_pipeline
│       │   ├── main.nf
│       │   ├── meta.yml
│       │   └── tests
│       │       ├── main.function.nf.test
│       │       ├── main.function.nf.test.snap
│       │       ├── main.workflow.nf.test
│       │       ├── nextflow.config
│       │       └── tags.yml
│       ├── utils_nfcore_pipeline
│       │   ├── main.nf
│       │   ├── meta.yml
│       │   └── tests
│       │       ├── main.function.nf.test
│       │       ├── main.function.nf.test.snap
│       │       ├── main.workflow.nf.test
│       │       ├── main.workflow.nf.test.snap
│       │       ├── nextflow.config
│       │       └── tags.yml
│       └── utils_nfvalidation_plugin
│           ├── main.nf
│           ├── meta.yml
│           └── tests
│               ├── main.nf.test
│               ├── nextflow_schema.json
│               └── tags.yml
├── tower.yml
└── workflows
    └── seqinspector.nf

/.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nfcore", 3 | "image": "nfcore/gitpod:latest", 4 | "remoteUser": "gitpod", 5 | "runArgs": ["--privileged"], 6 | 7 | // Configure tool-specific properties. 8 | "customizations": { 9 | // Configure properties specific to VS Code. 10 | "vscode": { 11 | // Set *default* container specific settings.json values on container create. 12 | "settings": { 13 | "python.defaultInterpreterPath": "/opt/conda/bin/python" 14 | }, 15 | 16 | // Add the IDs of extensions you want installed when the container is created. 
17 | "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | insert_final_newline = true 7 | trim_trailing_whitespace = true 8 | indent_size = 4 9 | indent_style = space 10 | 11 | [*.{md,yml,yaml,html,css,scss,js}] 12 | indent_size = 2 13 | 14 | # These files are edited and tested upstream in nf-core/modules 15 | [/modules/nf-core/**] 16 | charset = unset 17 | end_of_line = unset 18 | insert_final_newline = unset 19 | trim_trailing_whitespace = unset 20 | indent_style = unset 21 | [/subworkflows/nf-core/**] 22 | charset = unset 23 | end_of_line = unset 24 | insert_final_newline = unset 25 | trim_trailing_whitespace = unset 26 | indent_style = unset 27 | 28 | [/assets/email*] 29 | indent_size = unset 30 | 31 | # ignore Readme 32 | [README.md] 33 | indent_style = unset 34 | 35 | # ignore python 36 | [*.{py,md}] 37 | indent_style = unset 38 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.config linguist-language=nextflow 2 | *.nf.test linguist-language=nextflow 3 | modules/nf-core/** linguist-generated 4 | subworkflows/nf-core/** linguist-generated 5 | -------------------------------------------------------------------------------- /.github/.dockstore.yml: -------------------------------------------------------------------------------- 1 | # Dockstore config version, not pipeline version 2 | version: 1.2 3 | workflows: 4 | - subclass: nfl 5 | primaryDescriptorPath: /nextflow.config 6 | publish: True 7 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # nf-core/seqinspector: Contributing Guidelines 2 | 3 | Hi there! 4 | Many thanks for taking an interest in improving nf-core/seqinspector. 5 | 6 | We try to manage the required tasks for nf-core/seqinspector using GitHub issues, you probably came to this page when creating one. 7 | Please use the pre-filled template to save time. 8 | 9 | However, don't be put off by this template - other more general issues and suggestions are welcome! 10 | Contributions to the code are even more welcome ;) 11 | 12 | > [!NOTE] 13 | > If you need help using or modifying nf-core/seqinspector then the best place to ask is on the nf-core Slack [#seqinspector](https://nfcore.slack.com/channels/seqinspector) channel ([join our Slack here](https://nf-co.re/join/slack)). 14 | 15 | ## Contribution workflow 16 | 17 | If you'd like to write some code for nf-core/seqinspector, the standard workflow is as follows: 18 | 19 | 1. Check that there isn't already an issue about your idea in the [nf-core/seqinspector issues](https://github.com/nf-core/seqinspector/issues) to avoid duplicating work. If there isn't one already, please create one so that others know you're working on this 20 | 2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [nf-core/seqinspector repository](https://github.com/nf-core/seqinspector) to your GitHub account 21 | 3. 
Make the necessary changes / additions within your forked repository following [Pipeline conventions](#pipeline-contribution-conventions) 22 | 4. Use `nf-core schema build` and add any new parameters to the pipeline JSON schema (requires [nf-core tools](https://github.com/nf-core/tools) >= 1.10). 23 | 5. Submit a Pull Request against the `dev` branch and wait for the code to be reviewed and merged 24 | 25 | If you're not used to this workflow with git, you can start with some [docs from GitHub](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests) or even their [excellent `git` resources](https://try.github.io/). 26 | 27 | ## Tests 28 | 29 | You have the option to test your changes locally by running the pipeline. To receive warnings about process selectors and other `debug` information, it is recommended to use the `debug` profile. Execute all the tests with the following command: 30 | 31 | ```bash 32 | nf-test test --profile debug,test,docker --verbose 33 | ``` 34 | 35 | When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests. 36 | Typically, pull requests are only fully reviewed when these tests are passing, though of course we can help out before then. 37 | 38 | There are typically two types of tests that run: 39 | 40 | ### Lint tests 41 | 42 | `nf-core` has a [set of guidelines](https://nf-co.re/developers/guidelines) which all pipelines must adhere to. 43 | To enforce these and ensure that all pipelines stay in sync, we have developed a helper tool which runs checks on the pipeline code. This is in the [nf-core/tools repository](https://github.com/nf-core/tools) and once installed can be run locally with the `nf-core lint` command. 44 | 45 | If any failures or warnings are encountered, please follow the listed URL for more documentation. 46 | 47 | ### Pipeline tests 48 | 49 | Each `nf-core` pipeline should be set up with a minimal set of test data. 50 | `GitHub Actions` then runs the pipeline on this data to ensure that it exits successfully. 51 | If there are any failures then the automated tests fail. 52 | These tests are run both with the latest available version of `Nextflow` and also the minimum required version that is stated in the pipeline code. 53 | 54 | ## Patch 55 | 56 | :warning: Only in the unlikely and regretful event of a release happening with a bug. 57 | 58 | - On your own fork, make a new branch `patch` based on `upstream/master`. 59 | - Fix the bug, and bump version (X.Y.Z+1). 60 | - A PR should be made on `master` from patch to directly address this particular bug. 61 | 62 | ## Getting help 63 | 64 | For further information/help, please consult the [nf-core/seqinspector documentation](https://nf-co.re/seqinspector/usage) and don't hesitate to get in touch on the nf-core Slack [#seqinspector](https://nfcore.slack.com/channels/seqinspector) channel ([join our Slack here](https://nf-co.re/join/slack)). 65 | 66 | ## Pipeline contribution conventions 67 | 68 | To make the nf-core/seqinspector code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written. 69 | 70 | ### Adding a new step 71 | 72 | If you wish to contribute a new step, please use the following coding standards: 73 | 74 | 1. Define the corresponding input channel into your new process from the expected previous process channel 75 | 2. Write the process block (see below). 76 | 3. 
Define the output channel if needed (see below). 77 | 4. Add any new parameters to `nextflow.config` with a default (see below). 78 | 5. Add any new parameters to `nextflow_schema.json` with help text (via the `nf-core schema build` tool). 79 | 6. Add sanity checks and validation for all relevant parameters. 80 | 7. Perform local tests to validate that the new code works as expected. 81 | 8. If applicable, add a new test command in `.github/workflows/ci.yml`. 82 | 9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean-up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://multiqc.info/) module. 83 | 10. Add a description of the output files and, if relevant, any appropriate images from the MultiQC report to `docs/output.md`. 84 | 85 | ### Default values 86 | 87 | Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope. 88 | 89 | Once there, use `nf-core schema build` to add to `nextflow_schema.json`. 90 | 91 | ### Default processes resource requirements 92 | 93 | Sensible defaults for process resource requirements (CPUs / memory / time) for a process should be defined in `conf/base.config`. These should generally be specified generically with `withLabel:` selectors so they can be shared across multiple processes/steps of the pipeline. An nf-core standard set of labels that should be followed where possible can be seen in the [nf-core pipeline template](https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config), which has the default process as a single-core process, and then different levels of multi-core configurations for increasingly large memory requirements defined with standardised labels. 94 | 95 | The process resources can be passed on to the tool dynamically within the process with the `${task.cpus}` and `${task.memory}` variables in the `script:` block. 96 | 97 | ### Naming schemes 98 | 99 | Please use the following naming schemes, to make it easy to understand what is going where. 100 | 101 | - initial process channel: `ch_output_from_<process>` 102 | - intermediate and terminal channels: `ch_<previousprocess>_for_<nextprocess>` 103 | 104 | ### Nextflow version bumping 105 | 106 | If you are using a new feature from core Nextflow, you may bump the minimum required version of Nextflow in the pipeline with: `nf-core bump-version --nextflow . [min-nf-version]` 107 | 108 | ### Images and figures 109 | 110 | For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). 111 | 112 | ## GitHub Codespaces 113 | 114 | This repo includes a devcontainer configuration which will create a GitHub Codespace for Nextflow development! This is an online developer environment that runs in your browser, complete with VSCode and a terminal. 
115 | 116 | To get started: 117 | 118 | - Open the repo in [Codespaces](https://github.com/nf-core/seqinspector/codespaces) 119 | - Tools installed 120 | - nf-core 121 | - Nextflow 122 | 123 | Devcontainer specs: 124 | 125 | - [DevContainer config](.devcontainer/devcontainer.json) 126 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Report something that is broken or incorrect 3 | labels: bug 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Before you post this issue, please check the documentation: 9 | 10 | - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) 11 | - [nf-core/seqinspector pipeline documentation](https://nf-co.re/seqinspector/usage) 12 | 13 | - type: textarea 14 | id: description 15 | attributes: 16 | label: Description of the bug 17 | description: A clear and concise description of what the bug is. 18 | validations: 19 | required: true 20 | 21 | - type: textarea 22 | id: command_used 23 | attributes: 24 | label: Command used and terminal output 25 | description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal. 26 | render: console 27 | placeholder: | 28 | $ nextflow run ... 29 | 30 | Some output where something broke 31 | 32 | - type: textarea 33 | id: files 34 | attributes: 35 | label: Relevant files 36 | description: | 37 | Please drag and drop the relevant files here. Create a `.zip` archive if the extension is not allowed. 38 | Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files. 39 | 40 | - type: textarea 41 | id: system 42 | attributes: 43 | label: System information 44 | description: | 45 | * Nextflow version _(eg. 23.04.0)_ 46 | * Hardware _(eg. HPC, Desktop, Cloud)_ 47 | * Executor _(eg. slurm, local, awsbatch)_ 48 | * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_ 49 | * OS _(eg. CentOS Linux, macOS, Linux Mint)_ 50 | * Version of nf-core/seqinspector _(eg. 1.1, 1.5, 1.8.2)_ 51 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Join nf-core 3 | url: https://nf-co.re/join 4 | about: Please join the nf-core community here 5 | - name: "Slack #seqinspector channel" 6 | url: https://nfcore.slack.com/channels/seqinspector 7 | about: Discussion about the nf-core/seqinspector pipeline 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for the nf-core/seqinspector pipeline 3 | labels: enhancement 4 | body: 5 | - type: textarea 6 | id: description 7 | attributes: 8 | label: Description of feature 9 | description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered. 
10 | validations: 11 | required: true 12 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 13 | 14 | ## PR checklist 15 | 16 | - [ ] This comment contains a description of changes (with reason). 17 | - [ ] If you've fixed a bug or added code that should be tested, add tests! 18 | - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/seqinspector/tree/master/.github/CONTRIBUTING.md) 19 | - [ ] If necessary, also make a PR on the nf-core/seqinspector _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. 20 | - [ ] Make sure your code lints (`nf-core lint`). 21 | - [ ] Ensure the test suite passes (`nf-test test main.nf.test -profile test,docker`). 22 | - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). 23 | - [ ] Usage Documentation in `docs/usage.md` is updated. 24 | - [ ] Output Documentation in `docs/output.md` is updated. 25 | - [ ] `CHANGELOG.md` is updated. 26 | - [ ] `README.md` is updated (including new tool citations and authors/contributors). 27 | -------------------------------------------------------------------------------- /.github/workflows/awsfulltest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS full size tests 2 | # This workflow is triggered on published releases. 3 | # It can be additionally triggered manually with GitHub actions workflow dispatch button. 4 | # It runs the -profile 'test_full' on AWS batch 5 | 6 | on: 7 | release: 8 | types: [published] 9 | workflow_dispatch: 10 | jobs: 11 | run-tower: 12 | name: Run AWS full tests 13 | if: github.repository == 'nf-core/seqinspector' 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Launch workflow via tower 17 | uses: seqeralabs/action-tower-launch@v2 18 | # TODO nf-core: You can customise AWS full pipeline tests as required 19 | # Add full size test data (but still relatively small datasets for few samples) 20 | # on the `test_full.config` test runs with only one set of parameters 21 | with: 22 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 23 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 24 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 25 | revision: ${{ github.sha }} 26 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/seqinspector/work-${{ github.sha }} 27 | parameters: | 28 | { 29 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 30 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/seqinspector/results-${{ github.sha }}" 31 | } 32 | profiles: test_full 33 | 34 | - uses: actions/upload-artifact@v4 35 | with: 36 | name: Tower debug log file 37 | path: | 38 | tower_action_*.log 39 | tower_action_*.json 40 | -------------------------------------------------------------------------------- /.github/workflows/awstest.yml: -------------------------------------------------------------------------------- 1 | name: nf-core AWS test 2 | # This workflow can be triggered manually with the GitHub actions workflow dispatch button. 
3 | # It runs the -profile 'test' on AWS batch 4 | 5 | on: 6 | workflow_dispatch: 7 | jobs: 8 | run-tower: 9 | name: Run AWS tests 10 | if: github.repository == 'nf-core/seqinspector' 11 | runs-on: ubuntu-latest 12 | steps: 13 | # Launch workflow using Tower CLI tool action 14 | - name: Launch workflow via tower 15 | uses: seqeralabs/action-tower-launch@v2 16 | with: 17 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 18 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 19 | compute_env: ${{ secrets.TOWER_COMPUTE_ENV }} 20 | revision: ${{ github.sha }} 21 | workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/seqinspector/work-${{ github.sha }} 22 | parameters: | 23 | { 24 | "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/seqinspector/results-test-${{ github.sha }}" 25 | } 26 | profiles: test 27 | 28 | - uses: actions/upload-artifact@v4 29 | with: 30 | name: Tower debug log file 31 | path: | 32 | tower_action_*.log 33 | tower_action_*.json 34 | -------------------------------------------------------------------------------- /.github/workflows/branch.yml: -------------------------------------------------------------------------------- 1 | name: nf-core branch protection 2 | # This workflow is triggered on PRs to master branch on the repository 3 | # It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev` 4 | on: 5 | pull_request_target: 6 | branches: [master] 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | steps: 12 | # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches 13 | - name: Check PRs 14 | if: github.repository == 'nf-core/seqinspector' 15 | run: | 16 | { [[ ${{github.event.pull_request.head.repo.full_name }} == nf-core/seqinspector ]] && [[ $GITHUB_HEAD_REF == "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] 17 | 18 | # If the above check failed, post a comment on the PR explaining the failure 19 | # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets 20 | - name: Post PR comment 21 | if: failure() 22 | uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2 23 | with: 24 | message: | 25 | ## This PR is against the `master` branch :x: 26 | 27 | * Do not close this PR 28 | * Click _Edit_ and change the `base` to `dev` 29 | * This CI test will remain failed until you push a new commit 30 | 31 | --- 32 | 33 | Hi @${{ github.event.pull_request.user.login }}, 34 | 35 | It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch. 36 | The `master` branch on nf-core repositories should always contain code from the latest release. 37 | Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch. 38 | 39 | You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page. 40 | Note that even after this, the test will continue to show as failing until you push a new commit. 41 | 42 | Thanks again for your contribution! 
43 | repo-token: ${{ secrets.GITHUB_TOKEN }} 44 | allow-repeats: false 45 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: nf-core CI 2 | # This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors 3 | on: 4 | push: 5 | branches: 6 | - dev 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | env: 12 | NXF_ANSI_LOG: false 13 | 14 | concurrency: 15 | group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | test: 20 | name: Run pipeline with test data 21 | # Only run on push if this is the nf-core dev branch (merged PRs) 22 | if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/seqinspector') }}" 23 | runs-on: ubuntu-latest 24 | strategy: 25 | matrix: 26 | NXF_VER: 27 | - "23.04.0" 28 | - "latest-everything" 29 | steps: 30 | - name: Check out pipeline code 31 | uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 32 | 33 | - name: Install Nextflow 34 | uses: nf-core/setup-nextflow@v1 35 | with: 36 | version: "${{ matrix.NXF_VER }}" 37 | 38 | - name: Disk space cleanup 39 | uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1 40 | 41 | - name: Run pipeline with test data 42 | # TODO nf-core: You can customise CI pipeline run tests as required 43 | # For example: adding multiple test runs with different parameters 44 | # Remember that you can parallelise this by using strategy.matrix 45 | run: | 46 | nextflow run ${GITHUB_WORKSPACE} -profile test,docker --outdir ./results 47 | -------------------------------------------------------------------------------- /.github/workflows/clean-up.yml: -------------------------------------------------------------------------------- 1 | name: "Close user-tagged issues and PRs" 2 | on: 3 | schedule: 4 | - cron: "0 0 * * 0" # Once a week 5 | 6 | jobs: 7 | clean-up: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 14 | with: 15 | stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." 16 | stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 17 | close-issue-message: "This issue was closed because it has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then staled for 20 days with no activity." 
18 | days-before-stale: 30 19 | days-before-close: 20 20 | days-before-pr-close: -1 21 | any-of-labels: "awaiting-changes,awaiting-feedback" 22 | exempt-issue-labels: "WIP" 23 | exempt-pr-labels: "WIP" 24 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 25 | -------------------------------------------------------------------------------- /.github/workflows/download_pipeline.yml: -------------------------------------------------------------------------------- 1 | name: Test successful pipeline download with 'nf-core download' 2 | 3 | # Run the workflow when: 4 | # - dispatched manually 5 | # - when a PR is opened or reopened to master branch 6 | # - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev. 7 | on: 8 | workflow_dispatch: 9 | inputs: 10 | testbranch: 11 | description: "The specific branch you wish to utilize for the test execution of nf-core download." 12 | required: true 13 | default: "dev" 14 | pull_request: 15 | types: 16 | - opened 17 | branches: 18 | - master 19 | pull_request_target: 20 | branches: 21 | - master 22 | 23 | env: 24 | NXF_ANSI_LOG: false 25 | 26 | jobs: 27 | download: 28 | runs-on: ubuntu-latest 29 | steps: 30 | - name: Install Nextflow 31 | uses: nf-core/setup-nextflow@v1 32 | 33 | - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 34 | with: 35 | python-version: "3.11" 36 | architecture: "x64" 37 | - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 38 | with: 39 | singularity-version: 3.8.3 40 | 41 | - name: Install dependencies 42 | run: | 43 | python -m pip install --upgrade pip 44 | pip install git+https://github.com/nf-core/tools.git@dev 45 | 46 | - name: Get the repository name and current branch set as environment variable 47 | run: | 48 | echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} 49 | echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} 50 | echo "REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} 51 | 52 | - name: Download the pipeline 53 | env: 54 | NXF_SINGULARITY_CACHEDIR: ./ 55 | run: | 56 | nf-core download ${{ env.REPO_LOWERCASE }} \ 57 | --revision ${{ env.REPO_BRANCH }} \ 58 | --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ 59 | --compress "none" \ 60 | --container-system 'singularity' \ 61 | --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ 62 | --container-cache-utilisation 'amend' \ 63 | --download-configuration 64 | 65 | - name: Inspect download 66 | run: tree ./${{ env.REPOTITLE_LOWERCASE }} 67 | 68 | - name: Run the downloaded pipeline 69 | env: 70 | NXF_SINGULARITY_CACHEDIR: ./ 71 | NXF_SINGULARITY_HOME_MOUNT: true 72 | run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results 73 | -------------------------------------------------------------------------------- /.github/workflows/fix-linting.yml: -------------------------------------------------------------------------------- 1 | name: Fix linting from a comment 2 | on: 3 | issue_comment: 4 | types: [created] 5 | 6 | jobs: 7 | fix-linting: 8 | # Only run if comment is on a PR with the main repo, and if it contains the magic keywords 9 | if: > 10 | contains(github.event.comment.html_url, '/pull/') && 11 | contains(github.event.comment.body, '@nf-core-bot fix linting') && 12 | github.repository == 'nf-core/seqinspector' 13 | runs-on: ubuntu-latest 14 | steps: 15 | # Use the @nf-core-bot token to check out so we can push 
later 16 | - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 17 | with: 18 | token: ${{ secrets.nf_core_bot_auth_token }} 19 | 20 | # indication that the linting is being fixed 21 | - name: React on comment 22 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 23 | with: 24 | comment-id: ${{ github.event.comment.id }} 25 | reactions: eyes 26 | 27 | # Action runs on the issue comment, so we don't get the PR by default 28 | # Use the gh cli to check out the PR 29 | - name: Checkout Pull Request 30 | run: gh pr checkout ${{ github.event.issue.number }} 31 | env: 32 | GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} 33 | 34 | # Install and run pre-commit 35 | - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 36 | with: 37 | python-version: 3.11 38 | 39 | - name: Install pre-commit 40 | run: pip install pre-commit 41 | 42 | - name: Run pre-commit 43 | id: pre-commit 44 | run: pre-commit run --all-files 45 | continue-on-error: true 46 | 47 | # indication that the linting has finished 48 | - name: react if linting finished succesfully 49 | if: steps.pre-commit.outcome == 'success' 50 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 51 | with: 52 | comment-id: ${{ github.event.comment.id }} 53 | reactions: "+1" 54 | 55 | - name: Commit & push changes 56 | id: commit-and-push 57 | if: steps.pre-commit.outcome == 'failure' 58 | run: | 59 | git config user.email "core@nf-co.re" 60 | git config user.name "nf-core-bot" 61 | git config push.default upstream 62 | git add . 63 | git status 64 | git commit -m "[automated] Fix code linting" 65 | git push 66 | 67 | - name: react if linting errors were fixed 68 | id: react-if-fixed 69 | if: steps.commit-and-push.outcome == 'success' 70 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 71 | with: 72 | comment-id: ${{ github.event.comment.id }} 73 | reactions: hooray 74 | 75 | - name: react if linting errors were not fixed 76 | if: steps.commit-and-push.outcome == 'failure' 77 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 78 | with: 79 | comment-id: ${{ github.event.comment.id }} 80 | reactions: confused 81 | 82 | - name: react if linting errors were not fixed 83 | if: steps.commit-and-push.outcome == 'failure' 84 | uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 85 | with: 86 | issue-number: ${{ github.event.issue.number }} 87 | body: | 88 | @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually. 89 | See [CI log](https://github.com/nf-core/seqinspector/actions/runs/${{ github.run_id }}) for more details. 90 | -------------------------------------------------------------------------------- /.github/workflows/linting.yml: -------------------------------------------------------------------------------- 1 | name: nf-core linting 2 | # This workflow is triggered on pushes and PRs to the repository. 3 | # It runs the `nf-core lint` and markdown lint tests to ensure 4 | # that the code meets the nf-core guidelines. 
5 | on: 6 | push: 7 | branches: 8 | - dev 9 | pull_request: 10 | release: 11 | types: [published] 12 | 13 | jobs: 14 | pre-commit: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 18 | 19 | - name: Set up Python 3.11 20 | uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 21 | with: 22 | python-version: 3.11 23 | cache: "pip" 24 | 25 | - name: Install pre-commit 26 | run: pip install pre-commit 27 | 28 | - name: Run pre-commit 29 | run: pre-commit run --all-files 30 | 31 | nf-core: 32 | runs-on: ubuntu-latest 33 | steps: 34 | - name: Check out pipeline code 35 | uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 36 | 37 | - name: Install Nextflow 38 | uses: nf-core/setup-nextflow@v1 39 | 40 | - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 41 | with: 42 | python-version: "3.11" 43 | architecture: "x64" 44 | 45 | - name: Install dependencies 46 | run: | 47 | python -m pip install --upgrade pip 48 | pip install nf-core 49 | 50 | - name: Run nf-core lint 51 | env: 52 | GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }} 53 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 54 | GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }} 55 | run: nf-core -l lint_log.txt lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md 56 | 57 | - name: Save PR number 58 | if: ${{ always() }} 59 | run: echo ${{ github.event.pull_request.number }} > PR_number.txt 60 | 61 | - name: Upload linting log file artifact 62 | if: ${{ always() }} 63 | uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4 64 | with: 65 | name: linting-logs 66 | path: | 67 | lint_log.txt 68 | lint_results.md 69 | PR_number.txt 70 | -------------------------------------------------------------------------------- /.github/workflows/linting_comment.yml: -------------------------------------------------------------------------------- 1 | name: nf-core linting comment 2 | # This workflow is triggered after the linting action is complete 3 | # It posts an automated comment to the PR, even if the PR is coming from a fork 4 | 5 | on: 6 | workflow_run: 7 | workflows: ["nf-core linting"] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Download lint results 14 | uses: dawidd6/action-download-artifact@f6b0bace624032e30a85a8fd9c1a7f8f611f5737 # v3 15 | with: 16 | workflow: linting.yml 17 | workflow_conclusion: completed 18 | 19 | - name: Get PR number 20 | id: pr_number 21 | run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT 22 | 23 | - name: Post PR comment 24 | uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31 # v2 25 | with: 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | number: ${{ steps.pr_number.outputs.pr_number }} 28 | path: linting-logs/lint_results.md 29 | -------------------------------------------------------------------------------- /.github/workflows/release-announcements.yml: -------------------------------------------------------------------------------- 1 | name: release-announcements 2 | # Automatic release toot and tweet anouncements 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | toot: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: get topics and convert to hashtags 13 | id: get_topics 14 | run: | 15 | curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | 
.topics[]' | awk '{print "#"$0}' | tr '\n' ' ' >> $GITHUB_OUTPUT 16 | 17 | - uses: rzr/fediverse-action@master 18 | with: 19 | access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }} 20 | host: "mstdn.science" # custom host if not "mastodon.social" (default) 21 | # GitHub event payload 22 | # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release 23 | message: | 24 | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! 25 | 26 | Please see the changelog: ${{ github.event.release.html_url }} 27 | 28 | ${{ steps.get_topics.outputs.GITHUB_OUTPUT }} #nfcore #openscience #nextflow #bioinformatics 29 | 30 | send-tweet: 31 | runs-on: ubuntu-latest 32 | 33 | steps: 34 | - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 35 | with: 36 | python-version: "3.10" 37 | - name: Install dependencies 38 | run: pip install tweepy==4.14.0 39 | - name: Send tweet 40 | shell: python 41 | run: | 42 | import os 43 | import tweepy 44 | 45 | client = tweepy.Client( 46 | access_token=os.getenv("TWITTER_ACCESS_TOKEN"), 47 | access_token_secret=os.getenv("TWITTER_ACCESS_TOKEN_SECRET"), 48 | consumer_key=os.getenv("TWITTER_CONSUMER_KEY"), 49 | consumer_secret=os.getenv("TWITTER_CONSUMER_SECRET"), 50 | ) 51 | tweet = os.getenv("TWEET") 52 | client.create_tweet(text=tweet) 53 | env: 54 | TWEET: | 55 | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! 56 | 57 | Please see the changelog: ${{ github.event.release.html_url }} 58 | TWITTER_CONSUMER_KEY: ${{ secrets.TWITTER_CONSUMER_KEY }} 59 | TWITTER_CONSUMER_SECRET: ${{ secrets.TWITTER_CONSUMER_SECRET }} 60 | TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }} 61 | TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }} 62 | 63 | bsky-post: 64 | runs-on: ubuntu-latest 65 | steps: 66 | - uses: zentered/bluesky-post-action@80dbe0a7697de18c15ad22f4619919ceb5ccf597 # v0.1.0 67 | with: 68 | post: | 69 | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! 
70 | 71 | Please see the changelog: ${{ github.event.release.html_url }} 72 | env: 73 | BSKY_IDENTIFIER: ${{ secrets.BSKY_IDENTIFIER }} 74 | BSKY_PASSWORD: ${{ secrets.BSKY_PASSWORD }} 75 | # 76 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .nextflow* 2 | work/ 3 | data/ 4 | results/ 5 | .DS_Store 6 | testing/ 7 | testing* 8 | *.pyc 9 | -------------------------------------------------------------------------------- /.gitpod.yml: -------------------------------------------------------------------------------- 1 | image: nfcore/gitpod:latest 2 | tasks: 3 | - name: Update Nextflow and setup pre-commit 4 | command: | 5 | pre-commit install --install-hooks 6 | nextflow self-update 7 | - name: unset JAVA_TOOL_OPTIONS 8 | command: | 9 | unset JAVA_TOOL_OPTIONS 10 | 11 | vscode: 12 | extensions: # based on nf-core.nf-core-extensionpack 13 | - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code 14 | - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files 15 | - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar 16 | - mechatroner.rainbow-csv # Highlight columns in csv files in different colors 17 | # - nextflow.nextflow # Nextflow syntax highlighting 18 | - oderwat.indent-rainbow # Highlight indentation level 19 | - streetsidesoftware.code-spell-checker # Spelling checker for source code 20 | - charliermarsh.ruff # Code linter Ruff 21 | -------------------------------------------------------------------------------- /.nf-core.yml: -------------------------------------------------------------------------------- 1 | repository_type: pipeline 2 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/mirrors-prettier 3 | rev: "v3.1.0" 4 | hooks: 5 | - id: prettier 6 | - repo: https://github.com/editorconfig-checker/editorconfig-checker.python 7 | rev: "2.7.3" 8 | hooks: 9 | - id: editorconfig-checker 10 | alias: ec 11 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | email_template.html 2 | adaptivecard.json 3 | slackreport.json 4 | .nextflow* 5 | work/ 6 | data/ 7 | results/ 8 | .DS_Store 9 | testing/ 10 | testing* 11 | *.pyc 12 | bin/ 13 | -------------------------------------------------------------------------------- /.prettierrc.yml: -------------------------------------------------------------------------------- 1 | printWidth: 120 2 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # nf-core/seqinspector: Changelog 2 | 3 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) 4 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 5 | 6 | ## v1.0dev - [date] 7 | 8 | Initial release of nf-core/seqinspector, created with the [nf-core](https://nf-co.re/) template. 
9 | 10 | ### `Added` 11 | 12 | ### `Fixed` 13 | 14 | ### `Dependencies` 15 | 16 | ### `Deprecated` 17 | -------------------------------------------------------------------------------- /CITATIONS.md: -------------------------------------------------------------------------------- 1 | # nf-core/seqinspector: Citations 2 | 3 | ## [nf-core](https://pubmed.ncbi.nlm.nih.gov/32055031/) 4 | 5 | > Ewels PA, Peltzer A, Fillinger S, Patel H, Alneberg J, Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. The nf-core framework for community-curated bioinformatics pipelines. Nat Biotechnol. 2020 Mar;38(3):276-278. doi: 10.1038/s41587-020-0439-x. PubMed PMID: 32055031. 6 | 7 | ## [Nextflow](https://pubmed.ncbi.nlm.nih.gov/28398311/) 8 | 9 | > Di Tommaso P, Chatzou M, Floden EW, Barja PP, Palumbo E, Notredame C. Nextflow enables reproducible computational workflows. Nat Biotechnol. 2017 Apr 11;35(4):316-319. doi: 10.1038/nbt.3820. PubMed PMID: 28398311. 10 | 11 | ## Pipeline tools 12 | 13 | - [FastQC](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/) 14 | 15 | > Andrews, S. (2010). FastQC: A Quality Control Tool for High Throughput Sequence Data [Online]. 16 | 17 | - [MultiQC](https://pubmed.ncbi.nlm.nih.gov/27312411/) 18 | 19 | > Ewels P, Magnusson M, Lundin S, Käller M. MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics. 2016 Oct 1;32(19):3047-8. doi: 10.1093/bioinformatics/btw354. Epub 2016 Jun 16. PubMed PMID: 27312411; PubMed Central PMCID: PMC5039924. 20 | 21 | ## Software packaging/containerisation tools 22 | 23 | - [Anaconda](https://anaconda.com) 24 | 25 | > Anaconda Software Distribution. Computer software. Vers. 2-2.4.0. Anaconda, Nov. 2016. Web. 26 | 27 | - [Bioconda](https://pubmed.ncbi.nlm.nih.gov/29967506/) 28 | 29 | > Grüning B, Dale R, Sjödin A, Chapman BA, Rowe J, Tomkins-Tinch CH, Valieris R, Köster J; Bioconda Team. Bioconda: sustainable and comprehensive software distribution for the life sciences. Nat Methods. 2018 Jul;15(7):475-476. doi: 10.1038/s41592-018-0046-7. PubMed PMID: 29967506. 30 | 31 | - [BioContainers](https://pubmed.ncbi.nlm.nih.gov/28379341/) 32 | 33 | > da Veiga Leprevost F, Grüning B, Aflitos SA, Röst HL, Uszkoreit J, Barsnes H, Vaudel M, Moreno P, Gatto L, Weber J, Bai M, Jimenez RC, Sachsenberg T, Pfeuffer J, Alvarez RV, Griss J, Nesvizhskii AI, Perez-Riverol Y. BioContainers: an open-source and community-driven framework for software standardization. Bioinformatics. 2017 Aug 15;33(16):2580-2582. doi: 10.1093/bioinformatics/btx192. PubMed PMID: 28379341; PubMed Central PMCID: PMC5870671. 34 | 35 | - [Docker](https://dl.acm.org/doi/10.5555/2600239.2600241) 36 | 37 | > Merkel, D. (2014). Docker: lightweight linux containers for consistent development and deployment. Linux Journal, 2014(239), 2. doi: 10.5555/2600239.2600241. 38 | 39 | - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) 40 | 41 | > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. 
42 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct at nf-core (v1.4) 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open, collaborative, and welcoming environment, we as contributors and maintainers of nf-core pledge to making participation in our projects and community a harassment-free experience for everyone, regardless of: 6 | 7 | - Age 8 | - Ability 9 | - Body size 10 | - Caste 11 | - Familial status 12 | - Gender identity and expression 13 | - Geographical location 14 | - Level of experience 15 | - Nationality and national origins 16 | - Native language 17 | - Neurodiversity 18 | - Race or ethnicity 19 | - Religion 20 | - Sexual identity and orientation 21 | - Socioeconomic status 22 | 23 | Please note that the list above is alphabetised and is therefore not ranked in any order of preference or importance. 24 | 25 | ## Preamble 26 | 27 | :::note 28 | This Code of Conduct (CoC) has been drafted by Renuka Kudva, Cris Tuñí, and Michael Heuer, with input from the nf-core Core Team and Susanna Marquez from the nf-core community. "We", in this document, refers to the Safety Officers and members of the nf-core Core Team, both of whom are deemed to be members of the nf-core community and are therefore required to abide by this Code of Conduct. This document will be amended periodically to keep it up-to-date. In case of any dispute, the most current version will apply. 29 | ::: 30 | 31 | An up-to-date list of members of the nf-core core team can be found [here](https://nf-co.re/about). 32 | 33 | Our Safety Officers are Saba Nafees, Cris Tuñí, and Michael Heuer. 34 | 35 | nf-core is a young and growing community that welcomes contributions from anyone with a shared vision for [Open Science Policies](https://www.fosteropenscience.eu/taxonomy/term/8). Open science policies encompass inclusive behaviours and we strive to build and maintain a safe and inclusive environment for all individuals. 36 | 37 | We have therefore adopted this CoC, which we require all members of our community and attendees of nf-core events to adhere to in all our workspaces at all times. Workspaces include, but are not limited to, Slack, meetings on Zoom, gather.town, YouTube live etc. 38 | 39 | Our CoC will be strictly enforced and the nf-core team reserves the right to exclude participants who do not comply with our guidelines from our workspaces and future nf-core activities. 40 | 41 | We ask all members of our community to help maintain supportive and productive workspaces and to avoid behaviours that can make individuals feel unsafe or unwelcome. Please help us maintain and uphold this CoC. 42 | 43 | Questions, concerns, or ideas on what we can include? Contact members of the Safety Team on Slack or email safety [at] nf-co [dot] re. 44 | 45 | ## Our Responsibilities 46 | 47 | Members of the Safety Team (the Safety Officers) are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behaviour. 
48 | 49 | The Safety Team, in consultation with the nf-core core team, have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this CoC, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 50 | 51 | Members of the core team or the Safety Team who violate the CoC will be required to recuse themselves pending investigation. They will not have access to any reports of the violations and will be subject to the same actions as others in violation of the CoC. 52 | 53 | ## When and where does this Code of Conduct apply? 54 | 55 | Participation in the nf-core community is contingent on following these guidelines in all our workspaces and events, such as hackathons, workshops, bytesize, and collaborative workspaces on gather.town. These guidelines include, but are not limited to, the following (listed alphabetically and therefore in no order of preference): 56 | 57 | - Communicating with an official project email address. 58 | - Communicating with community members within the nf-core Slack channel. 59 | - Participating in hackathons organised by nf-core (both online and in-person events). 60 | - Participating in collaborative work on GitHub, Google Suite, community calls, mentorship meetings, email correspondence, and on the nf-core gather.town workspace. 61 | - Participating in workshops, training, and seminar series organised by nf-core (both online and in-person events). This applies to events hosted on web-based platforms such as Zoom, gather.town, Jitsi, YouTube live etc. 62 | - Representing nf-core on social media. This includes both official and personal accounts. 63 | 64 | ## nf-core cares 😊 65 | 66 | nf-core's CoC and expectations of respectful behaviours for all participants (including organisers and the nf-core team) include, but are not limited to, the following (listed in alphabetical order): 67 | 68 | - Ask for consent before sharing another community member’s personal information (including photographs) on social media. 69 | - Be respectful of differing viewpoints and experiences. We are all here to learn from one another and a difference in opinion can present a good learning opportunity. 70 | - Celebrate your accomplishments! (Get creative with your use of emojis 🎉 🥳 💯 🙌 !) 71 | - Demonstrate empathy towards other community members. (We don’t all have the same amount of time to dedicate to nf-core. If tasks are pending, don’t hesitate to gently remind members of your team. If you are leading a task, ask for help if you feel overwhelmed.) 72 | - Engage with and enquire after others. (This is especially important given the geographically remote nature of the nf-core community, so let’s do this the best we can) 73 | - Focus on what is best for the team and the community. (When in doubt, ask) 74 | - Accept feedback, yet be unafraid to question, deliberate, and learn. 75 | - Introduce yourself to members of the community. (We’ve all been outsiders and we know that talking to strangers can be hard for some, but remember we’re interested in getting to know you and your visions for open science!) 76 | - Show appreciation and **provide clear feedback**. (This is especially important because we don’t see each other in person and it can be harder to interpret subtleties. 
Also remember that not everyone understands a certain language to the same extent as you do, so **be clear in your communication to be kind.**) 77 | - Take breaks when you feel like you need them. 78 | - Use welcoming and inclusive language. (Participants are encouraged to display their chosen pronouns on Zoom or in communication on Slack) 79 | 80 | ## nf-core frowns on 😕 81 | 82 | The following behaviours from any participants within the nf-core community (including the organisers) will be considered unacceptable under this CoC. Engaging or advocating for any of the following could result in expulsion from nf-core workspaces: 83 | 84 | - Deliberate intimidation, stalking or following and sustained disruption of communication among participants of the community. This includes hijacking shared screens through actions such as using the annotate tool in conferencing software such as Zoom. 85 | - “Doxing” i.e. posting (or threatening to post) another person’s personal identifying information online. 86 | - Spamming or trolling of individuals on social media. 87 | - Use of sexual or discriminatory imagery, comments, jokes, or unwelcome sexual attention. 88 | - Verbal and text comments that reinforce social structures of domination related to gender, gender identity and expression, sexual orientation, ability, physical appearance, body size, race, age, religion, or work experience. 89 | 90 | ### Online Trolling 91 | 92 | The majority of nf-core interactions and events are held online. Unfortunately, holding events online comes with the risk of online trolling. This is unacceptable — reports of such behaviour will be taken very seriously and perpetrators will be excluded from activities immediately. 93 | 94 | All community members are **required** to ask members of the group they are working with for explicit consent prior to taking screenshots of individuals during video calls. 95 | 96 | ## Procedures for reporting CoC violations 97 | 98 | If someone makes you feel uncomfortable through their behaviours or actions, report it as soon as possible. 99 | 100 | You can reach out to members of the Safety Team (Saba Nafees, Cris Tuñí, and Michael Heuer) on Slack. Alternatively, contact a member of the nf-core core team [nf-core core team](https://nf-co.re/about), and they will forward your concerns to the Safety Team. 101 | 102 | Issues directly concerning members of the Core Team or the Safety Team will be dealt with by other members of the core team and the safety manager — possible conflicts of interest will be taken into account. nf-core is also in discussions about having an ombudsperson and details will be shared in due course. 103 | 104 | All reports will be handled with the utmost discretion and confidentiality. 105 | 106 | You can also report any CoC violations to safety [at] nf-co [dot] re. In your email report, please do your best to include: 107 | 108 | - Your contact information. 109 | - Identifying information (e.g. names, nicknames, pseudonyms) of the participant who has violated the Code of Conduct. 110 | - The behaviour that was in violation and the circumstances surrounding the incident. 111 | - The approximate time of the behaviour (if different than the time the report was made). 112 | - Other people involved in the incident, if applicable. 113 | - If you believe the incident is ongoing. 114 | - If there is a publicly available record (e.g. mailing list record, a screenshot). 115 | - Any additional information. 
116 | 117 | After you file a report, one or more members of our Safety Team will contact you to follow up on your report. 118 | 119 | ## Who will read and handle reports 120 | 121 | All reports will be read and handled by the members of the Safety Team at nf-core. 122 | 123 | If members of the Safety Team are deemed to have a conflict of interest with a report, they will be required to recuse themselves as per our Code of Conduct and will not have access to any follow-ups. 124 | 125 | To keep this first report confidential from any of the Safety Team members, please submit your first report by direct messaging on Slack/direct email to any of the nf-core members you are comfortable disclosing the information to, and be explicit about which member(s) you do not consent to sharing the information with. 126 | 127 | ## Reviewing reports 128 | 129 | After receiving the report, members of the Safety Team will review the incident report to determine whether immediate action is required, for example, whether there is immediate threat to participants’ safety. 130 | 131 | The Safety Team, in consultation with members of the nf-core core team, will assess the information to determine whether the report constitutes a Code of Conduct violation, for them to decide on a course of action. 132 | 133 | In the case of insufficient information, one or more members of the Safety Team may contact the reporter, the reportee, or any other attendees to obtain more information. 134 | 135 | Once additional information is gathered, the Safety Team will collectively review and decide on the best course of action to take, if any. The Safety Team reserves the right to not act on a report. 136 | 137 | ## Confidentiality 138 | 139 | All reports, and any additional information included, are only shared with the team of safety officers (and possibly members of the core team, in case the safety officer is in violation of the CoC). We will respect confidentiality requests for the purpose of protecting victims of abuse. 140 | 141 | We will not name harassment victims, beyond discussions between the safety officer and members of the nf-core team, without the explicit consent of the individuals involved. 142 | 143 | ## Enforcement 144 | 145 | Actions taken by the nf-core’s Safety Team may include, but are not limited to: 146 | 147 | - Asking anyone to stop a behaviour. 148 | - Asking anyone to leave the event and online spaces either temporarily, for the remainder of the event, or permanently. 149 | - Removing access to the gather.town and Slack, either temporarily or permanently. 150 | - Communicating to all participants to reinforce our expectations for conduct and remind what is unacceptable behaviour; this may be public for practical reasons. 151 | - Communicating to all participants that an incident has taken place and how we will act or have acted — this may be for the purpose of letting event participants know we are aware of and dealing with the incident. 152 | - Banning anyone from participating in nf-core-managed spaces, future events, and activities, either temporarily or permanently. 153 | - No action. 
154 | 155 | ## Attribution and Acknowledgements 156 | 157 | - The [Contributor Covenant, version 1.4](http://contributor-covenant.org/version/1/4) 158 | - The [OpenCon 2017 Code of Conduct](http://www.opencon2017.org/code_of_conduct) (CC BY 4.0 OpenCon organisers, SPARC and Right to Research Coalition) 159 | - The [eLife innovation sprint 2020 Code of Conduct](https://sprint.elifesciences.org/code-of-conduct/) 160 | - The [Mozilla Community Participation Guidelines v3.1](https://www.mozilla.org/en-US/about/governance/policies/participation/) (version 3.1, CC BY-SA 3.0 Mozilla) 161 | 162 | ## Changelog 163 | 164 | ### v1.4 - February 8th, 2022 165 | 166 | - Included a new member of the Safety Team. Corrected a typographical error in the text. 167 | 168 | ### v1.3 - December 10th, 2021 169 | 170 | - Added a statement that the CoC applies to nf-core gather.town workspaces. Corrected typographical errors in the text. 171 | 172 | ### v1.2 - November 12th, 2021 173 | 174 | - Removed information specific to reporting CoC violations at the Hackathon in October 2021. 175 | 176 | ### v1.1 - October 14th, 2021 177 | 178 | - Updated with names of new Safety Officers and specific information for the hackathon in October 2021. 179 | 180 | ### v1.0 - March 15th, 2021 181 | 182 | - Complete rewrite from original [Contributor Covenant](http://contributor-covenant.org/) CoC. 183 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Adrien Coulier 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 | 4 | nf-core/seqinspector 5 | 6 |

7 | 8 | [![GitHub Actions CI Status](https://github.com/nf-core/seqinspector/actions/workflows/ci.yml/badge.svg)](https://github.com/nf-core/seqinspector/actions/workflows/ci.yml) 9 | [![GitHub Actions Linting Status](https://github.com/nf-core/seqinspector/actions/workflows/linting.yml/badge.svg)](https://github.com/nf-core/seqinspector/actions/workflows/linting.yml)[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/seqinspector/results)[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX) 10 | [![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com) 11 | 12 | [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.04.0-23aa62.svg)](https://www.nextflow.io/) 13 | [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/) 14 | [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/) 15 | [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/) 16 | [![Launch on Seqera Platform](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Seqera%20Platform-%234256e7)](https://tower.nf/launch?pipeline=https://github.com/nf-core/seqinspector) 17 | 18 | [![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23seqinspector-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/seqinspector)[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core)[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core)[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) 19 | 20 | ## Introduction 21 | 22 | **nf-core/seqinspector** is a bioinformatics pipeline that ... 23 | 24 | 29 | 30 | 32 | 33 | 34 | 1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)) 35 | 2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)) 36 | 37 | ## Usage 38 | 39 | > [!NOTE] 40 | > If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data. 41 | 42 | 57 | 58 | Now, you can run the pipeline using: 59 | 60 | 61 | 62 | ```bash 63 | nextflow run nf-core/seqinspector \ 64 | -profile \ 65 | --input samplesheet.csv \ 66 | --outdir 67 | ``` 68 | 69 | > [!WARNING] 70 | > Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_; 71 | > see [docs](https://nf-co.re/usage/configuration#custom-configuration-files). 72 | 73 | For more details and further functionality, please refer to the [usage documentation](https://nf-co.re/seqinspector/usage) and the [parameter documentation](https://nf-co.re/seqinspector/parameters). 
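For reference, the expected `--input` samplesheet matches the example bundled with the pipeline at [`assets/samplesheet.csv`](assets/samplesheet.csv). The paths below are placeholders; per the input schema, only `sample`, `lane` and `fastq_1` are required, so, for example, `fastq_2` is left empty in the single-end row:

```csv title="samplesheet.csv"
sample,lane,project,fastq_1,fastq_2,rundir
SAMPLE_PAIRED_END,1,P001,/path/to/fastq/files/AEG588A1_S1_L002_R1_001.fastq.gz,/path/to/fastq/files/AEG588A1_S1_L002_R2_001.fastq.gz,/path/to/rundir
SAMPLE_SINGLE_END,2,P002,/path/to/fastq/files/AEG588A4_S4_L003_R1_001.fastq.gz,,/path/to/rundir
```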
74 | 75 | ## Pipeline output 76 | 77 | To see the results of an example test run with a full size dataset refer to the [results](https://nf-co.re/seqinspector/results) tab on the nf-core website pipeline page. 78 | For more details about the output files and reports, please refer to the 79 | [output documentation](https://nf-co.re/seqinspector/output). 80 | 81 | ## Credits 82 | 83 | nf-core/seqinspector was originally written by Adrien Coulier. 84 | 85 | We thank the following people for their extensive assistance in the development of this pipeline: 86 | 87 | 88 | 89 | ## Contributions and Support 90 | 91 | If you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md). 92 | 93 | For further information or help, don't hesitate to get in touch on the [Slack `#seqinspector` channel](https://nfcore.slack.com/channels/seqinspector) (you can join with [this invite](https://nf-co.re/join/slack)). 94 | 95 | ## Citations 96 | 97 | 98 | 99 | 100 | 101 | 102 | An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file. 103 | 104 | You can cite the `nf-core` publication as follows: 105 | 106 | > **The nf-core framework for community-curated bioinformatics pipelines.** 107 | > 108 | > Philip Ewels, Alexander Peltzer, Sven Fillinger, Harshil Patel, Johannes Alneberg, Andreas Wilm, Maxime Ulysse Garcia, Paolo Di Tommaso & Sven Nahnsen. 109 | > 110 | > _Nat Biotechnol._ 2020 Feb 13. doi: [10.1038/s41587-020-0439-x](https://dx.doi.org/10.1038/s41587-020-0439-x). 111 | -------------------------------------------------------------------------------- /assets/adaptivecard.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "message", 3 | "attachments": [ 4 | { 5 | "contentType": "application/vnd.microsoft.card.adaptive", 6 | "contentUrl": null, 7 | "content": { 8 | "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json", 9 | "msteams": { 10 | "width": "Full" 11 | }, 12 | "type": "AdaptiveCard", 13 | "version": "1.2", 14 | "body": [ 15 | { 16 | "type": "TextBlock", 17 | "size": "Large", 18 | "weight": "Bolder", 19 | "color": "<% if (success) { %>Good<% } else { %>Attention<%} %>", 20 | "text": "nf-core/seqinspector v${version} - ${runName}", 21 | "wrap": true 22 | }, 23 | { 24 | "type": "TextBlock", 25 | "spacing": "None", 26 | "text": "Completed at ${dateComplete} (duration: ${duration})", 27 | "isSubtle": true, 28 | "wrap": true 29 | }, 30 | { 31 | "type": "TextBlock", 32 | "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors. 
The full error message was: ${errorReport}.<% } %>", 33 | "wrap": true 34 | }, 35 | { 36 | "type": "TextBlock", 37 | "text": "The command used to launch the workflow was as follows:", 38 | "wrap": true 39 | }, 40 | { 41 | "type": "TextBlock", 42 | "text": "${commandLine}", 43 | "isSubtle": true, 44 | "wrap": true 45 | } 46 | ], 47 | "actions": [ 48 | { 49 | "type": "Action.ShowCard", 50 | "title": "Pipeline Configuration", 51 | "card": { 52 | "type": "AdaptiveCard", 53 | "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json", 54 | "body": [ 55 | { 56 | "type": "FactSet", 57 | "facts": [<% out << summary.collect{ k,v -> "{\"title\": \"$k\", \"value\" : \"$v\"}"}.join(",\n") %> 58 | ] 59 | } 60 | ] 61 | } 62 | } 63 | ] 64 | } 65 | } 66 | ] 67 | } 68 | -------------------------------------------------------------------------------- /assets/email_template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | nf-core/seqinspector Pipeline Report 9 | 10 | 11 |
12 | 13 | 14 | 15 |

nf-core/seqinspector ${version}

16 |

Run Name: $runName

17 | 18 | <% if (!success){ 19 | out << """ 20 |
21 |

nf-core/seqinspector execution completed unsuccessfully!

22 |

The exit status of the task that caused the workflow execution to fail was: $exitStatus.

23 |

The full error message was:

24 |
${errorReport}
25 |
26 | """ 27 | } else { 28 | out << """ 29 |
30 | nf-core/seqinspector execution completed successfully! 31 |
32 | """ 33 | } 34 | %> 35 | 36 |

The workflow was completed at $dateComplete (duration: $duration)

37 |

The command used to launch the workflow was as follows:

38 |
$commandLine
39 | 40 |

Pipeline Configuration:

41 | 42 | 43 | <% out << summary.collect{ k,v -> "" }.join("\n") %> 44 | 45 |
$k
$v
46 | 47 |

nf-core/seqinspector

48 |

https://github.com/nf-core/seqinspector

49 | 50 |
51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /assets/email_template.txt: -------------------------------------------------------------------------------- 1 | ---------------------------------------------------- 2 | ,--./,-. 3 | ___ __ __ __ ___ /,-._.--~\\ 4 | |\\ | |__ __ / ` / \\ |__) |__ } { 5 | | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, 6 | `._,._,' 7 | nf-core/seqinspector ${version} 8 | ---------------------------------------------------- 9 | Run Name: $runName 10 | 11 | <% if (success){ 12 | out << "## nf-core/seqinspector execution completed successfully! ##" 13 | } else { 14 | out << """#################################################### 15 | ## nf-core/seqinspector execution completed unsuccessfully! ## 16 | #################################################### 17 | The exit status of the task that caused the workflow execution to fail was: $exitStatus. 18 | The full error message was: 19 | 20 | ${errorReport} 21 | """ 22 | } %> 23 | 24 | 25 | The workflow was completed at $dateComplete (duration: $duration) 26 | 27 | The command used to launch the workflow was as follows: 28 | 29 | $commandLine 30 | 31 | 32 | 33 | Pipeline Configuration: 34 | ----------------------- 35 | <% out << summary.collect{ k,v -> " - $k: $v" }.join("\n") %> 36 | 37 | -- 38 | nf-core/seqinspector 39 | https://github.com/nf-core/seqinspector 40 | -------------------------------------------------------------------------------- /assets/methods_description_template.yml: -------------------------------------------------------------------------------- 1 | id: "nf-core-seqinspector-methods-description" 2 | description: "Suggested text and references to use when describing pipeline usage within the methods section of a publication." 3 | section_name: "nf-core/seqinspector Methods Description" 4 | section_href: "https://github.com/nf-core/seqinspector" 5 | plot_type: "html" 6 | ## TODO nf-core: Update the HTML below to your preferred methods description, e.g. add publication citation for this pipeline 7 | ## You inject any metadata in the Nextflow '${workflow}' object 8 | data: | 9 |

Methods

10 |

Data was processed using nf-core/seqinspector v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (Ewels et al., 2020), utilising reproducible software environments from the Bioconda (Grüning et al., 2018) and Biocontainers (da Veiga Leprevost et al., 2017) projects.

11 |

The pipeline was executed with Nextflow v${workflow.nextflow.version} (Di Tommaso et al., 2017) with the following command:

12 |
${workflow.commandLine}
13 |

${tool_citations}

14 |

References

15 |
    16 |
  • Di Tommaso, P., Chatzou, M., Floden, E. W., Barja, P. P., Palumbo, E., & Notredame, C. (2017). Nextflow enables reproducible computational workflows. Nature Biotechnology, 35(4), 316-319. doi: 10.1038/nbt.3820
  • 17 |
  • Ewels, P. A., Peltzer, A., Fillinger, S., Patel, H., Alneberg, J., Wilm, A., Garcia, M. U., Di Tommaso, P., & Nahnsen, S. (2020). The nf-core framework for community-curated bioinformatics pipelines. Nature Biotechnology, 38(3), 276-278. doi: 10.1038/s41587-020-0439-x
  • 18 |
  • Grüning, B., Dale, R., Sjödin, A., Chapman, B. A., Rowe, J., Tomkins-Tinch, C. H., Valieris, R., Köster, J., & Bioconda Team. (2018). Bioconda: sustainable and comprehensive software distribution for the life sciences. Nature Methods, 15(7), 475–476. doi: 10.1038/s41592-018-0046-7
  • 19 |
  • da Veiga Leprevost, F., Grüning, B. A., Alves Aflitos, S., Röst, H. L., Uszkoreit, J., Barsnes, H., Vaudel, M., Moreno, P., Gatto, L., Weber, J., Bai, M., Jimenez, R. C., Sachsenberg, T., Pfeuffer, J., Vera Alvarez, R., Griss, J., Nesvizhskii, A. I., & Perez-Riverol, Y. (2017). BioContainers: an open-source and community-driven framework for software standardization. Bioinformatics (Oxford, England), 33(16), 2580–2582. doi: 10.1093/bioinformatics/btx192
  • 20 | ${tool_bibliography} 21 |
22 |
23 |
Notes:
24 |
    25 | ${nodoi_text} 26 |
  • The command above does not include parameters contained in any configs or profiles that may have been used. Ensure the config file is also uploaded with your publication!
  • 27 |
  • You should also cite all software used within this run. Check the "Software Versions" of this report to get version information.
  • 28 |
29 |
30 | -------------------------------------------------------------------------------- /assets/multiqc_config.yml: -------------------------------------------------------------------------------- 1 | report_comment: > 2 | This report has been generated by the nf-core/seqinspector 3 | analysis pipeline. For information about how to interpret these results, please see the 4 | documentation. 5 | report_section_order: 6 | "nf-core-seqinspector-methods-description": 7 | order: -1000 8 | software_versions: 9 | order: -1001 10 | "nf-core-seqinspector-summary": 11 | order: -1002 12 | 13 | export_plots: true 14 | 15 | disable_version_detection: true 16 | -------------------------------------------------------------------------------- /assets/nf-core-seqinspector_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/seqinspector/d169eefc354c9aa7232b5abcbed3f67be3ac996f/assets/nf-core-seqinspector_logo_light.png -------------------------------------------------------------------------------- /assets/samplesheet.csv: -------------------------------------------------------------------------------- 1 | sample,lane,project,fastq_1,fastq_2,rundir 2 | SAMPLE_PAIRED_END,1,P001,/path/to/fastq/files/AEG588A1_S1_L002_R1_001.fastq.gz,/path/to/fastq/files/AEG588A1_S1_L002_R2_001.fastq.gz,/path/to/rundir 3 | SAMPLE_SINGLE_END,2,P002,/path/to/fastq/files/AEG588A4_S4_L003_R1_001.fastq.gz,,/path/to/rundir 4 | -------------------------------------------------------------------------------- /assets/schema_input.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema", 3 | "$id": "https://raw.githubusercontent.com/nf-core/seqinspector/master/assets/schema_input.json", 4 | "title": "nf-core/seqinspector pipeline - params.input schema", 5 | "description": "Schema for the file provided with params.input", 6 | "type": "array", 7 | "items": { 8 | "type": "object", 9 | "properties": { 10 | "sample": { 11 | "type": "string", 12 | "pattern": "^\\S+$", 13 | "errorMessage": "Sample name must be provided and cannot contain spaces", 14 | "meta": ["sample"] 15 | }, 16 | "lane": { 17 | "type": "integer", 18 | "pattern": "^\\d+$", 19 | "errorMessage": "Lane ID must be a number", 20 | "meta": ["lane"] 21 | }, 22 | "project": { 23 | "type": "string", 24 | "pattern": "^\\S+$", 25 | "errorMessage": "Project ID cannot contain spaces", 26 | "meta": ["project"] 27 | }, 28 | "fastq_1": { 29 | "type": "string", 30 | "format": "file-path", 31 | "exists": true, 32 | "pattern": "^\\S+\\.f(ast)?q\\.gz$", 33 | "errorMessage": "FastQ file for reads 1 must be provided, cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" 34 | }, 35 | "fastq_2": { 36 | "type": "string", 37 | "format": "file-path", 38 | "exists": true, 39 | "pattern": "^\\S+\\.f(ast)?q\\.gz$", 40 | "errorMessage": "FastQ file for reads 2 cannot contain spaces and must have extension '.fq.gz' or '.fastq.gz'" 41 | }, 42 | "rundir": { 43 | "type": "string", 44 | "format": "path", 45 | "exists": true, 46 | "errorMessage": "Run directory must be a path", 47 | "meta": ["rundir"] 48 | } 49 | }, 50 | "required": ["sample", "lane", "fastq_1"], 51 | "dependentRequired": { 52 | "fastq_2": ["fastq_1"] 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /assets/sendmail_template.txt: 
-------------------------------------------------------------------------------- 1 | To: $email 2 | Subject: $subject 3 | Mime-Version: 1.0 4 | Content-Type: multipart/related;boundary="nfcoremimeboundary" 5 | 6 | --nfcoremimeboundary 7 | Content-Type: text/html; charset=utf-8 8 | 9 | $email_html 10 | 11 | --nfcoremimeboundary 12 | Content-Type: image/png;name="nf-core-seqinspector_logo.png" 13 | Content-Transfer-Encoding: base64 14 | Content-ID: 15 | Content-Disposition: inline; filename="nf-core-seqinspector_logo_light.png" 16 | 17 | <% out << new File("$projectDir/assets/nf-core-seqinspector_logo_light.png"). 18 | bytes. 19 | encodeBase64(). 20 | toString(). 21 | tokenize( '\n' )*. 22 | toList()*. 23 | collate( 76 )*. 24 | collect { it.join() }. 25 | flatten(). 26 | join( '\n' ) %> 27 | 28 | <% 29 | if (mqcFile){ 30 | def mqcFileObj = new File("$mqcFile") 31 | if (mqcFileObj.length() < mqcMaxSize){ 32 | out << """ 33 | --nfcoremimeboundary 34 | Content-Type: text/html; name=\"multiqc_report\" 35 | Content-Transfer-Encoding: base64 36 | Content-ID: 37 | Content-Disposition: attachment; filename=\"${mqcFileObj.getName()}\" 38 | 39 | ${mqcFileObj. 40 | bytes. 41 | encodeBase64(). 42 | toString(). 43 | tokenize( '\n' )*. 44 | toList()*. 45 | collate( 76 )*. 46 | collect { it.join() }. 47 | flatten(). 48 | join( '\n' )} 49 | """ 50 | }} 51 | %> 52 | 53 | --nfcoremimeboundary-- 54 | -------------------------------------------------------------------------------- /assets/slackreport.json: -------------------------------------------------------------------------------- 1 | { 2 | "attachments": [ 3 | { 4 | "fallback": "Plain-text summary of the attachment.", 5 | "color": "<% if (success) { %>good<% } else { %>danger<%} %>", 6 | "author_name": "nf-core/seqinspector ${version} - ${runName}", 7 | "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", 8 | "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", 9 | "fields": [ 10 | { 11 | "title": "Command used to launch the workflow", 12 | "value": "```${commandLine}```", 13 | "short": false 14 | } 15 | <% 16 | if (!success) { %> 17 | , 18 | { 19 | "title": "Full error message", 20 | "value": "```${errorReport}```", 21 | "short": false 22 | }, 23 | { 24 | "title": "Pipeline configuration", 25 | "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? 
("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", 26 | "short": false 27 | } 28 | <% } 29 | %> 30 | ], 31 | "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /conf/base.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | nf-core/seqinspector Nextflow base config file 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | A 'blank slate' config file, appropriate for general use on most high performance 6 | compute environments. Assumes that all software is installed and available on 7 | the PATH. Runs in `local` mode - all jobs will be run on the logged in environment. 8 | ---------------------------------------------------------------------------------------- 9 | */ 10 | 11 | process { 12 | 13 | // TODO nf-core: Check the defaults for all processes 14 | cpus = { check_max( 1 * task.attempt, 'cpus' ) } 15 | memory = { check_max( 6.GB * task.attempt, 'memory' ) } 16 | time = { check_max( 4.h * task.attempt, 'time' ) } 17 | 18 | errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } 19 | maxRetries = 1 20 | maxErrors = '-1' 21 | 22 | // Process-specific resource requirements 23 | // NOTE - Please try and re-use the labels below as much as possible. 24 | // These labels are used and recognised by default in DSL2 files hosted on nf-core/modules. 25 | // If possible, it would be nice to keep the same label naming convention when 26 | // adding in your local modules too. 27 | // TODO nf-core: Customise requirements for specific processes. 
28 | // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors 29 | withLabel:process_single { 30 | cpus = { check_max( 1 , 'cpus' ) } 31 | memory = { check_max( 6.GB * task.attempt, 'memory' ) } 32 | time = { check_max( 4.h * task.attempt, 'time' ) } 33 | } 34 | withLabel:process_low { 35 | cpus = { check_max( 2 * task.attempt, 'cpus' ) } 36 | memory = { check_max( 12.GB * task.attempt, 'memory' ) } 37 | time = { check_max( 4.h * task.attempt, 'time' ) } 38 | } 39 | withLabel:process_medium { 40 | cpus = { check_max( 6 * task.attempt, 'cpus' ) } 41 | memory = { check_max( 36.GB * task.attempt, 'memory' ) } 42 | time = { check_max( 8.h * task.attempt, 'time' ) } 43 | } 44 | withLabel:process_high { 45 | cpus = { check_max( 12 * task.attempt, 'cpus' ) } 46 | memory = { check_max( 72.GB * task.attempt, 'memory' ) } 47 | time = { check_max( 16.h * task.attempt, 'time' ) } 48 | } 49 | withLabel:process_long { 50 | time = { check_max( 20.h * task.attempt, 'time' ) } 51 | } 52 | withLabel:process_high_memory { 53 | memory = { check_max( 200.GB * task.attempt, 'memory' ) } 54 | } 55 | withLabel:error_ignore { 56 | errorStrategy = 'ignore' 57 | } 58 | withLabel:error_retry { 59 | errorStrategy = 'retry' 60 | maxRetries = 2 61 | } 62 | withName:CUSTOM_DUMPSOFTWAREVERSIONS { 63 | cache = false 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /conf/modules.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Config file for defining DSL2 per module options and publishing paths 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Available keys to override module options: 6 | ext.args = Additional arguments appended to command in module. 7 | ext.args2 = Second set of arguments appended to command in module (multi-tool modules). 8 | ext.args3 = Third set of arguments appended to command in module (multi-tool modules). 9 | ext.prefix = File name prefix for output files. 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | process { 14 | 15 | publishDir = [ 16 | path: { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }, 17 | mode: params.publish_dir_mode, 18 | saveAs: { filename -> filename.equals('versions.yml') ? null : filename } 19 | ] 20 | 21 | withName: FASTQC { 22 | ext.args = '--quiet' 23 | } 24 | 25 | withName: CUSTOM_DUMPSOFTWAREVERSIONS { 26 | publishDir = [ 27 | path: { "${params.outdir}/pipeline_info" }, 28 | mode: params.publish_dir_mode, 29 | pattern: '*_versions.yml' 30 | ] 31 | } 32 | 33 | withName: 'MULTIQC' { 34 | ext.args = { params.multiqc_title ? "--title \"$params.multiqc_title\"" : '' } 35 | publishDir = [ 36 | path: { "${params.outdir}/multiqc" }, 37 | mode: params.publish_dir_mode, 38 | saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } 39 | ] 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /conf/test.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running minimal tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a fast and simple pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/seqinspector -profile test, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | params { 14 | config_profile_name = 'Test profile' 15 | config_profile_description = 'Minimal test dataset to check pipeline function' 16 | 17 | // Limit resources so that this can run on GitHub Actions 18 | max_cpus = 2 19 | max_memory = '6.GB' 20 | max_time = '6.h' 21 | 22 | // Input data 23 | // TODO nf-core: Specify the paths to your test data on nf-core/test-datasets 24 | // TODO nf-core: Give any required params for the test so that command line flags are not needed 25 | input = 'https://raw.githubusercontent.com/KarNair/test-datasets/seqinspector/testdata/MiSeq/samplesheet.csv' 26 | 27 | // Genome references 28 | genome = 'R64-1-1' 29 | } 30 | -------------------------------------------------------------------------------- /conf/test_full.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | Nextflow config file for running full-size tests 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Defines input files and everything required to run a full size pipeline test. 6 | 7 | Use as follows: 8 | nextflow run nf-core/seqinspector -profile test_full, --outdir 9 | 10 | ---------------------------------------------------------------------------------------- 11 | */ 12 | 13 | params { 14 | config_profile_name = 'Full test profile' 15 | config_profile_description = 'Full test dataset to check pipeline function' 16 | 17 | // Input data for full size test 18 | // TODO nf-core: Specify the paths to your full test data ( on nf-core/test-datasets or directly in repositories, e.g. SRA) 19 | // TODO nf-core: Give any required params for the test so that command line flags are not needed 20 | input = 'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_full_illumina_amplicon.csv' 21 | 22 | // Genome references 23 | genome = 'R64-1-1' 24 | } 25 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # nf-core/seqinspector: Documentation 2 | 3 | The nf-core/seqinspector documentation is split into the following pages: 4 | 5 | - [Usage](usage.md) 6 | - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags. 7 | - [Output](output.md) 8 | - An overview of the different results produced by the pipeline and how to interpret them. 
9 | 10 | You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re) 11 | -------------------------------------------------------------------------------- /docs/images/mqc_fastqc_adapter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/seqinspector/d169eefc354c9aa7232b5abcbed3f67be3ac996f/docs/images/mqc_fastqc_adapter.png -------------------------------------------------------------------------------- /docs/images/mqc_fastqc_counts.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/seqinspector/d169eefc354c9aa7232b5abcbed3f67be3ac996f/docs/images/mqc_fastqc_counts.png -------------------------------------------------------------------------------- /docs/images/mqc_fastqc_quality.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/seqinspector/d169eefc354c9aa7232b5abcbed3f67be3ac996f/docs/images/mqc_fastqc_quality.png -------------------------------------------------------------------------------- /docs/images/nf-core-seqinspector_logo_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/seqinspector/d169eefc354c9aa7232b5abcbed3f67be3ac996f/docs/images/nf-core-seqinspector_logo_dark.png -------------------------------------------------------------------------------- /docs/images/nf-core-seqinspector_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/seqinspector/d169eefc354c9aa7232b5abcbed3f67be3ac996f/docs/images/nf-core-seqinspector_logo_light.png -------------------------------------------------------------------------------- /docs/output.md: -------------------------------------------------------------------------------- 1 | # nf-core/seqinspector: Output 2 | 3 | ## Introduction 4 | 5 | This document describes the output produced by the pipeline. Most of the plots are taken from the MultiQC report, which summarises results at the end of the pipeline. 6 | 7 | The directories listed below will be created in the results directory after the pipeline has finished. All paths are relative to the top-level results directory. 8 | 9 | 10 | 11 | ## Pipeline overview 12 | 13 | The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data using the following steps: 14 | 15 | - [FastQC](#fastqc) - Raw read QC 16 | - [MultiQC](#multiqc) - Aggregate report describing results and QC from the whole pipeline 17 | - [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution 18 | 19 | ### FastQC 20 | 21 |
22 | Output files 23 | 24 | - `fastqc/` 25 | - `*_fastqc.html`: FastQC report containing quality metrics. 26 | - `*_fastqc.zip`: Zip archive containing the FastQC report, tab-delimited data file and plot images. 27 | 28 |
29 | 30 | [FastQC](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/) gives general quality metrics about your sequenced reads. It provides information about the quality score distribution across your reads, per base sequence content (%A/T/G/C), adapter contamination and overrepresented sequences. For further reading and documentation see the [FastQC help pages](http://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/). 31 | 32 | ![MultiQC - FastQC sequence counts plot](images/mqc_fastqc_counts.png) 33 | 34 | ![MultiQC - FastQC mean quality scores plot](images/mqc_fastqc_quality.png) 35 | 36 | ![MultiQC - FastQC adapter content plot](images/mqc_fastqc_adapter.png) 37 | 38 | :::note 39 | The FastQC plots displayed in the MultiQC report show _untrimmed_ reads. They may contain adapter sequences and potentially regions with low quality. 40 | ::: 41 | 42 | ### MultiQC 43 | 44 |
45 | Output files 46 | 47 | - `multiqc/` 48 | - `multiqc_report.html`: a standalone HTML file that can be viewed in your web browser. 49 | - `multiqc_data/`: directory containing parsed statistics from the different tools used in the pipeline. 50 | - `multiqc_plots/`: directory containing static images from the report in various formats. 51 | 52 |
53 | 54 | [MultiQC](http://multiqc.info) is a visualization tool that generates a single HTML report summarising all samples in your project. Most of the pipeline QC results are visualised in the report and further statistics are available in the report data directory. 55 | 56 | Results generated by MultiQC collate pipeline QC from supported tools e.g. FastQC. The pipeline has special steps which also allow the software versions to be reported in the MultiQC output for future traceability. For more information about how to use MultiQC reports, see . 57 | 58 | ### Pipeline information 59 | 60 |
61 | Output files 62 | 63 | - `pipeline_info/` 64 | - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`. 65 | - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameters are used when running the pipeline. 66 | - Reformatted samplesheet files used as input to the pipeline: `samplesheet.valid.csv`. 67 | - Parameters used by the pipeline run: `params.json`. 68 | 69 |
70 | 71 | [Nextflow](https://www.nextflow.io/docs/latest/tracing.html) provides excellent functionality for generating various reports relevant to the running and execution of the pipeline. This will allow you to troubleshoot errors with the running of the pipeline, and also provide you with other information such as launch commands, run times and resource usage. 72 | -------------------------------------------------------------------------------- /docs/usage.md: -------------------------------------------------------------------------------- 1 | # nf-core/seqinspector: Usage 2 | 3 | ## :warning: Please read this documentation on the nf-core website: [https://nf-co.re/seqinspector/usage](https://nf-co.re/seqinspector/usage) 4 | 5 | > _Documentation of pipeline parameters is generated automatically from the pipeline schema and can no longer be found in markdown files._ 6 | 7 | ## Introduction 8 | 9 | 10 | 11 | ## Samplesheet input 12 | 13 | You will need to create a samplesheet with information about the samples you would like to analyse before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row as shown in the examples below. 14 | 15 | ```bash 16 | --input '[path to samplesheet file]' 17 | ``` 18 | 19 | ### Multiple runs of the same sample 20 | 21 | The `sample` identifiers have to be the same when you have re-sequenced the same sample more than once e.g. to increase sequencing depth. The pipeline will concatenate the raw reads before performing any downstream analysis. Below is an example for the same sample sequenced across 3 lanes: 22 | 23 | ```csv title="samplesheet.csv" 24 | sample,fastq_1,fastq_2 25 | CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz 26 | CONTROL_REP1,AEG588A1_S1_L003_R1_001.fastq.gz,AEG588A1_S1_L003_R2_001.fastq.gz 27 | CONTROL_REP1,AEG588A1_S1_L004_R1_001.fastq.gz,AEG588A1_S1_L004_R2_001.fastq.gz 28 | ``` 29 | 30 | ### Full samplesheet 31 | 32 | The pipeline will auto-detect whether a sample is single- or paired-end using the information provided in the samplesheet. The samplesheet can have as many columns as you desire, however, there is a strict requirement for the first 3 columns to match those defined in the table below. 33 | 34 | A final samplesheet file consisting of both single- and paired-end data may look something like the one below. This is for 6 samples, where `TREATMENT_REP3` has been sequenced twice. 35 | 36 | ```csv title="samplesheet.csv" 37 | sample,fastq_1,fastq_2 38 | CONTROL_REP1,AEG588A1_S1_L002_R1_001.fastq.gz,AEG588A1_S1_L002_R2_001.fastq.gz 39 | CONTROL_REP2,AEG588A2_S2_L002_R1_001.fastq.gz,AEG588A2_S2_L002_R2_001.fastq.gz 40 | CONTROL_REP3,AEG588A3_S3_L002_R1_001.fastq.gz,AEG588A3_S3_L002_R2_001.fastq.gz 41 | TREATMENT_REP1,AEG588A4_S4_L003_R1_001.fastq.gz, 42 | TREATMENT_REP2,AEG588A5_S5_L003_R1_001.fastq.gz, 43 | TREATMENT_REP3,AEG588A6_S6_L003_R1_001.fastq.gz, 44 | TREATMENT_REP3,AEG588A6_S6_L004_R1_001.fastq.gz, 45 | ``` 46 | 47 | | Column | Description | 48 | | --------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 49 | | `sample` | Custom sample name. This entry will be identical for multiple sequencing libraries/runs from the same sample. Spaces in sample names are automatically converted to underscores (`_`). | 50 | | `fastq_1` | Full path to FastQ file for Illumina short reads 1. 
File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | 51 | | `fastq_2` | Full path to FastQ file for Illumina short reads 2. File has to be gzipped and have the extension ".fastq.gz" or ".fq.gz". | 52 | 53 | An [example samplesheet](../assets/samplesheet.csv) has been provided with the pipeline. 54 | 55 | ## Running the pipeline 56 | 57 | The typical command for running the pipeline is as follows: 58 | 59 | ```bash 60 | nextflow run nf-core/seqinspector --input ./samplesheet.csv --outdir ./results --genome GRCh37 -profile docker 61 | ``` 62 | 63 | This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles. 64 | 65 | Note that the pipeline will create the following files in your working directory: 66 | 67 | ```bash 68 | work # Directory containing the nextflow working files 69 | # Finished results in specified location (defined with --outdir) 70 | .nextflow_log # Log file from Nextflow 71 | # Other nextflow hidden files, eg. history of pipeline runs and old logs. 72 | ``` 73 | 74 | If you wish to repeatedly use the same parameters for multiple runs, rather than specifying each flag in the command, you can specify these in a params file. 75 | 76 | Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `. 77 | 78 | :::warning 79 | Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). 80 | ::: 81 | 82 | The above pipeline run specified with a params file in yaml format: 83 | 84 | ```bash 85 | nextflow run nf-core/seqinspector -profile docker -params-file params.yaml 86 | ``` 87 | 88 | with `params.yaml` containing: 89 | 90 | ```yaml 91 | input: './samplesheet.csv' 92 | outdir: './results/' 93 | genome: 'GRCh37' 94 | <...> 95 | ``` 96 | 97 | You can also generate such `YAML`/`JSON` files via [nf-core/launch](https://nf-co.re/launch). 98 | 99 | ### Updating the pipeline 100 | 101 | When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline: 102 | 103 | ```bash 104 | nextflow pull nf-core/seqinspector 105 | ``` 106 | 107 | ### Reproducibility 108 | 109 | It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since. 110 | 111 | First, go to the [nf-core/seqinspector releases page](https://github.com/nf-core/seqinspector/releases) and find the latest pipeline version - numeric only (eg. `1.3.1`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.3.1`. Of course, you can switch to another version by changing the number after the `-r` flag. 112 | 113 | This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. 
For example, at the bottom of the MultiQC reports. 114 | 115 | To further assist in reproducibility, you can share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter. 116 | 117 | :::tip 118 | If you wish to share such a profile (for example, as supplementary material for an academic publication), make sure NOT to include cluster-specific file paths or institution-specific profiles. 119 | ::: 120 | 121 | ## Core Nextflow arguments 122 | 123 | :::note 124 | These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen). 125 | ::: 126 | 127 | ### `-profile` 128 | 129 | Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. 130 | 131 | Several generic profiles are bundled with the pipeline, which instruct it to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below. 132 | 133 | :::info 134 | We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility; however, when this is not possible, Conda is also supported. 135 | ::: 136 | 137 | The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation). 138 | 139 | Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important! 140 | They are loaded in sequence, so later profiles can overwrite earlier profiles. 141 | 142 | If `-profile` is not specified, the pipeline will run locally and expect all software to be installed and available on the `PATH`. This is _not_ recommended, since it can lead to different results on different machines dependent on the computer environment. 143 | 144 | - `test` 145 | - A profile with a complete configuration for automated testing 146 | - Includes links to test data so needs no other parameters 147 | - `docker` 148 | - A generic configuration profile to be used with [Docker](https://docker.com/) 149 | - `singularity` 150 | - A generic configuration profile to be used with [Singularity](https://sylabs.io/docs/) 151 | - `podman` 152 | - A generic configuration profile to be used with [Podman](https://podman.io/) 153 | - `shifter` 154 | - A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) 155 | - `charliecloud` 156 | - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) 157 | - `apptainer` 158 | - A generic configuration profile to be used with [Apptainer](https://apptainer.org/) 159 | - `conda` 160 | - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter, Charliecloud, or Apptainer. 161 | 162 | ### `-resume` 163 | 164 | Specify this when restarting a pipeline. Nextflow will use cached results from any pipeline steps where the inputs are the same, continuing from where it got to previously.
For input to be considered the same, not only must the names be identical but the files' contents as well. For more info about this parameter, see [this blog post](https://www.nextflow.io/blog/2019/demystifying-nextflow-resume.html). 165 | 166 | You can also supply a run name to resume a specific run: `-resume [run-name]`. Use the `nextflow log` command to show previous run names. 167 | 168 | ### `-c` 169 | 170 | Specify the path to a specific config file (this is a core Nextflow command). See the [nf-core website documentation](https://nf-co.re/usage/configuration) for more information. 171 | 172 | ## Custom configuration 173 | 174 | ### Resource requests 175 | 176 | Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. 177 | 178 | To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) sections of the nf-core website. 179 | 180 | ### Custom Containers 181 | 182 | In some cases, you may wish to change the container or conda environment that a step of the pipeline uses for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the version specified in the pipeline may be out of date. 183 | 184 | To use a different container or conda environment from the one specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website. 185 | 186 | ### Custom Tool Arguments 187 | 188 | A pipeline might not always support every possible argument or option of a particular tool used in the pipeline. Fortunately, nf-core pipelines give users some freedom to supply additional arguments that the pipeline does not expose by default. 189 | 190 | To learn how to provide additional arguments to a particular tool of the pipeline, please see the [customising tool arguments](https://nf-co.re/docs/usage/configuration#customising-tool-arguments) section of the nf-core website. 191 | 192 | ### nf-core/configs 193 | 194 | In most cases, you will only need to create a custom config as a one-off. However, if you and others within your organisation are likely to run nf-core pipelines regularly with the same settings, it may be a good idea to request that your custom config file is uploaded to the `nf-core/configs` git repository. Before you do this, please test that the config file works with your pipeline of choice using the `-c` parameter.
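As a rough illustration, a minimal custom config might look like the sketch below; save it as e.g. `custom.config` and test it with `-c custom.config`. The process names `FASTQC` and `MULTIQC` are taken from this pipeline's `conf/modules.config`, but the executor, the resource values and the extra MultiQC flag are placeholder assumptions to adapt to your own environment.

```groovy
// Illustrative sketch only: adjust the executor and resources to your own setup.
process {
    executor = 'slurm' // placeholder: replace with your scheduler, or omit to run locally

    // Raise resources for one step; the pipeline defaults live in conf/base.config
    withName: 'FASTQC' {
        cpus   = 4
        memory = 16.GB
        time   = 8.h
    }

    // Supply an extra command-line argument to a tool via ext.args
    // (see "Custom Tool Arguments" above)
    withName: 'MULTIQC' {
        ext.args = '--verbose'
    }
}
```

Scoping settings with `withName` selectors like this only affects the named processes, leaving the rest of the pipeline configuration untouched.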
You can then create a pull request to the `nf-core/configs` repository that adds your config file, an associated documentation file (see examples in [`nf-core/configs/docs`](https://github.com/nf-core/configs/tree/master/docs)), and an amendment to [`nfcore_custom.config`](https://github.com/nf-core/configs/blob/master/nfcore_custom.config) that includes your custom profile. 195 | 196 | See the main [Nextflow documentation](https://www.nextflow.io/docs/latest/config.html) for more information about creating your own configuration files. 197 | 198 | If you have any questions or issues, please send us a message on [Slack](https://nf-co.re/join/slack) on the [`#configs` channel](https://nfcore.slack.com/channels/configs). 199 | 200 | ## Azure Resource Requests 201 | 202 | To use Azure Batch, specify the `azurebatch` profile with `-profile azurebatch`. 203 | We recommend setting `params.vm_type` to `Standard_D16_v3` VMs by default, but this can be changed if required. 204 | 205 | Note that the choice of VM size depends on your quota and the overall workload during the analysis. 206 | For a thorough list, please refer to [Sizes for virtual machines in Azure](https://docs.microsoft.com/en-us/azure/virtual-machines/sizes). 207 | 208 | ## Running in the background 209 | 210 | Nextflow handles job submissions and supervises the running jobs. The Nextflow process must run until the pipeline is finished. 211 | 212 | The Nextflow `-bg` flag launches Nextflow in the background, detached from your terminal so that the workflow does not stop if you log out of your session. The logs are saved to a file. 213 | 214 | Alternatively, you can use `screen` / `tmux` or a similar tool to create a detached session which you can log back into at a later time. 215 | Some HPC setups also allow you to run Nextflow within a cluster job submitted to your job scheduler (from where it submits more jobs). 216 | 217 | ## Nextflow memory requirements 218 | 219 | In some cases, the Nextflow Java virtual machines can start to request a large amount of memory.
220 | We recommend adding the following line to your environment to limit this (typically in `~/.bashrc` or `~./bash_profile`): 221 | 222 | ```bash 223 | NXF_OPTS='-Xms1g -Xmx4g' 224 | ``` 225 | -------------------------------------------------------------------------------- /main.nf: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env nextflow 2 | /* 3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 4 | nf-core/seqinspector 5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 6 | Github : https://github.com/nf-core/seqinspector 7 | Website: https://nf-co.re/seqinspector 8 | Slack : https://nfcore.slack.com/channels/seqinspector 9 | ---------------------------------------------------------------------------------------- 10 | */ 11 | 12 | nextflow.enable.dsl = 2 13 | 14 | /* 15 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 16 | IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS 17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 18 | */ 19 | 20 | include { SEQINSPECTOR } from './workflows/seqinspector' 21 | include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_seqinspector_pipeline' 22 | include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_seqinspector_pipeline' 23 | 24 | include { getGenomeAttribute } from './subworkflows/local/utils_nfcore_seqinspector_pipeline' 25 | 26 | /* 27 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 28 | GENOME PARAMETER VALUES 29 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 30 | */ 31 | 32 | // TODO nf-core: Remove this line if you don't need a FASTA file 33 | // This is an example of how to use getGenomeAttribute() to fetch parameters 34 | // from igenomes.config using `--genome` 35 | // params.fasta = getGenomeAttribute('fasta') 36 | 37 | /* 38 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 39 | NAMED WORKFLOWS FOR PIPELINE 40 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 41 | */ 42 | 43 | // 44 | // WORKFLOW: Run main analysis pipeline depending on type of input 45 | // 46 | workflow NFCORE_SEQINSPECTOR { 47 | 48 | take: 49 | samplesheet // channel: samplesheet read in from --input 50 | 51 | main: 52 | 53 | // 54 | // WORKFLOW: Run pipeline 55 | // 56 | SEQINSPECTOR ( 57 | samplesheet 58 | ) 59 | 60 | emit: 61 | multiqc_report = SEQINSPECTOR.out.multiqc_report // channel: /path/to/multiqc_report.html 62 | 63 | } 64 | /* 65 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 66 | RUN MAIN WORKFLOW 67 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 68 | */ 69 | 70 | workflow { 71 | 72 | main: 73 | 74 | // 75 | // SUBWORKFLOW: Run initialisation tasks 76 | // 77 | PIPELINE_INITIALISATION ( 78 | params.version, 79 | params.help, 80 | params.validate_params, 81 | params.monochrome_logs, 82 | args, 83 | params.outdir, 84 | params.input 85 | ) 86 | 87 | // 88 | // WORKFLOW: Run main workflow 89 | // 90 | NFCORE_SEQINSPECTOR ( 91 | PIPELINE_INITIALISATION.out.samplesheet 92 | ) 93 | 94 | // 95 | // SUBWORKFLOW: Run completion tasks 96 | // 97 | PIPELINE_COMPLETION ( 98 | params.email, 99 | params.email_on_fail, 100 | params.plaintext_email, 101 | 
params.outdir, 102 | params.monochrome_logs, 103 | params.hook_url, 104 | NFCORE_SEQINSPECTOR.out.multiqc_report 105 | ) 106 | } 107 | 108 | /* 109 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 110 | THE END 111 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 112 | */ 113 | -------------------------------------------------------------------------------- /modules.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nf-core/seqinspector", 3 | "homePage": "https://github.com/nf-core/seqinspector", 4 | "repos": { 5 | "https://github.com/nf-core/modules.git": { 6 | "modules": { 7 | "nf-core": { 8 | "fastqc": { 9 | "branch": "master", 10 | "git_sha": "f4ae1d942bd50c5c0b9bd2de1393ce38315ba57c", 11 | "installed_by": ["modules"] 12 | }, 13 | "multiqc": { 14 | "branch": "master", 15 | "git_sha": "b7ebe95761cd389603f9cc0e0dc384c0f663815a", 16 | "installed_by": ["modules"] 17 | } 18 | } 19 | }, 20 | "subworkflows": { 21 | "nf-core": { 22 | "utils_nextflow_pipeline": { 23 | "branch": "master", 24 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 25 | "installed_by": ["subworkflows"] 26 | }, 27 | "utils_nfcore_pipeline": { 28 | "branch": "master", 29 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 30 | "installed_by": ["subworkflows"] 31 | }, 32 | "utils_nfvalidation_plugin": { 33 | "branch": "master", 34 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 35 | "installed_by": ["subworkflows"] 36 | } 37 | } 38 | } 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/environment.yml: -------------------------------------------------------------------------------- 1 | name: fastqc 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::fastqc=0.12.1 8 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/main.nf: -------------------------------------------------------------------------------- 1 | process FASTQC { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/fastqc:0.12.1--hdfd78af_0' : 8 | 'biocontainers/fastqc:0.12.1--hdfd78af_0' }" 9 | 10 | input: 11 | tuple val(meta), path(reads) 12 | 13 | output: 14 | tuple val(meta), path("*.html"), emit: html 15 | tuple val(meta), path("*.zip") , emit: zip 16 | path "versions.yml" , emit: versions 17 | 18 | when: 19 | task.ext.when == null || task.ext.when 20 | 21 | script: 22 | def args = task.ext.args ?: '' 23 | def prefix = task.ext.prefix ?: "${meta.id}" 24 | // Make list of old name and new name pairs to use for renaming in the bash while loop 25 | def old_new_pairs = reads instanceof Path || reads.size() == 1 ? 
[[ reads, "${prefix}.${reads.extension}" ]] : reads.withIndex().collect { entry, index -> [ entry, "${prefix}_${index + 1}.${entry.extension}" ] } 26 | def rename_to = old_new_pairs*.join(' ').join(' ') 27 | def renamed_files = old_new_pairs.collect{ old_name, new_name -> new_name }.join(' ') 28 | """ 29 | printf "%s %s\\n" $rename_to | while read old_name new_name; do 30 | [ -f "\${new_name}" ] || ln -s \$old_name \$new_name 31 | done 32 | 33 | fastqc \\ 34 | $args \\ 35 | --threads $task.cpus \\ 36 | $renamed_files 37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | fastqc: \$( fastqc --version | sed '/FastQC v/!d; s/.*v//' ) 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | def prefix = task.ext.prefix ?: "${meta.id}" 46 | """ 47 | touch ${prefix}.html 48 | touch ${prefix}.zip 49 | 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | fastqc: \$( fastqc --version | sed '/FastQC v/!d; s/.*v//' ) 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/meta.yml: -------------------------------------------------------------------------------- 1 | name: fastqc 2 | description: Run FastQC on sequenced reads 3 | keywords: 4 | - quality control 5 | - qc 6 | - adapters 7 | - fastq 8 | tools: 9 | - fastqc: 10 | description: | 11 | FastQC gives general quality metrics about your reads. 12 | It provides information about the quality score distribution 13 | across your reads, the per base sequence content (%A/C/G/T). 14 | You get information about adapter contamination and other 15 | overrepresented sequences. 16 | homepage: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/ 17 | documentation: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/Help/ 18 | licence: ["GPL-2.0-only"] 19 | input: 20 | - meta: 21 | type: map 22 | description: | 23 | Groovy Map containing sample information 24 | e.g. [ id:'test', single_end:false ] 25 | - reads: 26 | type: file 27 | description: | 28 | List of input FastQ files of size 1 and 2 for single-end and paired-end data, 29 | respectively. 30 | output: 31 | - meta: 32 | type: map 33 | description: | 34 | Groovy Map containing sample information 35 | e.g. 
[ id:'test', single_end:false ] 36 | - html: 37 | type: file 38 | description: FastQC report 39 | pattern: "*_{fastqc.html}" 40 | - zip: 41 | type: file 42 | description: FastQC report archive 43 | pattern: "*_{fastqc.zip}" 44 | - versions: 45 | type: file 46 | description: File containing software versions 47 | pattern: "versions.yml" 48 | authors: 49 | - "@drpatelh" 50 | - "@grst" 51 | - "@ewels" 52 | - "@FelixKrueger" 53 | maintainers: 54 | - "@drpatelh" 55 | - "@grst" 56 | - "@ewels" 57 | - "@FelixKrueger" 58 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process FASTQC" 4 | script "../main.nf" 5 | process "FASTQC" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "fastqc" 10 | 11 | test("sarscov2 single-end [fastq]") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = Channel.of([ 17 | [ id: 'test', single_end:true ], 18 | [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ] 19 | ]) 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll ( 26 | { assert process.success }, 27 | 28 | // NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it. 29 | // looks like this:
<div id="header_filename">Mon 2 Oct 2023<br/>test.gz</div>
30 | // https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039 31 | 32 | { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, 33 | { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, 34 | { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, 35 | 36 | { assert snapshot(process.out.versions).match("fastqc_versions_single") } 37 | ) 38 | } 39 | } 40 | 41 | test("sarscov2 paired-end [fastq]") { 42 | 43 | when { 44 | process { 45 | """ 46 | input[0] = Channel.of([ 47 | [id: 'test', single_end: false], // meta map 48 | [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), 49 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ] 50 | ]) 51 | """ 52 | } 53 | } 54 | 55 | then { 56 | assertAll ( 57 | { assert process.success }, 58 | 59 | { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, 60 | { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, 61 | { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, 62 | { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, 63 | { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, 64 | { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, 65 | 66 | { assert snapshot(process.out.versions).match("fastqc_versions_paired") } 67 | ) 68 | } 69 | } 70 | 71 | test("sarscov2 interleaved [fastq]") { 72 | 73 | when { 74 | process { 75 | """ 76 | input[0] = Channel.of([ 77 | [id: 'test', single_end: false], // meta map 78 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true) 79 | ]) 80 | """ 81 | } 82 | } 83 | 84 | then { 85 | assertAll ( 86 | { assert process.success }, 87 | 88 | { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, 89 | { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, 90 | { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, 91 | 92 | { assert snapshot(process.out.versions).match("fastqc_versions_interleaved") } 93 | ) 94 | } 95 | } 96 | 97 | test("sarscov2 paired-end [bam]") { 98 | 99 | when { 100 | process { 101 | """ 102 | input[0] = Channel.of([ 103 | [id: 'test', single_end: false], // meta map 104 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true) 105 | ]) 106 | """ 107 | } 108 | } 109 | 110 | then { 111 | assertAll ( 112 | { assert process.success }, 113 | 114 | { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" }, 115 | { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" }, 116 | { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, 117 | 118 | { assert snapshot(process.out.versions).match("fastqc_versions_bam") } 119 | ) 120 | } 121 | } 122 | 123 | test("sarscov2 multiple [fastq]") { 124 | 125 | when { 126 | process { 127 | """ 128 | input[0] = Channel.of([ 129 | [id: 'test', single_end: false], // meta map 130 | [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true), 131 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true), 132 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true), 133 | 
file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ] 134 | ]) 135 | """ 136 | } 137 | } 138 | 139 | then { 140 | assertAll ( 141 | { assert process.success }, 142 | 143 | { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" }, 144 | { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" }, 145 | { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" }, 146 | { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" }, 147 | { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" }, 148 | { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" }, 149 | { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" }, 150 | { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" }, 151 | { assert path(process.out.html[0][1][0]).text.contains("File typeConventional base calls") }, 152 | { assert path(process.out.html[0][1][1]).text.contains("File typeConventional base calls") }, 153 | { assert path(process.out.html[0][1][2]).text.contains("File typeConventional base calls") }, 154 | { assert path(process.out.html[0][1][3]).text.contains("File typeConventional base calls") }, 155 | 156 | { assert snapshot(process.out.versions).match("fastqc_versions_multiple") } 157 | ) 158 | } 159 | } 160 | 161 | test("sarscov2 custom_prefix") { 162 | 163 | when { 164 | process { 165 | """ 166 | input[0] = Channel.of([ 167 | [ id:'mysample', single_end:true ], // meta map 168 | file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) 169 | ]) 170 | """ 171 | } 172 | } 173 | 174 | then { 175 | assertAll ( 176 | { assert process.success }, 177 | 178 | { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" }, 179 | { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" }, 180 | { assert path(process.out.html[0][1]).text.contains("File typeConventional base calls") }, 181 | 182 | { assert snapshot(process.out.versions).match("fastqc_versions_custom_prefix") } 183 | ) 184 | } 185 | } 186 | 187 | test("sarscov2 single-end [fastq] - stub") { 188 | 189 | options "-stub" 190 | 191 | when { 192 | process { 193 | """ 194 | input[0] = Channel.of([ 195 | [ id: 'test', single_end:true ], 196 | [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ] 197 | ]) 198 | """ 199 | } 200 | } 201 | 202 | then { 203 | assertAll ( 204 | { assert process.success }, 205 | { assert snapshot(process.out.html.collect { file(it[1]).getName() } + 206 | process.out.zip.collect { file(it[1]).getName() } + 207 | process.out.versions ).match("fastqc_stub") } 208 | ) 209 | } 210 | } 211 | 212 | } 213 | -------------------------------------------------------------------------------- /modules/nf-core/fastqc/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "fastqc_versions_interleaved": { 3 | "content": [ 4 | [ 5 | "versions.yml:md5,e1cc25ca8af856014824abd842e93978" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.8.4", 10 | "nextflow": "23.10.1" 11 | }, 12 | "timestamp": "2024-01-31T17:40:07.293713" 13 | }, 14 | "fastqc_stub": { 15 | "content": [ 16 | [ 17 | "test.html", 18 | "test.zip", 19 | "versions.yml:md5,e1cc25ca8af856014824abd842e93978" 20 | ] 21 | ], 22 | "meta": { 23 | "nf-test": "0.8.4", 24 | "nextflow": "23.10.1" 25 | }, 26 | "timestamp": "2024-01-31T17:31:01.425198" 27 | }, 28 | "fastqc_versions_multiple": { 29 | "content": [ 30 | [ 31 | 
"versions.yml:md5,e1cc25ca8af856014824abd842e93978" 32 | ] 33 | ], 34 | "meta": { 35 | "nf-test": "0.8.4", 36 | "nextflow": "23.10.1" 37 | }, 38 | "timestamp": "2024-01-31T17:40:55.797907" 39 | }, 40 | "fastqc_versions_bam": { 41 | "content": [ 42 | [ 43 | "versions.yml:md5,e1cc25ca8af856014824abd842e93978" 44 | ] 45 | ], 46 | "meta": { 47 | "nf-test": "0.8.4", 48 | "nextflow": "23.10.1" 49 | }, 50 | "timestamp": "2024-01-31T17:40:26.795862" 51 | }, 52 | "fastqc_versions_single": { 53 | "content": [ 54 | [ 55 | "versions.yml:md5,e1cc25ca8af856014824abd842e93978" 56 | ] 57 | ], 58 | "meta": { 59 | "nf-test": "0.8.4", 60 | "nextflow": "23.10.1" 61 | }, 62 | "timestamp": "2024-01-31T17:39:27.043675" 63 | }, 64 | "fastqc_versions_paired": { 65 | "content": [ 66 | [ 67 | "versions.yml:md5,e1cc25ca8af856014824abd842e93978" 68 | ] 69 | ], 70 | "meta": { 71 | "nf-test": "0.8.4", 72 | "nextflow": "23.10.1" 73 | }, 74 | "timestamp": "2024-01-31T17:39:47.584191" 75 | }, 76 | "fastqc_versions_custom_prefix": { 77 | "content": [ 78 | [ 79 | "versions.yml:md5,e1cc25ca8af856014824abd842e93978" 80 | ] 81 | ], 82 | "meta": { 83 | "nf-test": "0.8.4", 84 | "nextflow": "23.10.1" 85 | }, 86 | "timestamp": "2024-01-31T17:41:14.576531" 87 | } 88 | } -------------------------------------------------------------------------------- /modules/nf-core/fastqc/tests/tags.yml: -------------------------------------------------------------------------------- 1 | fastqc: 2 | - modules/nf-core/fastqc/** 3 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/environment.yml: -------------------------------------------------------------------------------- 1 | name: multiqc 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::multiqc=1.21 8 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/main.nf: -------------------------------------------------------------------------------- 1 | process MULTIQC { 2 | label 'process_single' 3 | 4 | conda "${moduleDir}/environment.yml" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/multiqc:1.21--pyhdfd78af_0' : 7 | 'biocontainers/multiqc:1.21--pyhdfd78af_0' }" 8 | 9 | input: 10 | path multiqc_files, stageAs: "?/*" 11 | path(multiqc_config) 12 | path(extra_multiqc_config) 13 | path(multiqc_logo) 14 | 15 | output: 16 | path "*multiqc_report.html", emit: report 17 | path "*_data" , emit: data 18 | path "*_plots" , optional:true, emit: plots 19 | path "versions.yml" , emit: versions 20 | 21 | when: 22 | task.ext.when == null || task.ext.when 23 | 24 | script: 25 | def args = task.ext.args ?: '' 26 | def config = multiqc_config ? "--config $multiqc_config" : '' 27 | def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : '' 28 | def logo = multiqc_logo ? /--cl-config 'custom_logo: "${multiqc_logo}"'/ : '' 29 | """ 30 | multiqc \\ 31 | --force \\ 32 | $args \\ 33 | $config \\ 34 | $extra_config \\ 35 | $logo \\ 36 | . 
37 | 38 | cat <<-END_VERSIONS > versions.yml 39 | "${task.process}": 40 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 41 | END_VERSIONS 42 | """ 43 | 44 | stub: 45 | """ 46 | mkdir multiqc_data 47 | touch multiqc_plots 48 | touch multiqc_report.html 49 | 50 | cat <<-END_VERSIONS > versions.yml 51 | "${task.process}": 52 | multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/meta.yml: -------------------------------------------------------------------------------- 1 | name: multiqc 2 | description: Aggregate results from bioinformatics analyses across many samples into a single report 3 | keywords: 4 | - QC 5 | - bioinformatics tools 6 | - Beautiful stand-alone HTML report 7 | tools: 8 | - multiqc: 9 | description: | 10 | MultiQC searches a given directory for analysis logs and compiles a HTML report. 11 | It's a general use tool, perfect for summarising the output from numerous bioinformatics tools. 12 | homepage: https://multiqc.info/ 13 | documentation: https://multiqc.info/docs/ 14 | licence: ["GPL-3.0-or-later"] 15 | input: 16 | - multiqc_files: 17 | type: file 18 | description: | 19 | List of reports / files recognised by MultiQC, for example the html and zip output of FastQC 20 | - multiqc_config: 21 | type: file 22 | description: Optional config yml for MultiQC 23 | pattern: "*.{yml,yaml}" 24 | - extra_multiqc_config: 25 | type: file 26 | description: Second optional config yml for MultiQC. Will override common sections in multiqc_config. 27 | pattern: "*.{yml,yaml}" 28 | - multiqc_logo: 29 | type: file 30 | description: Optional logo file for MultiQC 31 | pattern: "*.{png}" 32 | output: 33 | - report: 34 | type: file 35 | description: MultiQC report file 36 | pattern: "multiqc_report.html" 37 | - data: 38 | type: directory 39 | description: MultiQC data dir 40 | pattern: "multiqc_data" 41 | - plots: 42 | type: file 43 | description: Plots created by MultiQC 44 | pattern: "*_data" 45 | - versions: 46 | type: file 47 | description: File containing software versions 48 | pattern: "versions.yml" 49 | authors: 50 | - "@abhi18av" 51 | - "@bunop" 52 | - "@drpatelh" 53 | - "@jfy133" 54 | maintainers: 55 | - "@abhi18av" 56 | - "@bunop" 57 | - "@drpatelh" 58 | - "@jfy133" 59 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process MULTIQC" 4 | script "../main.nf" 5 | process "MULTIQC" 6 | 7 | tag "modules" 8 | tag "modules_nfcore" 9 | tag "multiqc" 10 | 11 | test("sarscov2 single-end [fastqc]") { 12 | 13 | when { 14 | process { 15 | """ 16 | input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true)) 17 | input[1] = [] 18 | input[2] = [] 19 | input[3] = [] 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, 28 | { assert process.out.data[0] ==~ ".*/multiqc_data" }, 29 | { assert snapshot(process.out.versions).match("multiqc_versions_single") } 30 | ) 31 | } 32 | 33 | } 34 | 35 | test("sarscov2 single-end [fastqc] [config]") { 36 | 37 | when { 38 | process { 39 | """ 40 | input[0] = Channel.of(file(params.modules_testdata_base_path + 
'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true)) 41 | input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true)) 42 | input[2] = [] 43 | input[3] = [] 44 | """ 45 | } 46 | } 47 | 48 | then { 49 | assertAll( 50 | { assert process.success }, 51 | { assert process.out.report[0] ==~ ".*/multiqc_report.html" }, 52 | { assert process.out.data[0] ==~ ".*/multiqc_data" }, 53 | { assert snapshot(process.out.versions).match("multiqc_versions_config") } 54 | ) 55 | } 56 | } 57 | 58 | test("sarscov2 single-end [fastqc] - stub") { 59 | 60 | options "-stub" 61 | 62 | when { 63 | process { 64 | """ 65 | input[0] = Channel.of(file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastqc/test_fastqc.zip', checkIfExists: true)) 66 | input[1] = [] 67 | input[2] = [] 68 | input[3] = [] 69 | """ 70 | } 71 | } 72 | 73 | then { 74 | assertAll( 75 | { assert process.success }, 76 | { assert snapshot(process.out.report.collect { file(it).getName() } + 77 | process.out.data.collect { file(it).getName() } + 78 | process.out.plots.collect { file(it).getName() } + 79 | process.out.versions ).match("multiqc_stub") } 80 | ) 81 | } 82 | 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "multiqc_versions_single": { 3 | "content": [ 4 | [ 5 | "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.8.4", 10 | "nextflow": "23.10.1" 11 | }, 12 | "timestamp": "2024-02-29T08:48:55.657331" 13 | }, 14 | "multiqc_stub": { 15 | "content": [ 16 | [ 17 | "multiqc_report.html", 18 | "multiqc_data", 19 | "multiqc_plots", 20 | "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" 21 | ] 22 | ], 23 | "meta": { 24 | "nf-test": "0.8.4", 25 | "nextflow": "23.10.1" 26 | }, 27 | "timestamp": "2024-02-29T08:49:49.071937" 28 | }, 29 | "multiqc_versions_config": { 30 | "content": [ 31 | [ 32 | "versions.yml:md5,21f35ee29416b9b3073c28733efe4b7d" 33 | ] 34 | ], 35 | "meta": { 36 | "nf-test": "0.8.4", 37 | "nextflow": "23.10.1" 38 | }, 39 | "timestamp": "2024-02-29T08:49:25.457567" 40 | } 41 | } -------------------------------------------------------------------------------- /modules/nf-core/multiqc/tests/tags.yml: -------------------------------------------------------------------------------- 1 | multiqc: 2 | - modules/nf-core/multiqc/** 3 | -------------------------------------------------------------------------------- /nextflow.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | nf-core/seqinspector Nextflow config file 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | Default config options for all compute environments 6 | ---------------------------------------------------------------------------------------- 7 | */ 8 | 9 | // Global default params, used in configs 10 | params { 11 | 12 | // TODO nf-core: Specify your pipeline's command line flags 13 | // Input options 14 | input = null 15 | // References 16 | genome = null 17 | igenomes_base = 's3://ngi-igenomes/igenomes/' 18 | igenomes_ignore = false 19 | fasta = null// MultiQC options 20 | multiqc_config = null 21 | multiqc_title = null 22 | multiqc_logo 
= null 23 | max_multiqc_email_size = '25.MB' 24 | multiqc_methods_description = null 25 | 26 | // Boilerplate options 27 | outdir = null 28 | publish_dir_mode = 'copy' 29 | email = null 30 | email_on_fail = null 31 | plaintext_email = false 32 | monochrome_logs = false 33 | hook_url = null 34 | help = false 35 | version = false 36 | 37 | // Config options 38 | config_profile_name = null 39 | config_profile_description = null 40 | custom_config_version = 'master' 41 | custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}" 42 | config_profile_contact = null 43 | config_profile_url = null 44 | 45 | // Max resource options 46 | // Defaults only, expecting to be overwritten 47 | max_memory = '128.GB' 48 | max_cpus = 16 49 | max_time = '240.h' 50 | 51 | // Schema validation default options 52 | validationFailUnrecognisedParams = false 53 | validationLenientMode = false 54 | validationSchemaIgnoreParams = 'genomes,igenomes_base' 55 | validationShowHiddenParams = false 56 | validate_params = true 57 | 58 | } 59 | 60 | // Load base.config by default for all pipelines 61 | includeConfig 'conf/base.config' 62 | 63 | // Load nf-core custom profiles from different Institutions 64 | try { 65 | includeConfig "${params.custom_config_base}/nfcore_custom.config" 66 | } catch (Exception e) { 67 | System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") 68 | } 69 | 70 | // Load nf-core/seqinspector custom profiles from different institutions. 71 | // Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs! 72 | // try { 73 | // includeConfig "${params.custom_config_base}/pipeline/seqinspector.config" 74 | // } catch (Exception e) { 75 | // System.err.println("WARNING: Could not load nf-core/config/seqinspector profiles: ${params.custom_config_base}/pipeline/seqinspector.config") 76 | // } 77 | profiles { 78 | debug { 79 | dumpHashes = true 80 | process.beforeScript = 'echo $HOSTNAME' 81 | cleanup = false 82 | nextflow.enable.configProcessNamesValidation = true 83 | } 84 | conda { 85 | conda.enabled = true 86 | docker.enabled = false 87 | singularity.enabled = false 88 | podman.enabled = false 89 | shifter.enabled = false 90 | charliecloud.enabled = false 91 | channels = ['conda-forge', 'bioconda', 'defaults'] 92 | apptainer.enabled = false 93 | } 94 | mamba { 95 | conda.enabled = true 96 | conda.useMamba = true 97 | docker.enabled = false 98 | singularity.enabled = false 99 | podman.enabled = false 100 | shifter.enabled = false 101 | charliecloud.enabled = false 102 | apptainer.enabled = false 103 | } 104 | docker { 105 | docker.enabled = true 106 | conda.enabled = false 107 | singularity.enabled = false 108 | podman.enabled = false 109 | shifter.enabled = false 110 | charliecloud.enabled = false 111 | apptainer.enabled = false 112 | docker.runOptions = '-u $(id -u):$(id -g)' 113 | } 114 | arm { 115 | docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' 116 | } 117 | singularity { 118 | singularity.enabled = true 119 | singularity.autoMounts = true 120 | conda.enabled = false 121 | docker.enabled = false 122 | podman.enabled = false 123 | shifter.enabled = false 124 | charliecloud.enabled = false 125 | apptainer.enabled = false 126 | } 127 | podman { 128 | podman.enabled = true 129 | conda.enabled = false 130 | docker.enabled = false 131 | singularity.enabled = false 132 | shifter.enabled = false 133 | charliecloud.enabled = false 134 | 
apptainer.enabled = false 135 | } 136 | shifter { 137 | shifter.enabled = true 138 | conda.enabled = false 139 | docker.enabled = false 140 | singularity.enabled = false 141 | podman.enabled = false 142 | charliecloud.enabled = false 143 | apptainer.enabled = false 144 | } 145 | charliecloud { 146 | charliecloud.enabled = true 147 | conda.enabled = false 148 | docker.enabled = false 149 | singularity.enabled = false 150 | podman.enabled = false 151 | shifter.enabled = false 152 | apptainer.enabled = false 153 | } 154 | apptainer { 155 | apptainer.enabled = true 156 | apptainer.autoMounts = true 157 | conda.enabled = false 158 | docker.enabled = false 159 | singularity.enabled = false 160 | podman.enabled = false 161 | shifter.enabled = false 162 | charliecloud.enabled = false 163 | } 164 | gitpod { 165 | executor.name = 'local' 166 | executor.cpus = 4 167 | executor.memory = 8.GB 168 | } 169 | test { includeConfig 'conf/test.config' } 170 | test_full { includeConfig 'conf/test_full.config' } 171 | } 172 | 173 | // Set default registry for Apptainer, Docker, Podman and Singularity independent of -profile 174 | // Will not be used unless Apptainer / Docker / Podman / Singularity are enabled 175 | // Set to your registry if you have a mirror of containers 176 | apptainer.registry = 'quay.io' 177 | docker.registry = 'quay.io' 178 | podman.registry = 'quay.io' 179 | singularity.registry = 'quay.io' 180 | 181 | // Nextflow plugins 182 | plugins { 183 | id 'nf-validation@1.1.3' // Validation of pipeline parameters and creation of an input channel from a sample sheet 184 | } 185 | 186 | // Load igenomes.config if required 187 | if (!params.igenomes_ignore) { 188 | includeConfig 'conf/igenomes.config' 189 | } else { 190 | params.genomes = [:] 191 | } 192 | // Export these variables to prevent local Python/R libraries from conflicting with those in the container 193 | // The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container. 194 | // See https://apeltzer.github.io/post/03-julia-lang-nextflow/ for details on that. Once we have a common agreement on where to keep Julia packages, this is adjustable. 195 | 196 | env { 197 | PYTHONNOUSERSITE = 1 198 | R_PROFILE_USER = "/.Rprofile" 199 | R_ENVIRON_USER = "/.Renviron" 200 | JULIA_DEPOT_PATH = "/usr/local/share/julia" 201 | } 202 | 203 | // Capture exit codes from upstream processes when piping 204 | process.shell = ['/bin/bash', '-euo', 'pipefail'] 205 | 206 | // Disable process selector warnings by default. Use debug profile to enable warnings. 
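// Example (assumed invocation, not part of the template): to surface these warnings while
// developing, combine the debug profile defined above with a container profile, e.g.:
//   nextflow run . -profile debug,docker --input samplesheet.csv --outdir results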
207 | nextflow.enable.configProcessNamesValidation = false 208 | 209 | def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') 210 | timeline { 211 | enabled = true 212 | file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html" 213 | } 214 | report { 215 | enabled = true 216 | file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html" 217 | } 218 | trace { 219 | enabled = true 220 | file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt" 221 | } 222 | dag { 223 | enabled = true 224 | file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html" 225 | } 226 | 227 | manifest { 228 | name = 'nf-core/seqinspector' 229 | author = """Adrien Coulier""" 230 | homePage = 'https://github.com/nf-core/seqinspector' 231 | description = """Pipeline to QC your sequences""" 232 | mainScript = 'main.nf' 233 | nextflowVersion = '!>=23.04.0' 234 | version = '1.0dev' 235 | doi = '' 236 | } 237 | 238 | // Load modules.config for DSL2 module specific options 239 | includeConfig 'conf/modules.config' 240 | 241 | // Function to ensure that resource requirements don't go beyond 242 | // a maximum limit 243 | def check_max(obj, type) { 244 | if (type == 'memory') { 245 | try { 246 | if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) 247 | return params.max_memory as nextflow.util.MemoryUnit 248 | else 249 | return obj 250 | } catch (all) { 251 | println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" 252 | return obj 253 | } 254 | } else if (type == 'time') { 255 | try { 256 | if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) 257 | return params.max_time as nextflow.util.Duration 258 | else 259 | return obj 260 | } catch (all) { 261 | println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" 262 | return obj 263 | } 264 | } else if (type == 'cpus') { 265 | try { 266 | return Math.min( obj, params.max_cpus as int ) 267 | } catch (all) { 268 | println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj" 269 | return obj 270 | } 271 | } 272 | } 273 | -------------------------------------------------------------------------------- /nextflow_schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema", 3 | "$id": "https://raw.githubusercontent.com/nf-core/seqinspector/master/nextflow_schema.json", 4 | "title": "nf-core/seqinspector pipeline parameters", 5 | "description": "Pipeline to QC your sequences", 6 | "type": "object", 7 | "definitions": { 8 | "input_output_options": { 9 | "title": "Input/output options", 10 | "type": "object", 11 | "fa_icon": "fas fa-terminal", 12 | "description": "Define where the pipeline should find input data and save output data.", 13 | "required": ["input", "outdir"], 14 | "properties": { 15 | "input": { 16 | "type": "string", 17 | "format": "file-path", 18 | "exists": true, 19 | "schema": "assets/schema_input.json", 20 | "mimetype": "text/csv", 21 | "pattern": "^\\S+\\.csv$", 22 | "description": "Path to comma-separated file containing information about the samples in the experiment.", 23 | "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row. 
See [usage docs](https://nf-co.re/seqinspector/usage#samplesheet-input).", 24 | "fa_icon": "fas fa-file-csv" 25 | }, 26 | "outdir": { 27 | "type": "string", 28 | "format": "directory-path", 29 | "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.", 30 | "fa_icon": "fas fa-folder-open" 31 | }, 32 | "email": { 33 | "type": "string", 34 | "description": "Email address for completion summary.", 35 | "fa_icon": "fas fa-envelope", 36 | "help_text": "Set this parameter to your e-mail address to get a summary e-mail with details of the run sent to you when the workflow exits. If set in your user config file (`~/.nextflow/config`) then you don't need to specify this on the command line for every run.", 37 | "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$" 38 | }, 39 | "multiqc_title": { 40 | "type": "string", 41 | "description": "MultiQC report title. Printed as page header, used for filename if not otherwise specified.", 42 | "fa_icon": "fas fa-file-signature" 43 | } 44 | } 45 | }, 46 | "reference_genome_options": { 47 | "title": "Reference genome options", 48 | "type": "object", 49 | "fa_icon": "fas fa-dna", 50 | "description": "Reference genome related files and options required for the workflow.", 51 | "properties": { 52 | "genome": { 53 | "type": "string", 54 | "description": "Name of iGenomes reference.", 55 | "fa_icon": "fas fa-book", 56 | "help_text": "If using a reference genome configured in the pipeline using iGenomes, use this parameter to give the ID for the reference. This is then used to build the full paths for all required reference genome files e.g. `--genome GRCh38`. \n\nSee the [nf-core website docs](https://nf-co.re/usage/reference_genomes) for more details." 57 | }, 58 | "fasta": { 59 | "type": "string", 60 | "format": "file-path", 61 | "exists": true, 62 | "mimetype": "text/plain", 63 | "pattern": "^\\S+\\.fn?a(sta)?(\\.gz)?$", 64 | "description": "Path to FASTA genome file.", 65 | "help_text": "This parameter is *mandatory* if `--genome` is not specified. If you don't have a BWA index available this will be generated for you automatically. Combine with `--save_reference` to save BWA index for future runs.", 66 | "fa_icon": "far fa-file-code" 67 | }, 68 | "igenomes_ignore": { 69 | "type": "boolean", 70 | "description": "Do not load the iGenomes reference config.", 71 | "fa_icon": "fas fa-ban", 72 | "hidden": true, 73 | "help_text": "Do not load `igenomes.config` when running the pipeline. You may choose this option if you observe clashes between custom parameters and those supplied in `igenomes.config`." 74 | } 75 | } 76 | }, 77 | "institutional_config_options": { 78 | "title": "Institutional config options", 79 | "type": "object", 80 | "fa_icon": "fas fa-university", 81 | "description": "Parameters used to describe centralised config profiles. These should not be edited.", 82 | "help_text": "The centralised nf-core configuration profiles use a handful of pipeline parameters to describe themselves. This information is then printed to the Nextflow log when you run a pipeline. 
You should not need to change these values when you run a pipeline.", 83 | "properties": { 84 | "custom_config_version": { 85 | "type": "string", 86 | "description": "Git commit id for Institutional configs.", 87 | "default": "master", 88 | "hidden": true, 89 | "fa_icon": "fas fa-users-cog" 90 | }, 91 | "custom_config_base": { 92 | "type": "string", 93 | "description": "Base directory for Institutional configs.", 94 | "default": "https://raw.githubusercontent.com/nf-core/configs/master", 95 | "hidden": true, 96 | "help_text": "If you're running offline, Nextflow will not be able to fetch the institutional config files from the internet. If you don't need them, then this is not a problem. If you do need them, you should download the files from the repo and tell Nextflow where to find them with this parameter.", 97 | "fa_icon": "fas fa-users-cog" 98 | }, 99 | "config_profile_name": { 100 | "type": "string", 101 | "description": "Institutional config name.", 102 | "hidden": true, 103 | "fa_icon": "fas fa-users-cog" 104 | }, 105 | "config_profile_description": { 106 | "type": "string", 107 | "description": "Institutional config description.", 108 | "hidden": true, 109 | "fa_icon": "fas fa-users-cog" 110 | }, 111 | "config_profile_contact": { 112 | "type": "string", 113 | "description": "Institutional config contact information.", 114 | "hidden": true, 115 | "fa_icon": "fas fa-users-cog" 116 | }, 117 | "config_profile_url": { 118 | "type": "string", 119 | "description": "Institutional config URL link.", 120 | "hidden": true, 121 | "fa_icon": "fas fa-users-cog" 122 | } 123 | } 124 | }, 125 | "max_job_request_options": { 126 | "title": "Max job request options", 127 | "type": "object", 128 | "fa_icon": "fab fa-acquisitions-incorporated", 129 | "description": "Set the top limit for requested resources for any single job.", 130 | "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.", 131 | "properties": { 132 | "max_cpus": { 133 | "type": "integer", 134 | "description": "Maximum number of CPUs that can be requested for any single job.", 135 | "default": 16, 136 | "fa_icon": "fas fa-microchip", 137 | "hidden": true, 138 | "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`" 139 | }, 140 | "max_memory": { 141 | "type": "string", 142 | "description": "Maximum amount of memory that can be requested for any single job.", 143 | "default": "128.GB", 144 | "fa_icon": "fas fa-memory", 145 | "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", 146 | "hidden": true, 147 | "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" 148 | }, 149 | "max_time": { 150 | "type": "string", 151 | "description": "Maximum amount of time that can be requested for any single job.", 152 | "default": "240.h", 153 | "fa_icon": "far fa-clock", 154 | "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", 155 | "hidden": true, 156 | "help_text": "Use to set an upper-limit for the time requirement for each process. 
Should be a string in the format integer-unit e.g. `--max_time '2.h'`" 157 | } 158 | } 159 | }, 160 | "generic_options": { 161 | "title": "Generic options", 162 | "type": "object", 163 | "fa_icon": "fas fa-file-import", 164 | "description": "Less common options for the pipeline, typically set in a config file.", 165 | "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", 166 | "properties": { 167 | "help": { 168 | "type": "boolean", 169 | "description": "Display help text.", 170 | "fa_icon": "fas fa-question-circle", 171 | "hidden": true 172 | }, 173 | "version": { 174 | "type": "boolean", 175 | "description": "Display version and exit.", 176 | "fa_icon": "fas fa-question-circle", 177 | "hidden": true 178 | }, 179 | "publish_dir_mode": { 180 | "type": "string", 181 | "default": "copy", 182 | "description": "Method used to save pipeline results to output directory.", 183 | "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", 184 | "fa_icon": "fas fa-copy", 185 | "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], 186 | "hidden": true 187 | }, 188 | "email_on_fail": { 189 | "type": "string", 190 | "description": "Email address for completion summary, only when pipeline fails.", 191 | "fa_icon": "fas fa-exclamation-triangle", 192 | "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$", 193 | "help_text": "An email address to send a summary email to when the pipeline is completed - ONLY sent if the pipeline does not exit successfully.", 194 | "hidden": true 195 | }, 196 | "plaintext_email": { 197 | "type": "boolean", 198 | "description": "Send plain-text email instead of HTML.", 199 | "fa_icon": "fas fa-remove-format", 200 | "hidden": true 201 | }, 202 | "max_multiqc_email_size": { 203 | "type": "string", 204 | "description": "File size limit when attaching MultiQC reports to summary emails.", 205 | "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", 206 | "default": "25.MB", 207 | "fa_icon": "fas fa-file-upload", 208 | "hidden": true 209 | }, 210 | "monochrome_logs": { 211 | "type": "boolean", 212 | "description": "Do not use coloured log outputs.", 213 | "fa_icon": "fas fa-palette", 214 | "hidden": true 215 | }, 216 | "hook_url": { 217 | "type": "string", 218 | "description": "Incoming hook URL for messaging service", 219 | "fa_icon": "fas fa-people-group", 220 | "help_text": "Incoming hook URL for messaging service. Currently, MS Teams and Slack are supported.", 221 | "hidden": true 222 | }, 223 | "multiqc_config": { 224 | "type": "string", 225 | "format": "file-path", 226 | "description": "Custom config file to supply to MultiQC.", 227 | "fa_icon": "fas fa-cog", 228 | "hidden": true 229 | }, 230 | "multiqc_logo": { 231 | "type": "string", 232 | "description": "Custom logo file to supply to MultiQC. 
File name must also be set in the MultiQC config file", 233 | "fa_icon": "fas fa-image", 234 | "hidden": true 235 | }, 236 | "multiqc_methods_description": { 237 | "type": "string", 238 | "description": "Custom MultiQC yaml file containing HTML including a methods description.", 239 | "fa_icon": "fas fa-cog" 240 | }, 241 | "validate_params": { 242 | "type": "boolean", 243 | "description": "Boolean whether to validate parameters against the schema at runtime", 244 | "default": true, 245 | "fa_icon": "fas fa-check-square", 246 | "hidden": true 247 | }, 248 | "validationShowHiddenParams": { 249 | "type": "boolean", 250 | "fa_icon": "far fa-eye-slash", 251 | "description": "Show all params when using `--help`", 252 | "hidden": true, 253 | "help_text": "By default, parameters set as _hidden_ in the schema are not shown on the command line when a user runs with `--help`. Specifying this option will tell the pipeline to show all parameters." 254 | }, 255 | "validationFailUnrecognisedParams": { 256 | "type": "boolean", 257 | "fa_icon": "far fa-check-circle", 258 | "description": "Validation of parameters fails when an unrecognised parameter is found.", 259 | "hidden": true, 260 | "help_text": "By default, when an unrecognised parameter is found, it returns a warinig." 261 | }, 262 | "validationLenientMode": { 263 | "type": "boolean", 264 | "fa_icon": "far fa-check-circle", 265 | "description": "Validation of parameters in lenient more.", 266 | "hidden": true, 267 | "help_text": "Allows string values that are parseable as numbers or booleans. For further information see [JSONSchema docs](https://github.com/everit-org/json-schema#lenient-mode)." 268 | } 269 | } 270 | } 271 | }, 272 | "allOf": [ 273 | { 274 | "$ref": "#/definitions/input_output_options" 275 | }, 276 | { 277 | "$ref": "#/definitions/reference_genome_options" 278 | }, 279 | { 280 | "$ref": "#/definitions/institutional_config_options" 281 | }, 282 | { 283 | "$ref": "#/definitions/max_job_request_options" 284 | }, 285 | { 286 | "$ref": "#/definitions/generic_options" 287 | } 288 | ] 289 | } 290 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # Config file for Python. Mostly used to configure linting of bin/*.py with Ruff. 2 | # Should be kept the same as nf-core/tools to avoid fighting with template synchronisation. 
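# Example (assumed local usage, not defined by this file): the same checks can be run by hand with
# Ruff's standard CLI, which picks up the settings below:
#   ruff check bin/     # lint using the [tool.ruff.lint] rule selection
#   ruff format bin/    # format to the 120-character line length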
3 | [tool.ruff] 4 | line-length = 120 5 | target-version = "py38" 6 | cache-dir = "~/.cache/ruff" 7 | 8 | [tool.ruff.lint] 9 | select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] 10 | 11 | [tool.ruff.lint.isort] 12 | known-first-party = ["nf_core"] 13 | 14 | [tool.ruff.lint.per-file-ignores] 15 | "__init__.py" = ["E402", "F401"] 16 | -------------------------------------------------------------------------------- /subworkflows/local/utils_nfcore_seqinspector_pipeline/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Subworkflow with functionality specific to the nf-core/seqinspector pipeline 3 | // 4 | 5 | /* 6 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 7 | IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS 8 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 9 | */ 10 | 11 | include { UTILS_NFVALIDATION_PLUGIN } from '../../nf-core/utils_nfvalidation_plugin' 12 | include { paramsSummaryMap } from 'plugin/nf-validation' 13 | include { fromSamplesheet } from 'plugin/nf-validation' 14 | include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline' 15 | include { completionEmail } from '../../nf-core/utils_nfcore_pipeline' 16 | include { completionSummary } from '../../nf-core/utils_nfcore_pipeline' 17 | include { dashedLine } from '../../nf-core/utils_nfcore_pipeline' 18 | include { nfCoreLogo } from '../../nf-core/utils_nfcore_pipeline' 19 | include { imNotification } from '../../nf-core/utils_nfcore_pipeline' 20 | include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline' 21 | include { workflowCitation } from '../../nf-core/utils_nfcore_pipeline' 22 | 23 | /* 24 | ======================================================================================== 25 | SUBWORKFLOW TO INITIALISE PIPELINE 26 | ======================================================================================== 27 | */ 28 | 29 | workflow PIPELINE_INITIALISATION { 30 | 31 | take: 32 | version // boolean: Display version and exit 33 | help // boolean: Display help text 34 | validate_params // boolean: Boolean whether to validate parameters against the schema at runtime 35 | monochrome_logs // boolean: Do not use coloured log outputs 36 | nextflow_cli_args // array: List of positional nextflow CLI args 37 | outdir // string: The output directory where the results will be saved 38 | input // string: Path to input samplesheet 39 | 40 | main: 41 | 42 | ch_versions = Channel.empty() 43 | 44 | // 45 | // Print version and exit if required and dump pipeline parameters to JSON file 46 | // 47 | UTILS_NEXTFLOW_PIPELINE ( 48 | version, 49 | true, 50 | outdir, 51 | workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1 52 | ) 53 | 54 | // 55 | // Validate parameters and generate parameter summary to stdout 56 | // 57 | pre_help_text = nfCoreLogo(monochrome_logs) 58 | post_help_text = '\n' + workflowCitation() + '\n' + dashedLine(monochrome_logs) 59 | def String workflow_command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --outdir " 60 | UTILS_NFVALIDATION_PLUGIN ( // Validates parameters against $projectDir/nextflow_schema.json 61 | help, 62 | workflow_command, 63 | pre_help_text, 64 | post_help_text, 65 | validate_params, 66 | "nextflow_schema.json" 67 | ) 68 | 69 | // 70 | // Check config provided to the pipeline 71 | // 72 | UTILS_NFCORE_PIPELINE ( 73 | nextflow_cli_args 74 | ) 75 | // 76 | // Custom 
validation for pipeline parameters 77 | // 78 | validateInputParameters() // Runs additional validation that is not done by $projectDir/nextflow_schema.json 79 | 80 | // 81 | // Create channel from input file provided through params.input 82 | // 83 | Channel 84 | .fromSamplesheet("input") // Validates samplesheet against $projectDir/assets/schema_input.json. Path to validation schema is defined by $projectDir/nextflow_schema.json 85 | .map { 86 | meta, fastq_1, fastq_2 -> 87 | def id_string = "${meta.sample}_${meta.project ?: "ungrouped"}_${meta.lane}" 88 | def updated_meta = meta + [ id: id_string ] 89 | if (!fastq_2) { 90 | return [ updated_meta.id, updated_meta + [ single_end:true ], [ fastq_1 ] ] 91 | } else { 92 | return [ updated_meta.id, updated_meta + [ single_end:false ], [ fastq_1, fastq_2 ] ] 93 | } 94 | } 95 | .groupTuple() 96 | .map { 97 | validateInputSamplesheet(it) // Applies additional group validation checks that schema_input.json cannot do. 98 | } 99 | .transpose() // Replace the map below 100 | // .map { 101 | // meta, fastqs -> 102 | // return [ meta, fastqs.flatten() ] 103 | // } 104 | .view() 105 | .set { ch_samplesheet } 106 | 107 | emit: 108 | samplesheet = ch_samplesheet 109 | versions = ch_versions 110 | } 111 | 112 | /* 113 | ======================================================================================== 114 | SUBWORKFLOW FOR PIPELINE COMPLETION 115 | ======================================================================================== 116 | */ 117 | 118 | workflow PIPELINE_COMPLETION { 119 | 120 | take: 121 | email // string: email address 122 | email_on_fail // string: email address sent on pipeline failure 123 | plaintext_email // boolean: Send plain-text email instead of HTML 124 | outdir // path: Path to output directory where results will be published 125 | monochrome_logs // boolean: Disable ANSI colour codes in log output 126 | hook_url // string: hook URL for notifications 127 | multiqc_report // string: Path to MultiQC report 128 | 129 | main: 130 | 131 | summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") 132 | 133 | // 134 | // Completion email and summary 135 | // 136 | workflow.onComplete { 137 | if (email || email_on_fail) { 138 | completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList()) 139 | } 140 | 141 | completionSummary(monochrome_logs) 142 | 143 | if (hook_url) { 144 | imNotification(summary_params, hook_url) 145 | } 146 | } 147 | } 148 | 149 | /* 150 | ======================================================================================== 151 | FUNCTIONS 152 | ======================================================================================== 153 | */ 154 | // 155 | // Check and validate pipeline parameters 156 | // 157 | def validateInputParameters() { 158 | // genomeExistsError() 159 | 160 | // TODO: Add code to further validate pipeline parameters here 161 | } 162 | 163 | // 164 | // Validate channels from input samplesheet 165 | // 166 | def validateInputSamplesheet(input) { 167 | def (metas, fastqs) = input[1..2] 168 | 169 | // Check that multiple runs of the same sample are of the same datatype i.e. single-end / paired-end 170 | def endedness_ok = metas.collect{ it.single_end }.unique().size == 1 171 | if (!endedness_ok) { 172 | error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. 
single-end or paired-end: ${metas[0].id}") 173 | } 174 | 175 | return [ metas[0], fastqs ] 176 | } 177 | // 178 | // Get attribute from genome config file e.g. fasta 179 | // 180 | def getGenomeAttribute(attribute) { 181 | if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) { 182 | if (params.genomes[ params.genome ].containsKey(attribute)) { 183 | return params.genomes[ params.genome ][ attribute ] 184 | } 185 | } 186 | return null 187 | } 188 | 189 | // 190 | // Exit pipeline if incorrect --genome key provided 191 | // 192 | def genomeExistsError() { 193 | if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) { 194 | def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + 195 | " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" + 196 | " Currently, the available genome keys are:\n" + 197 | " ${params.genomes.keySet().join(", ")}\n" + 198 | "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" 199 | error(error_string) 200 | } 201 | } 202 | 203 | // 204 | // Generate methods description for MultiQC 205 | // 206 | def toolCitationText() { 207 | // TODO nf-core: Optionally add in-text citation tools to this list. 208 | // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "Tool (Foo et al. 2023)" : "", 209 | // Uncomment function in methodsDescriptionText to render in MultiQC report 210 | def citation_text = [ 211 | "Tools used in the workflow included:", 212 | "FastQC (Andrews 2010),", 213 | "MultiQC (Ewels et al. 2016)", 214 | "." 215 | ].join(' ').trim() 216 | 217 | return citation_text 218 | } 219 | 220 | def toolBibliographyText() { 221 | // TODO nf-core: Optionally add bibliographic entries to this list. 222 | // Can use ternary operators to dynamically construct based conditions, e.g. params["run_xyz"] ? "
<li>Author (2023) Pub name, Journal, DOI</li>" : "", 223 | // Uncomment function in methodsDescriptionText to render in MultiQC report 224 | def reference_text = [ 225 | "
<li>Andrews S, (2010) FastQC, URL: https://www.bioinformatics.babraham.ac.uk/projects/fastqc/).</li>", 226 | "
<li>Ewels, P., Magnusson, M., Lundin, S., & Käller, M. (2016). MultiQC: summarize analysis results for multiple tools and samples in a single report. Bioinformatics, 32(19), 3047–3048. doi: 10.1093/bioinformatics/btw354</li>" 227 | ].join(' ').trim() 228 | 229 | return reference_text 230 | } 231 | 232 | def methodsDescriptionText(mqc_methods_yaml) { 233 | // Convert to a named map so it can be used with the familiar NXF ${workflow} variable syntax in the MultiQC YML file 234 | def meta = [:] 235 | meta.workflow = workflow.toMap() 236 | meta["manifest_map"] = workflow.manifest.toMap() 237 | 238 | // Pipeline DOI 239 | meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : "" 240 | meta["nodoi_text"] = meta.manifest_map.doi ? "": "<li>If available, make sure to update the text to include the Zenodo DOI of the version of the pipeline used.</li>
  • " 241 | 242 | // Tool references 243 | meta["tool_citations"] = "" 244 | meta["tool_bibliography"] = "" 245 | 246 | // TODO nf-core: Only uncomment below if logic in toolCitationText/toolBibliographyText has been filled! 247 | // meta["tool_citations"] = toolCitationText().replaceAll(", \\.", ".").replaceAll("\\. \\.", ".").replaceAll(", \\.", ".") 248 | // meta["tool_bibliography"] = toolBibliographyText() 249 | 250 | 251 | def methods_text = mqc_methods_yaml.text 252 | 253 | def engine = new groovy.text.SimpleTemplateEngine() 254 | def description_html = engine.createTemplate(methods_text).make(meta) 255 | 256 | return description_html.toString() 257 | } 258 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Subworkflow with functionality that may be useful for any Nextflow pipeline 3 | // 4 | 5 | import org.yaml.snakeyaml.Yaml 6 | import groovy.json.JsonOutput 7 | import nextflow.extension.FilesEx 8 | 9 | /* 10 | ======================================================================================== 11 | SUBWORKFLOW DEFINITION 12 | ======================================================================================== 13 | */ 14 | 15 | workflow UTILS_NEXTFLOW_PIPELINE { 16 | 17 | take: 18 | print_version // boolean: print version 19 | dump_parameters // boolean: dump parameters 20 | outdir // path: base directory used to publish pipeline results 21 | check_conda_channels // boolean: check conda channels 22 | 23 | main: 24 | 25 | // 26 | // Print workflow version and exit on --version 27 | // 28 | if (print_version) { 29 | log.info "${workflow.manifest.name} ${getWorkflowVersion()}" 30 | System.exit(0) 31 | } 32 | 33 | // 34 | // Dump pipeline parameters to a JSON file 35 | // 36 | if (dump_parameters && outdir) { 37 | dumpParametersToJSON(outdir) 38 | } 39 | 40 | // 41 | // When running with Conda, warn if channels have not been set-up appropriately 42 | // 43 | if (check_conda_channels) { 44 | checkCondaChannels() 45 | } 46 | 47 | emit: 48 | dummy_emit = true 49 | } 50 | 51 | /* 52 | ======================================================================================== 53 | FUNCTIONS 54 | ======================================================================================== 55 | */ 56 | 57 | // 58 | // Generate version string 59 | // 60 | def getWorkflowVersion() { 61 | String version_string = "" 62 | if (workflow.manifest.version) { 63 | def prefix_v = workflow.manifest.version[0] != 'v' ? 
'v' : '' 64 | version_string += "${prefix_v}${workflow.manifest.version}" 65 | } 66 | 67 | if (workflow.commitId) { 68 | def git_shortsha = workflow.commitId.substring(0, 7) 69 | version_string += "-g${git_shortsha}" 70 | } 71 | 72 | return version_string 73 | } 74 | 75 | // 76 | // Dump pipeline parameters to a JSON file 77 | // 78 | def dumpParametersToJSON(outdir) { 79 | def timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss') 80 | def filename = "params_${timestamp}.json" 81 | def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") 82 | def jsonStr = JsonOutput.toJson(params) 83 | temp_pf.text = JsonOutput.prettyPrint(jsonStr) 84 | 85 | FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") 86 | temp_pf.delete() 87 | } 88 | 89 | // 90 | // When running with -profile conda, warn if channels have not been set-up appropriately 91 | // 92 | def checkCondaChannels() { 93 | Yaml parser = new Yaml() 94 | def channels = [] 95 | try { 96 | def config = parser.load("conda config --show channels".execute().text) 97 | channels = config.channels 98 | } catch(NullPointerException | IOException e) { 99 | log.warn "Could not verify conda channel configuration." 100 | return 101 | } 102 | 103 | // Check that all channels are present 104 | // This channel list is ordered by required channel priority. 105 | def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] 106 | def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean 107 | 108 | // Check that they are in the right order 109 | def channel_priority_violation = false 110 | def n = required_channels_in_order.size() 111 | for (int i = 0; i < n - 1; i++) { 112 | channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) 113 | } 114 | 115 | if (channels_missing | channel_priority_violation) { 116 | log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + 117 | " There is a problem with your Conda configuration!\n\n" + 118 | " You will need to set-up the conda-forge and bioconda channels correctly.\n" + 119 | " Please refer to https://bioconda.github.io/\n" + 120 | " The observed channel order is \n" + 121 | " ${channels}\n" + 122 | " but the following channel order is required:\n" + 123 | " ${required_channels_in_order}\n" + 124 | "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NEXTFLOW_PIPELINE" 3 | description: Subworkflow with functionality that may be useful for any Nextflow pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - print_version: 12 | type: boolean 13 | description: | 14 | Print the version of the pipeline and exit 15 | - dump_parameters: 16 | type: boolean 17 | description: | 18 | Dump the parameters of the pipeline to a JSON file 19 | - output_directory: 20 | type: directory 21 | description: Path to output dir to write JSON file to. 
22 | pattern: "results/" 23 | - check_conda_channel: 24 | type: boolean 25 | description: | 26 | Check if the conda channel priority is correct. 27 | output: 28 | - dummy_emit: 29 | type: boolean 30 | description: | 31 | Dummy emit to make nf-core subworkflows lint happy 32 | authors: 33 | - "@adamrtalbot" 34 | - "@drpatelh" 35 | maintainers: 36 | - "@adamrtalbot" 37 | - "@drpatelh" 38 | - "@maxulysse" 39 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" 6 | config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" 7 | tag 'subworkflows' 8 | tag 'utils_nextflow_pipeline' 9 | tag 'subworkflows/utils_nextflow_pipeline' 10 | 11 | test("Test Function getWorkflowVersion") { 12 | 13 | function "getWorkflowVersion" 14 | 15 | then { 16 | assertAll( 17 | { assert function.success }, 18 | { assert snapshot(function.result).match() } 19 | ) 20 | } 21 | } 22 | 23 | test("Test Function dumpParametersToJSON") { 24 | 25 | function "dumpParametersToJSON" 26 | 27 | when { 28 | function { 29 | """ 30 | // define inputs of the function here. Example: 31 | input[0] = "$outputDir" 32 | """.stripIndent() 33 | } 34 | } 35 | 36 | then { 37 | assertAll( 38 | { assert function.success } 39 | ) 40 | } 41 | } 42 | 43 | test("Test Function checkCondaChannels") { 44 | 45 | function "checkCondaChannels" 46 | 47 | then { 48 | assertAll( 49 | { assert function.success }, 50 | { assert snapshot(function.result).match() } 51 | ) 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Test Function getWorkflowVersion": { 3 | "content": [ 4 | "v9.9.9" 5 | ], 6 | "meta": { 7 | "nf-test": "0.8.4", 8 | "nextflow": "23.10.1" 9 | }, 10 | "timestamp": "2024-02-28T12:02:05.308243" 11 | }, 12 | "Test Function checkCondaChannels": { 13 | "content": null, 14 | "meta": { 15 | "nf-test": "0.8.4", 16 | "nextflow": "23.10.1" 17 | }, 18 | "timestamp": "2024-02-28T12:02:12.425833" 19 | } 20 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NEXTFLOW_PIPELINE" 4 | script "../main.nf" 5 | config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" 6 | workflow "UTILS_NEXTFLOW_PIPELINE" 7 | tag 'subworkflows' 8 | tag 'utils_nextflow_pipeline' 9 | tag 'subworkflows/utils_nextflow_pipeline' 10 | 11 | test("Should run no inputs") { 12 | 13 | when { 14 | workflow { 15 | """ 16 | print_version = false 17 | dump_parameters = false 18 | outdir = null 19 | check_conda_channels = false 20 | 21 | input[0] = print_version 22 | input[1] = dump_parameters 23 | input[2] = outdir 24 | input[3] = check_conda_channels 25 | """ 26 | } 27 | } 28 | 29 | then { 30 | assertAll( 31 | { assert workflow.success } 32 | ) 33 | } 34 | } 35 | 36 | test("Should print version") { 37 | 38 | when { 39 | workflow { 40 | """ 41 | print_version = true 42 | dump_parameters = 
false 43 | outdir = null 44 | check_conda_channels = false 45 | 46 | input[0] = print_version 47 | input[1] = dump_parameters 48 | input[2] = outdir 49 | input[3] = check_conda_channels 50 | """ 51 | } 52 | } 53 | 54 | then { 55 | assertAll( 56 | { assert workflow.success }, 57 | { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } 58 | ) 59 | } 60 | } 61 | 62 | test("Should dump params") { 63 | 64 | when { 65 | workflow { 66 | """ 67 | print_version = false 68 | dump_parameters = true 69 | outdir = 'results' 70 | check_conda_channels = false 71 | 72 | input[0] = false 73 | input[1] = true 74 | input[2] = outdir 75 | input[3] = false 76 | """ 77 | } 78 | } 79 | 80 | then { 81 | assertAll( 82 | { assert workflow.success } 83 | ) 84 | } 85 | } 86 | 87 | test("Should not create params JSON if no output directory") { 88 | 89 | when { 90 | workflow { 91 | """ 92 | print_version = false 93 | dump_parameters = true 94 | outdir = null 95 | check_conda_channels = false 96 | 97 | input[0] = false 98 | input[1] = true 99 | input[2] = outdir 100 | input[3] = false 101 | """ 102 | } 103 | } 104 | 105 | then { 106 | assertAll( 107 | { assert workflow.success } 108 | ) 109 | } 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nextflow_pipeline: 2 | - subworkflows/nf-core/utils_nextflow_pipeline/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFCORE_PIPELINE" 3 | description: Subworkflow with utility functions specific to the nf-core pipeline template 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - nextflow_cli_args: 12 | type: list 13 | description: | 14 | Nextflow CLI positional arguments 15 | output: 16 | - success: 17 | type: boolean 18 | description: | 19 | Dummy output to indicate success 20 | authors: 21 | - "@adamrtalbot" 22 | maintainers: 23 | - "@adamrtalbot" 24 | - "@maxulysse" 25 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "../main.nf" 6 | config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "utils_nfcore_pipeline" 10 | tag "subworkflows/utils_nfcore_pipeline" 11 | 12 | test("Test Function checkConfigProvided") { 13 | 14 | function "checkConfigProvided" 15 | 16 | then { 
17 | assertAll( 18 | { assert function.success }, 19 | { assert snapshot(function.result).match() } 20 | ) 21 | } 22 | } 23 | 24 | test("Test Function checkProfileProvided") { 25 | 26 | function "checkProfileProvided" 27 | 28 | when { 29 | function { 30 | """ 31 | input[0] = [] 32 | """ 33 | } 34 | } 35 | 36 | then { 37 | assertAll( 38 | { assert function.success }, 39 | { assert snapshot(function.result).match() } 40 | ) 41 | } 42 | } 43 | 44 | test("Test Function workflowCitation") { 45 | 46 | function "workflowCitation" 47 | 48 | then { 49 | assertAll( 50 | { assert function.success }, 51 | { assert snapshot(function.result).match() } 52 | ) 53 | } 54 | } 55 | 56 | test("Test Function nfCoreLogo") { 57 | 58 | function "nfCoreLogo" 59 | 60 | when { 61 | function { 62 | """ 63 | input[0] = false 64 | """ 65 | } 66 | } 67 | 68 | then { 69 | assertAll( 70 | { assert function.success }, 71 | { assert snapshot(function.result).match() } 72 | ) 73 | } 74 | } 75 | 76 | test("Test Function dashedLine") { 77 | 78 | function "dashedLine" 79 | 80 | when { 81 | function { 82 | """ 83 | input[0] = false 84 | """ 85 | } 86 | } 87 | 88 | then { 89 | assertAll( 90 | { assert function.success }, 91 | { assert snapshot(function.result).match() } 92 | ) 93 | } 94 | } 95 | 96 | test("Test Function without logColours") { 97 | 98 | function "logColours" 99 | 100 | when { 101 | function { 102 | """ 103 | input[0] = true 104 | """ 105 | } 106 | } 107 | 108 | then { 109 | assertAll( 110 | { assert function.success }, 111 | { assert snapshot(function.result).match() } 112 | ) 113 | } 114 | } 115 | 116 | test("Test Function with logColours") { 117 | function "logColours" 118 | 119 | when { 120 | function { 121 | """ 122 | input[0] = false 123 | """ 124 | } 125 | } 126 | 127 | then { 128 | assertAll( 129 | { assert function.success }, 130 | { assert snapshot(function.result).match() } 131 | ) 132 | } 133 | } 134 | } 135 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Test Function checkProfileProvided": { 3 | "content": null, 4 | "meta": { 5 | "nf-test": "0.8.4", 6 | "nextflow": "23.10.1" 7 | }, 8 | "timestamp": "2024-02-28T12:03:03.360873" 9 | }, 10 | "Test Function checkConfigProvided": { 11 | "content": [ 12 | true 13 | ], 14 | "meta": { 15 | "nf-test": "0.8.4", 16 | "nextflow": "23.10.1" 17 | }, 18 | "timestamp": "2024-02-28T12:02:59.729647" 19 | }, 20 | "Test Function nfCoreLogo": { 21 | "content": [ 22 | "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n" 23 | ], 24 | "meta": { 25 | "nf-test": "0.8.4", 26 | "nextflow": "23.10.1" 27 | }, 28 | "timestamp": "2024-02-28T12:03:10.562934" 29 | }, 30 | "Test Function workflowCitation": { 31 | "content": [ 32 | "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software 
dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md" 33 | ], 34 | "meta": { 35 | "nf-test": "0.8.4", 36 | "nextflow": "23.10.1" 37 | }, 38 | "timestamp": "2024-02-28T12:03:07.019761" 39 | }, 40 | "Test Function without logColours": { 41 | "content": [ 42 | { 43 | "reset": "", 44 | "bold": "", 45 | "dim": "", 46 | "underlined": "", 47 | "blink": "", 48 | "reverse": "", 49 | "hidden": "", 50 | "black": "", 51 | "red": "", 52 | "green": "", 53 | "yellow": "", 54 | "blue": "", 55 | "purple": "", 56 | "cyan": "", 57 | "white": "", 58 | "bblack": "", 59 | "bred": "", 60 | "bgreen": "", 61 | "byellow": "", 62 | "bblue": "", 63 | "bpurple": "", 64 | "bcyan": "", 65 | "bwhite": "", 66 | "ublack": "", 67 | "ured": "", 68 | "ugreen": "", 69 | "uyellow": "", 70 | "ublue": "", 71 | "upurple": "", 72 | "ucyan": "", 73 | "uwhite": "", 74 | "iblack": "", 75 | "ired": "", 76 | "igreen": "", 77 | "iyellow": "", 78 | "iblue": "", 79 | "ipurple": "", 80 | "icyan": "", 81 | "iwhite": "", 82 | "biblack": "", 83 | "bired": "", 84 | "bigreen": "", 85 | "biyellow": "", 86 | "biblue": "", 87 | "bipurple": "", 88 | "bicyan": "", 89 | "biwhite": "" 90 | } 91 | ], 92 | "meta": { 93 | "nf-test": "0.8.4", 94 | "nextflow": "23.10.1" 95 | }, 96 | "timestamp": "2024-02-28T12:03:17.969323" 97 | }, 98 | "Test Function dashedLine": { 99 | "content": [ 100 | "-\u001b[2m----------------------------------------------------\u001b[0m-" 101 | ], 102 | "meta": { 103 | "nf-test": "0.8.4", 104 | "nextflow": "23.10.1" 105 | }, 106 | "timestamp": "2024-02-28T12:03:14.366181" 107 | }, 108 | "Test Function with logColours": { 109 | "content": [ 110 | { 111 | "reset": "\u001b[0m", 112 | "bold": "\u001b[1m", 113 | "dim": "\u001b[2m", 114 | "underlined": "\u001b[4m", 115 | "blink": "\u001b[5m", 116 | "reverse": "\u001b[7m", 117 | "hidden": "\u001b[8m", 118 | "black": "\u001b[0;30m", 119 | "red": "\u001b[0;31m", 120 | "green": "\u001b[0;32m", 121 | "yellow": "\u001b[0;33m", 122 | "blue": "\u001b[0;34m", 123 | "purple": "\u001b[0;35m", 124 | "cyan": "\u001b[0;36m", 125 | "white": "\u001b[0;37m", 126 | "bblack": "\u001b[1;30m", 127 | "bred": "\u001b[1;31m", 128 | "bgreen": "\u001b[1;32m", 129 | "byellow": "\u001b[1;33m", 130 | "bblue": "\u001b[1;34m", 131 | "bpurple": "\u001b[1;35m", 132 | "bcyan": "\u001b[1;36m", 133 | "bwhite": "\u001b[1;37m", 134 | "ublack": "\u001b[4;30m", 135 | "ured": "\u001b[4;31m", 136 | "ugreen": "\u001b[4;32m", 137 | "uyellow": "\u001b[4;33m", 138 | "ublue": "\u001b[4;34m", 139 | "upurple": "\u001b[4;35m", 140 | "ucyan": "\u001b[4;36m", 141 | "uwhite": "\u001b[4;37m", 142 | "iblack": "\u001b[0;90m", 143 | "ired": "\u001b[0;91m", 144 | "igreen": "\u001b[0;92m", 145 | "iyellow": "\u001b[0;93m", 146 | "iblue": "\u001b[0;94m", 147 | "ipurple": "\u001b[0;95m", 148 | "icyan": "\u001b[0;96m", 149 | "iwhite": "\u001b[0;97m", 150 | "biblack": "\u001b[1;90m", 151 | "bired": "\u001b[1;91m", 152 | "bigreen": "\u001b[1;92m", 153 | "biyellow": "\u001b[1;93m", 154 | "biblue": "\u001b[1;94m", 155 | "bipurple": "\u001b[1;95m", 156 | "bicyan": "\u001b[1;96m", 157 | "biwhite": "\u001b[1;97m" 158 | } 159 | ], 160 | "meta": { 161 | "nf-test": "0.8.4", 162 | "nextflow": "23.10.1" 163 | }, 164 | "timestamp": "2024-02-28T12:03:21.714424" 165 | } 166 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | 
name "Test Workflow UTILS_NFCORE_PIPELINE" 4 | script "../main.nf" 5 | config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" 6 | workflow "UTILS_NFCORE_PIPELINE" 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "utils_nfcore_pipeline" 10 | tag "subworkflows/utils_nfcore_pipeline" 11 | 12 | test("Should run without failures") { 13 | 14 | when { 15 | workflow { 16 | """ 17 | input[0] = [] 18 | """ 19 | } 20 | } 21 | 22 | then { 23 | assertAll( 24 | { assert workflow.success }, 25 | { assert snapshot(workflow.out).match() } 26 | ) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | true 7 | ], 8 | "valid_config": [ 9 | true 10 | ] 11 | } 12 | ], 13 | "meta": { 14 | "nf-test": "0.8.4", 15 | "nextflow": "23.10.1" 16 | }, 17 | "timestamp": "2024-02-28T12:03:25.726491" 18 | } 19 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nfcore_pipeline: 2 | - subworkflows/nf-core/utils_nfcore_pipeline/** 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Subworkflow that uses the nf-validation plugin to render help text and parameter summary 3 | // 4 | 5 | /* 6 | ======================================================================================== 7 | IMPORT NF-VALIDATION PLUGIN 8 | ======================================================================================== 9 | */ 10 | 11 | include { paramsHelp } from 'plugin/nf-validation' 12 | include { paramsSummaryLog } from 'plugin/nf-validation' 13 | include { validateParameters } from 'plugin/nf-validation' 14 | 15 | /* 16 | ======================================================================================== 17 | SUBWORKFLOW DEFINITION 18 | ======================================================================================== 19 | */ 20 | 21 | workflow UTILS_NFVALIDATION_PLUGIN { 22 | 23 | take: 24 | print_help // boolean: print help 25 | workflow_command // string: default commmand used to run pipeline 26 | pre_help_text // string: string to be printed before help text and summary log 27 | post_help_text // string: string to be printed after help text and summary log 28 | validate_params // boolean: validate parameters 29 | schema_filename // path: JSON schema file, null to use default value 30 | 31 | main: 32 | 33 | log.debug "Using schema file: ${schema_filename}" 34 | 35 | // Default values for strings 36 | pre_help_text = pre_help_text ?: '' 37 | 
post_help_text = post_help_text ?: '' 38 | workflow_command = workflow_command ?: '' 39 | 40 | // 41 | // Print help message if needed 42 | // 43 | if (print_help) { 44 | log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text 45 | System.exit(0) 46 | } 47 | 48 | // 49 | // Print parameter summary to stdout 50 | // 51 | log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text 52 | 53 | // 54 | // Validate parameters relative to the parameter JSON schema 55 | // 56 | if (validate_params){ 57 | validateParameters(parameters_schema: schema_filename) 58 | } 59 | 60 | emit: 61 | dummy_emit = true 62 | } 63 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFVALIDATION_PLUGIN" 3 | description: Use nf-validation to initiate and validate a pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - validation 9 | components: [] 10 | input: 11 | - print_help: 12 | type: boolean 13 | description: | 14 | Print help message and exit 15 | - workflow_command: 16 | type: string 17 | description: | 18 | The command to run the workflow e.g. "nextflow run main.nf" 19 | - pre_help_text: 20 | type: string 21 | description: | 22 | Text to print before the help message 23 | - post_help_text: 24 | type: string 25 | description: | 26 | Text to print after the help message 27 | - validate_params: 28 | type: boolean 29 | description: | 30 | Validate the parameters and error if invalid. 31 | - schema_filename: 32 | type: string 33 | description: | 34 | The filename of the schema to validate against. 
35 | output: 36 | - dummy_emit: 37 | type: boolean 38 | description: | 39 | Dummy emit to make nf-core subworkflows lint happy 40 | authors: 41 | - "@adamrtalbot" 42 | maintainers: 43 | - "@adamrtalbot" 44 | - "@maxulysse" 45 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NFVALIDATION_PLUGIN" 4 | script "../main.nf" 5 | workflow "UTILS_NFVALIDATION_PLUGIN" 6 | tag "subworkflows" 7 | tag "subworkflows_nfcore" 8 | tag "plugin/nf-validation" 9 | tag "'plugin/nf-validation'" 10 | tag "utils_nfvalidation_plugin" 11 | tag "subworkflows/utils_nfvalidation_plugin" 12 | 13 | test("Should run nothing") { 14 | 15 | when { 16 | 17 | params { 18 | monochrome_logs = true 19 | test_data = '' 20 | } 21 | 22 | workflow { 23 | """ 24 | help = false 25 | workflow_command = null 26 | pre_help_text = null 27 | post_help_text = null 28 | validate_params = false 29 | schema_filename = "$moduleTestDir/nextflow_schema.json" 30 | 31 | input[0] = help 32 | input[1] = workflow_command 33 | input[2] = pre_help_text 34 | input[3] = post_help_text 35 | input[4] = validate_params 36 | input[5] = schema_filename 37 | """ 38 | } 39 | } 40 | 41 | then { 42 | assertAll( 43 | { assert workflow.success } 44 | ) 45 | } 46 | } 47 | 48 | test("Should run help") { 49 | 50 | 51 | when { 52 | 53 | params { 54 | monochrome_logs = true 55 | test_data = '' 56 | } 57 | workflow { 58 | """ 59 | help = true 60 | workflow_command = null 61 | pre_help_text = null 62 | post_help_text = null 63 | validate_params = false 64 | schema_filename = "$moduleTestDir/nextflow_schema.json" 65 | 66 | input[0] = help 67 | input[1] = workflow_command 68 | input[2] = pre_help_text 69 | input[3] = post_help_text 70 | input[4] = validate_params 71 | input[5] = schema_filename 72 | """ 73 | } 74 | } 75 | 76 | then { 77 | assertAll( 78 | { assert workflow.success }, 79 | { assert workflow.exitStatus == 0 }, 80 | { assert workflow.stdout.any { it.contains('Input/output options') } }, 81 | { assert workflow.stdout.any { it.contains('--outdir') } } 82 | ) 83 | } 84 | } 85 | 86 | test("Should run help with command") { 87 | 88 | when { 89 | 90 | params { 91 | monochrome_logs = true 92 | test_data = '' 93 | } 94 | workflow { 95 | """ 96 | help = true 97 | workflow_command = "nextflow run noorg/doesntexist" 98 | pre_help_text = null 99 | post_help_text = null 100 | validate_params = false 101 | schema_filename = "$moduleTestDir/nextflow_schema.json" 102 | 103 | input[0] = help 104 | input[1] = workflow_command 105 | input[2] = pre_help_text 106 | input[3] = post_help_text 107 | input[4] = validate_params 108 | input[5] = schema_filename 109 | """ 110 | } 111 | } 112 | 113 | then { 114 | assertAll( 115 | { assert workflow.success }, 116 | { assert workflow.exitStatus == 0 }, 117 | { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, 118 | { assert workflow.stdout.any { it.contains('Input/output options') } }, 119 | { assert workflow.stdout.any { it.contains('--outdir') } } 120 | ) 121 | } 122 | } 123 | 124 | test("Should run help with extra text") { 125 | 126 | 127 | when { 128 | 129 | params { 130 | monochrome_logs = true 131 | test_data = '' 132 | } 133 | workflow { 134 | """ 135 | help = true 136 | workflow_command = "nextflow run noorg/doesntexist" 137 | pre_help_text = "pre-help-text" 138 | 
post_help_text = "post-help-text" 139 | validate_params = false 140 | schema_filename = "$moduleTestDir/nextflow_schema.json" 141 | 142 | input[0] = help 143 | input[1] = workflow_command 144 | input[2] = pre_help_text 145 | input[3] = post_help_text 146 | input[4] = validate_params 147 | input[5] = schema_filename 148 | """ 149 | } 150 | } 151 | 152 | then { 153 | assertAll( 154 | { assert workflow.success }, 155 | { assert workflow.exitStatus == 0 }, 156 | { assert workflow.stdout.any { it.contains('pre-help-text') } }, 157 | { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, 158 | { assert workflow.stdout.any { it.contains('Input/output options') } }, 159 | { assert workflow.stdout.any { it.contains('--outdir') } }, 160 | { assert workflow.stdout.any { it.contains('post-help-text') } } 161 | ) 162 | } 163 | } 164 | 165 | test("Should validate params") { 166 | 167 | when { 168 | 169 | params { 170 | monochrome_logs = true 171 | test_data = '' 172 | outdir = 1 173 | } 174 | workflow { 175 | """ 176 | help = false 177 | workflow_command = null 178 | pre_help_text = null 179 | post_help_text = null 180 | validate_params = true 181 | schema_filename = "$moduleTestDir/nextflow_schema.json" 182 | 183 | input[0] = help 184 | input[1] = workflow_command 185 | input[2] = pre_help_text 186 | input[3] = post_help_text 187 | input[4] = validate_params 188 | input[5] = schema_filename 189 | """ 190 | } 191 | } 192 | 193 | then { 194 | assertAll( 195 | { assert workflow.failed }, 196 | { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } } 197 | ) 198 | } 199 | } 200 | } 201 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema", 3 | "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json", 4 | "title": ". pipeline parameters", 5 | "description": "", 6 | "type": "object", 7 | "definitions": { 8 | "input_output_options": { 9 | "title": "Input/output options", 10 | "type": "object", 11 | "fa_icon": "fas fa-terminal", 12 | "description": "Define where the pipeline should find input data and save output data.", 13 | "required": ["outdir"], 14 | "properties": { 15 | "validate_params": { 16 | "type": "boolean", 17 | "description": "Validate parameters?", 18 | "default": true, 19 | "hidden": true 20 | }, 21 | "outdir": { 22 | "type": "string", 23 | "format": "directory-path", 24 | "description": "The output directory where the results will be saved. 
You have to use absolute paths to storage on Cloud infrastructure.", 25 | "fa_icon": "fas fa-folder-open" 26 | }, 27 | "test_data_base": { 28 | "type": "string", 29 | "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules", 30 | "description": "Base for test data directory", 31 | "hidden": true 32 | }, 33 | "test_data": { 34 | "type": "string", 35 | "description": "Fake test data param", 36 | "hidden": true 37 | } 38 | } 39 | }, 40 | "generic_options": { 41 | "title": "Generic options", 42 | "type": "object", 43 | "fa_icon": "fas fa-file-import", 44 | "description": "Less common options for the pipeline, typically set in a config file.", 45 | "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.", 46 | "properties": { 47 | "help": { 48 | "type": "boolean", 49 | "description": "Display help text.", 50 | "fa_icon": "fas fa-question-circle", 51 | "hidden": true 52 | }, 53 | "version": { 54 | "type": "boolean", 55 | "description": "Display version and exit.", 56 | "fa_icon": "fas fa-question-circle", 57 | "hidden": true 58 | }, 59 | "logo": { 60 | "type": "boolean", 61 | "default": true, 62 | "description": "Display nf-core logo in console output.", 63 | "fa_icon": "fas fa-image", 64 | "hidden": true 65 | }, 66 | "singularity_pull_docker_container": { 67 | "type": "boolean", 68 | "description": "Pull Singularity container from Docker?", 69 | "hidden": true 70 | }, 71 | "publish_dir_mode": { 72 | "type": "string", 73 | "default": "copy", 74 | "description": "Method used to save pipeline results to output directory.", 75 | "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. 
See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.", 76 | "fa_icon": "fas fa-copy", 77 | "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"], 78 | "hidden": true 79 | }, 80 | "monochrome_logs": { 81 | "type": "boolean", 82 | "description": "Use monochrome_logs", 83 | "hidden": true 84 | } 85 | } 86 | } 87 | }, 88 | "allOf": [ 89 | { 90 | "$ref": "#/definitions/input_output_options" 91 | }, 92 | { 93 | "$ref": "#/definitions/generic_options" 94 | } 95 | ] 96 | } 97 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/tests/tags.yml: -------------------------------------------------------------------------------- 1 | subworkflows/utils_nfvalidation_plugin: 2 | - subworkflows/nf-core/utils_nfvalidation_plugin/** 3 | -------------------------------------------------------------------------------- /tower.yml: -------------------------------------------------------------------------------- 1 | reports: 2 | multiqc_report.html: 3 | display: "MultiQC HTML report" 4 | samplesheet.csv: 5 | display: "Auto-created samplesheet with collated metadata and FASTQ paths" 6 | -------------------------------------------------------------------------------- /workflows/seqinspector.nf: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | IMPORT MODULES / SUBWORKFLOWS / FUNCTIONS 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | */ 6 | 7 | include { FASTQC } from '../modules/nf-core/fastqc/main' 8 | include { MULTIQC } from '../modules/nf-core/multiqc/main' 9 | include { paramsSummaryMap } from 'plugin/nf-validation' 10 | include { paramsSummaryMultiqc } from '../subworkflows/nf-core/utils_nfcore_pipeline' 11 | include { softwareVersionsToYAML } from '../subworkflows/nf-core/utils_nfcore_pipeline' 12 | include { methodsDescriptionText } from '../subworkflows/local/utils_nfcore_seqinspector_pipeline' 13 | 14 | /* 15 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 16 | RUN MAIN WORKFLOW 17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 18 | */ 19 | 20 | workflow SEQINSPECTOR { 21 | 22 | take: 23 | ch_samplesheet // channel: samplesheet read in from --input 24 | 25 | main: 26 | 27 | ch_versions = Channel.empty() 28 | ch_multiqc_files = Channel.empty() 29 | 30 | // 31 | // MODULE: Run FastQC 32 | // 33 | FASTQC ( 34 | ch_samplesheet 35 | ) 36 | ch_multiqc_files = ch_multiqc_files.mix(FASTQC.out.zip.collect{it[1]}) 37 | ch_versions = ch_versions.mix(FASTQC.out.versions.first()) 38 | 39 | // 40 | // Collate and save software versions 41 | // 42 | softwareVersionsToYAML(ch_versions) 43 | .collectFile(storeDir: "${params.outdir}/pipeline_info", name: 'nf_core_pipeline_software_mqc_versions.yml', sort: true, newLine: true) 44 | .set { ch_collated_versions } 45 | 46 | // 47 | // MODULE: MultiQC 48 | // 49 | ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true) 50 | ch_multiqc_custom_config = params.multiqc_config ? Channel.fromPath(params.multiqc_config, checkIfExists: true) : Channel.empty() 51 | ch_multiqc_logo = params.multiqc_logo ? 
Channel.fromPath(params.multiqc_logo, checkIfExists: true) : Channel.empty() 52 | summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json") 53 | ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params)) 54 | ch_multiqc_custom_methods_description = params.multiqc_methods_description ? file(params.multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true) 55 | ch_methods_description = Channel.value(methodsDescriptionText(ch_multiqc_custom_methods_description)) 56 | ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml')) 57 | ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions) 58 | ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml', sort: false)) 59 | 60 | MULTIQC ( 61 | ch_multiqc_files.collect(), 62 | ch_multiqc_config.toList(), 63 | ch_multiqc_custom_config.toList(), 64 | ch_multiqc_logo.toList() 65 | ) 66 | 67 | emit: 68 | multiqc_report = MULTIQC.out.report.toList() // channel: /path/to/multiqc_report.html 69 | versions = ch_versions // channel: [ path(versions.yml) ] 70 | } 71 | 72 | /* 73 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 74 | THE END 75 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 76 | */ 77 | --------------------------------------------------------------------------------
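Illustrative usage sketch (not a file in the repository): the SEQINSPECTOR workflow above takes a single ch_samplesheet channel and wires it into the nf-core FASTQC and MULTIQC modules, publishing collated software versions under params.outdir. Assuming the conventional nf-core [ meta, reads ] tuple shape consumed by the FASTQC module, and using placeholder paths for both the script location and the FASTQ file, a minimal nf-test for this workflow could look like the sketch below, written in the same nf-test style as the subworkflow tests above.

nextflow_workflow {

    name "Test Workflow SEQINSPECTOR"
    script "../workflows/seqinspector.nf"   // placeholder: adjust relative to wherever the test file lives
    workflow "SEQINSPECTOR"
    tag "workflows"
    tag "seqinspector"

    test("Should run FastQC and MultiQC on one sample") {

        when {
            params {
                outdir = "$outputDir"   // required because the workflow publishes to ${params.outdir}/pipeline_info
            }
            workflow {
                """
                // Placeholder sample: the conventional nf-core [ meta, reads ] tuple
                // consumed by the FASTQC module; swap in a real FASTQ path.
                input[0] = Channel.of([
                    [ id: 'SAMPLE1', single_end: true ],
                    [ file('/path/to/test_1.fastq.gz', checkIfExists: true) ]
                ])
                """
            }
        }

        then {
            assertAll(
                { assert workflow.success }
            )
        }
    }
}

Note that the workflow also resolves assets/multiqc_config.yml via $projectDir and passes "nextflow_schema.json" to paramsSummaryMap, so a test along these lines assumes it is executed from within the pipeline checkout, much as the existing nf-core subworkflow tests rely on their accompanying nextflow.config files.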