├── .devcontainer
│   └── devcontainer.json
├── .editorconfig
├── .gitattributes
├── .github
│   ├── .dockstore.yml
│   ├── CONTRIBUTING.md
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.yml
│   │   ├── config.yml
│   │   └── feature_request.yml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── python
│   │   └── find_changed_files.py
│   └── workflows
│       ├── branch.yml
│       ├── ci.yml
│       ├── clean-up.yml
│       ├── cloud_tests_full.yml
│       ├── cloud_tests_small.yml
│       ├── download_pipeline.yml
│       ├── fix-linting.yml
│       ├── linting.yml
│       ├── linting_comment.yml
│       └── release-announcements.yml
├── .gitignore
├── .gitpod.yml
├── .nf-core.yml
├── .pre-commit-config.yaml
├── .prettierignore
├── .prettierrc.yml
├── CHANGELOG.md
├── CITATIONS.md
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── assets
│   ├── adaptivecard.json
│   ├── email_template.html
│   ├── email_template.txt
│   ├── methods_description_template.yml
│   ├── nf-core-fetchngs_logo_light.png
│   ├── schema_input.json
│   ├── sendmail_template.txt
│   └── slackreport.json
├── bin
│   ├── multiqc_mappings_config.py
│   ├── sra_ids_to_runinfo.py
│   └── sra_runinfo_to_ftp.py
├── conf
│   ├── base.config
│   ├── test.config
│   └── test_full.config
├── docs
│   ├── README.md
│   ├── images
│   │   ├── nf-core-fetchngs_logo_dark.png
│   │   ├── nf-core-fetchngs_logo_light.png
│   │   ├── nf-core-fetchngs_metro_map_grey.png
│   │   └── nf-core-fetchngs_metro_map_grey.svg
│   ├── output.md
│   └── usage.md
├── main.nf
├── modules.json
├── modules
│   ├── local
│   │   ├── aspera_cli
│   │   │   ├── environment.yml
│   │   │   ├── main.nf
│   │   │   ├── nextflow.config
│   │   │   └── tests
│   │   │       ├── main.nf.test
│   │   │       └── main.nf.test.snap
│   │   ├── multiqc_mappings_config
│   │   │   ├── main.nf
│   │   │   ├── nextflow.config
│   │   │   └── tests
│   │   │       ├── main.nf.test
│   │   │       └── main.nf.test.snap
│   │   ├── sra_fastq_ftp
│   │   │   ├── main.nf
│   │   │   ├── nextflow.config
│   │   │   └── tests
│   │   │       ├── main.nf.test
│   │   │       └── main.nf.test.snap
│   │   ├── sra_ids_to_runinfo
│   │   │   ├── main.nf
│   │   │   ├── nextflow.config
│   │   │   └── tests
│   │   │       ├── main.nf.test
│   │   │       └── main.nf.test.snap
│   │   ├── sra_runinfo_to_ftp
│   │   │   ├── main.nf
│   │   │   ├── nextflow.config
│   │   │   └── tests
│   │   │       ├── main.nf.test
│   │   │       └── main.nf.test.snap
│   │   └── sra_to_samplesheet
│   │       ├── main.nf
│   │       ├── nextflow.config
│   │       └── tests
│   │           ├── main.nf.test
│   │           └── main.nf.test.snap
│   └── nf-core
│       ├── custom
│       │   └── sratoolsncbisettings
│       │       ├── environment.yml
│       │       ├── main.nf
│       │       ├── meta.yml
│       │       ├── templates
│       │       │   └── detect_ncbi_settings.sh
│       │       └── tests
│       │           ├── main.nf.test
│       │           ├── main.nf.test.snap
│       │           └── nextflow.config
│       ├── sratools
│       │   ├── fasterqdump
│       │   │   ├── environment.yml
│       │   │   ├── main.nf
│       │   │   ├── meta.yml
│       │   │   ├── nextflow.config
│       │   │   ├── sratools-fasterqdump.diff
│       │   │   └── tests
│       │   │       ├── main.nf.test
│       │   │       ├── main.nf.test.snap
│       │   │       └── nextflow.config
│       │   └── prefetch
│       │       ├── environment.yml
│       │       ├── main.nf
│       │       ├── meta.yml
│       │       ├── nextflow.config
│       │       ├── templates
│       │       │   └── retry_with_backoff.sh
│       │       └── tests
│       │           ├── main.nf.test
│       │           └── main.nf.test.snap
│       └── untar
│           ├── environment.yml
│           ├── main.nf
│           ├── meta.yml
│           └── tests
│               ├── main.nf.test
│               └── main.nf.test.snap
├── nextflow.config
├── nextflow_schema.json
├── nf-test.config
├── pyproject.toml
├── subworkflows
│   ├── local
│   │   └── utils_nfcore_fetchngs_pipeline
│   │       ├── main.nf
│   │       └── tests
│   │           ├── main.function.nf.test
│   │           ├── main.function.nf.test.snap
│   │           ├── main.workflow_pipeline_completion.nf.test
│   │           └── main.workflow_pipeline_initialisation.nf.test
│   └── nf-core
│       ├── fastq_download_prefetch_fasterqdump_sratools
│       │   ├── main.nf
│       │   ├── meta.yml
│       │   ├── nextflow.config
│       │   └── tests
│       │       ├── main.nf.test
│       │       └── main.nf.test.snap
│       ├── utils_nextflow_pipeline
│       │   ├── main.nf
│       │   ├── meta.yml
│       │   └── tests
│       │       ├── main.function.nf.test
│       │       ├── main.function.nf.test.snap
│       │       ├── main.workflow.nf.test
│       │       └── nextflow.config
│       ├── utils_nfcore_pipeline
│       │   ├── main.nf
│       │   ├── meta.yml
│       │   └── tests
│       │       ├── main.function.nf.test
│       │       ├── main.function.nf.test.snap
│       │       ├── main.workflow.nf.test
│       │       ├── main.workflow.nf.test.snap
│       │       └── nextflow.config
│       └── utils_nfvalidation_plugin
│           ├── main.nf
│           ├── meta.yml
│           └── tests
│               ├── main.nf.test
│               └── nextflow_schema.json
├── tests
│   ├── main.nf.test
│   └── nextflow.config
├── tower.yml
└── workflows
    └── sra
        ├── main.nf
        ├── nextflow.config
        └── tests
            ├── main.nf.test
            ├── sra_custom_ena_metadata_fields.nf.test
            ├── sra_download_method_aspera.nf.test
            ├── sra_download_method_sratools.nf.test
            ├── sra_nf_core_pipeline_atacseq.nf.test
            ├── sra_nf_core_pipeline_rnaseq.nf.test
            ├── sra_nf_core_pipeline_taxprofiler.nf.test
            ├── sra_nf_core_pipeline_viralrecon.nf.test
            └── sra_skip_fastq_download.nf.test
-------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: --------------------------------------------------------------------------------
1 | {
2 |   "name": "nfcore",
3 |   "image": "nfcore/gitpod:latest",
4 |   "remoteUser": "gitpod",
5 |   "runArgs": ["--privileged"],
6 | 
7 |   // Configure tool-specific properties.
8 |   "customizations": {
9 |     // Configure properties specific to VS Code.
10 |     "vscode": {
11 |       // Set *default* container specific settings.json values on container create.
12 |       "settings": {
13 |         "python.defaultInterpreterPath": "/opt/conda/bin/python",
14 |         "python.linting.enabled": true,
15 |         "python.linting.pylintEnabled": true,
16 |         "python.formatting.autopep8Path": "/opt/conda/bin/autopep8",
17 |         "python.formatting.yapfPath": "/opt/conda/bin/yapf",
18 |         "python.linting.flake8Path": "/opt/conda/bin/flake8",
19 |         "python.linting.pycodestylePath": "/opt/conda/bin/pycodestyle",
20 |         "python.linting.pydocstylePath": "/opt/conda/bin/pydocstyle",
21 |         "python.linting.pylintPath": "/opt/conda/bin/pylint"
22 |       },
23 | 
24 |       // Add the IDs of extensions you want installed when the container is created.
25 | "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"] 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | insert_final_newline = true 7 | trim_trailing_whitespace = true 8 | indent_size = 4 9 | indent_style = space 10 | 11 | [*.{md,yml,yaml,html,css,scss,js}] 12 | indent_size = 2 13 | 14 | # These files are edited and tested upstream in nf-core/modules 15 | [/modules/nf-core/**] 16 | charset = unset 17 | end_of_line = unset 18 | insert_final_newline = unset 19 | trim_trailing_whitespace = unset 20 | indent_style = unset 21 | [/subworkflows/nf-core/**] 22 | charset = unset 23 | end_of_line = unset 24 | insert_final_newline = unset 25 | trim_trailing_whitespace = unset 26 | indent_style = unset 27 | 28 | [/assets/email*] 29 | indent_size = unset 30 | 31 | # ignore Readme 32 | [README.md] 33 | indent_style = unset 34 | 35 | # ignore python 36 | [*.{py,md}] 37 | indent_style = unset 38 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.config linguist-language=nextflow 2 | *.nf.test linguist-language=nextflow 3 | modules/nf-core/** linguist-generated 4 | subworkflows/nf-core/** linguist-generated 5 | -------------------------------------------------------------------------------- /.github/.dockstore.yml: -------------------------------------------------------------------------------- 1 | # Dockstore config version, not pipeline version 2 | version: 1.2 3 | workflows: 4 | - subclass: nfl 5 | primaryDescriptorPath: /nextflow.config 6 | publish: True 7 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Report something that is broken or incorrect 3 | labels: bug 4 | body: 5 | - type: markdown 6 | attributes: 7 | value: | 8 | Before you post this issue, please check the documentation: 9 | 10 | - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) 11 | - [nf-core/fetchngs pipeline documentation](https://nf-co.re/fetchngs/usage) 12 | 13 | - type: textarea 14 | id: description 15 | attributes: 16 | label: Description of the bug 17 | description: A clear and concise description of what the bug is. 18 | validations: 19 | required: true 20 | 21 | - type: textarea 22 | id: command_used 23 | attributes: 24 | label: Command used and terminal output 25 | description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal. 26 | render: console 27 | placeholder: | 28 | $ nextflow run ... 29 | 30 | Some output where something broke 31 | 32 | - type: textarea 33 | id: files 34 | attributes: 35 | label: Relevant files 36 | description: | 37 | Please drag and drop the relevant files here. Create a `.zip` archive if the extension is not allowed. 38 | Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files. 
39 | 40 | - type: textarea 41 | id: system 42 | attributes: 43 | label: System information 44 | description: | 45 | * Nextflow version _(eg. 23.04.0)_ 46 | * Hardware _(eg. HPC, Desktop, Cloud)_ 47 | * Executor _(eg. slurm, local, awsbatch)_ 48 | * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_ 49 | * OS _(eg. CentOS Linux, macOS, Linux Mint)_ 50 | * Version of nf-core/fetchngs _(eg. 1.1, 1.5, 1.8.2)_ 51 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Join nf-core 3 | url: https://nf-co.re/join 4 | about: Please join the nf-core community here 5 | - name: "Slack #fetchngs channel" 6 | url: https://nfcore.slack.com/channels/fetchngs 7 | about: Discussion about the nf-core/fetchngs pipeline 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for the nf-core/fetchngs pipeline 3 | labels: enhancement 4 | body: 5 | - type: textarea 6 | id: description 7 | attributes: 8 | label: Description of feature 9 | description: Please describe your suggestion for a new feature. It might help to describe a problem or use case, plus any alternatives that you have considered. 10 | validations: 11 | required: true 12 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 13 | 14 | ## PR checklist 15 | 16 | - [ ] This comment contains a description of changes (with reason). 17 | - [ ] If you've fixed a bug or added code that should be tested, add tests! 18 | - [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/fetchngs/tree/master/.github/CONTRIBUTING.md) 19 | - [ ] If necessary, also make a PR on the nf-core/fetchngs _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. 20 | - [ ] Make sure your code lints (`nf-core lint`). 21 | - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). 22 | - [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir `). 23 | - [ ] Usage Documentation in `docs/usage.md` is updated. 24 | - [ ] Output Documentation in `docs/output.md` is updated. 25 | - [ ] `CHANGELOG.md` is updated. 26 | - [ ] `README.md` is updated (including new tool citations and authors/contributors). 
27 |
-------------------------------------------------------------------------------- /.github/python/find_changed_files.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | ## This script is used to scan *.nf.test files for function/process/workflow names and return them as a JSON list
4 | # It is functionally similar to nf-test list but fills a gap until feature https://github.com/askimed/nf-test/issues/196 is added
5 | 
6 | import argparse
7 | import json
8 | import logging
9 | import re
10 | 
11 | from itertools import chain
12 | from pathlib import Path
13 | 
14 | 
15 | def parse_args() -> argparse.Namespace:
16 |     """
17 |     Parse command line arguments and return a Namespace object.
18 | 
19 |     Returns:
20 |         argparse.Namespace: The parsed command line arguments.
21 |     """
22 |     parser = argparse.ArgumentParser(
23 |         description="Scan *.nf.test files for function/process/workflow name and return as a JSON list"
24 |     )
25 |     parser.add_argument(
26 |         "-p",
27 |         "--paths",
28 |         nargs="+",
29 |         default=["."],
30 |         help="List of directories or files to scan",
31 |     )
32 |     parser.add_argument(
33 |         "-l",
34 |         "--log-level",
35 |         choices=["DEBUG", "INFO", "WARNING", "ERROR"],
36 |         default="INFO",
37 |         help="Logging level",
38 |     )
39 |     parser.add_argument(
40 |         "-t",
41 |         "--types",
42 |         nargs="+",
43 |         choices=["function", "process", "workflow", "pipeline"],
44 |         default=["function", "process", "workflow", "pipeline"],
45 |         help="Types of tests to include.",
46 |     )
47 |     return parser.parse_args()
48 | 
49 | 
50 | def find_files(paths: list[str]) -> list[Path]:
51 |     """
52 |     Find all files matching pattern *.nf.test recursively from a list of paths.
53 | 
54 |     Args:
55 |         paths (list): List of directories or files to scan.
56 | 
57 |     Returns:
58 |         list: List of files matching the pattern *.nf.test.
59 |     """
60 |     # this is a bit clunky
61 |     result = []
62 |     for path in paths:
63 |         path_obj = Path(path)
64 |         # If Path is the exact nf-test file add to list:
65 |         if path_obj.match("*.nf.test"):
66 |             result.append(path_obj)
67 |         # Else recursively search for nf-test files:
68 |         else:
69 |             for file in path_obj.rglob("*.nf.test"):
70 |                 result.append(file)
71 |     return result
72 | 
73 | 
74 | def process_files(files: list[Path]) -> list[str]:
75 |     """
76 |     Process the files and return lines that begin with 'workflow', 'process', or 'function' and have a single string afterwards.
77 | 
78 |     Args:
79 |         files (list): List of files to process.
80 | 
81 |     Returns:
82 |         list: List of lines that match the criteria.
83 |     """
84 |     result = []
85 |     for file in files:
86 |         with open(file, "r") as f:
87 |             is_pipeline_test = True
88 |             lines = f.readlines()
89 |             for line in lines:
90 |                 line = line.strip()
91 |                 if line.startswith(("workflow", "process", "function")):
92 |                     words = line.split()
93 |                     if len(words) == 2 and re.match(r'^".*"$', words[1]):
94 |                         result.append(line)
95 |                         is_pipeline_test = False
96 | 
97 |             # If no results included workflow, process or function
98 |             # Add a dummy result to fill the 'pipeline' category
99 |             if is_pipeline_test:
100 |                 result.append("pipeline 'PIPELINE'")
101 | 
102 |     return result
103 | 
104 | 
105 | def generate(
106 |     lines: list[str], types: list[str] = ["function", "process", "workflow", "pipeline"]
107 | ) -> dict[str, list[str]]:
108 |     """
109 |     Generate a dictionary of function, process and workflow lists from the lines.
110 | 
111 |     Args:
112 |         lines (list): List of lines to process.
113 |         types (list): List of types to include.
114 | 
115 |     Returns:
116 |         dict: Dictionary with function, process and workflow lists.
117 |     """
118 |     result: dict[str, list[str]] = {
119 |         "function": [],
120 |         "process": [],
121 |         "workflow": [],
122 |         "pipeline": [],
123 |     }
124 |     for line in lines:
125 |         words = line.split()
126 |         if len(words) == 2:
127 |             keyword = words[0]
128 |             name = words[1].strip("'\"")  # Strip both single and double quotes
129 |             if keyword in types:
130 |                 result[keyword].append(name)
131 |     return result
132 | 
133 | 
134 | if __name__ == "__main__":
135 | 
136 |     # Utility stuff
137 |     args = parse_args()
138 |     logging.basicConfig(level=args.log_level)
139 | 
140 |     # Parse nf-test files for targets of tests
141 |     files = find_files(args.paths)
142 |     lines = process_files(files)
143 |     result = generate(lines)
144 | 
145 |     # Get only relevant results (specified by -t)
146 |     # Unique using a set
147 |     target_results = list(
148 |         {item for sublist in map(result.get, args.types) for item in sublist}
149 |     )
150 | 
151 |     # Print to stdout
152 |     print(json.dumps(target_results))
153 | 
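A minimal usage sketch for the script above, mirroring how `.github/workflows/ci.yml` invokes it further down; the paths passed to `-p` here are illustrative only, not a statement about what has actually changed:

```bash
# Sketch: list the nf-test tags found under a couple of example paths.
# Output is a JSON list of unique function/process/workflow/pipeline names
# collected from any *.nf.test files below those paths.
python .github/python/find_changed_files.py \
    -t pipeline workflow process \
    -p modules/local/sra_fastq_ftp workflows/sra
```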
-------------------------------------------------------------------------------- /.github/workflows/branch.yml: --------------------------------------------------------------------------------
1 | name: nf-core branch protection
2 | # This workflow is triggered on PRs to master branch on the repository
3 | # It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev`
4 | on:
5 |   pull_request_target:
6 |     branches: [master]
7 | 
8 | jobs:
9 |   test:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches
13 |       - name: Check PRs
14 |         if: github.repository == 'nf-core/fetchngs'
15 |         run: |
16 |           { [[ ${{github.event.pull_request.head.repo.full_name }} == nf-core/fetchngs ]] && [[ $GITHUB_HEAD_REF == "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]
17 | 
18 |       # If the above check failed, post a comment on the PR explaining the failure
19 |       # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
20 |       - name: Post PR comment
21 |         if: failure()
22 |         uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
23 |         with:
24 |           message: |
25 |             ## This PR is against the `master` branch :x:
26 | 
27 |             * Do not close this PR
28 |             * Click _Edit_ and change the `base` to `dev`
29 |             * This CI test will remain failed until you push a new commit
30 | 
31 |             ---
32 | 
33 |             Hi @${{ github.event.pull_request.user.login }},
34 | 
35 |             It looks like this pull-request has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch.
36 |             The `master` branch on nf-core repositories should always contain code from the latest release.
37 |             Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch.
38 | 
39 |             You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page.
40 |             Note that even after this, the test will continue to show as failing until you push a new commit.
41 | 
42 |             Thanks again for your contribution!
43 | repo-token: ${{ secrets.GITHUB_TOKEN }} 44 | allow-repeats: false 45 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors 2 | name: nf-core CI 3 | on: 4 | pull_request: 5 | release: 6 | types: [published] 7 | merge_group: 8 | types: 9 | - checks_requested 10 | branches: 11 | - master 12 | - dev 13 | 14 | env: 15 | NXF_ANSI_LOG: false 16 | NFT_VER: "0.8.4" 17 | NFT_WORKDIR: "~" 18 | NFT_DIFF: "pdiff" 19 | NFT_DIFF_ARGS: "--line-numbers --expand-tabs=2" 20 | 21 | concurrency: 22 | group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}" 23 | cancel-in-progress: true 24 | 25 | jobs: 26 | changes: 27 | name: Check for changes 28 | runs-on: ubuntu-latest 29 | outputs: 30 | changes: ${{ steps.changed_files.outputs.any_modified }} 31 | tags: ${{ steps.list.outputs.tags }} 32 | steps: 33 | - uses: actions/setup-python@v4 34 | with: 35 | python-version: "3.11" 36 | architecture: "x64" 37 | 38 | - uses: actions/checkout@v3 39 | with: 40 | fetch-depth: 0 41 | 42 | - uses: tj-actions/changed-files@v42 43 | id: changed_files 44 | with: 45 | dir_names: "true" 46 | output_renamed_files_as_deleted_and_added: "true" 47 | # Define list of additional rules for testing paths 48 | # Mostly, we define additional 'pipeline' or 'all' tests here 49 | files_yaml: | 50 | ".": 51 | - .github/workflows/** 52 | - nf-test.config 53 | - nextflow.config 54 | tests: 55 | - assets/* 56 | - bin/* 57 | - conf/* 58 | - main.nf 59 | - nextflow_schema.json 60 | 61 | files_ignore: | 62 | .git* 63 | .gitpod.yml 64 | .prettierignore 65 | .prettierrc.yml 66 | **.md 67 | **.png 68 | modules.json 69 | pyproject.toml 70 | tower.yml 71 | 72 | - name: debug 73 | run: | 74 | echo ${{ steps.changed_files.outputs.any_modified }} 75 | echo ${{ steps.changed_files.outputs.all_changed_files }} 76 | echo ${{ steps.changed_files.outputs.changed_keys }} 77 | 78 | - name: nf-test list tags 79 | id: list 80 | if: ${{ steps.changed_files.outputs.any_modified }} 81 | run: | 82 | echo tags=$(python \ 83 | .github/python/find_changed_files.py \ 84 | -t pipeline workflow process \ 85 | -p ${{ steps.changed_files.outputs.all_changed_files }} ${{ steps.changed_files.outputs.changed_keys }} \ 86 | ) >> $GITHUB_OUTPUT 87 | 88 | - name: debug2 89 | run: | 90 | echo ${{ steps.list.outputs.tags }} 91 | 92 | test: 93 | name: ${{ matrix.tags }} ${{ matrix.profile }} NF-${{ matrix.NXF_VER }} 94 | needs: [changes] 95 | if: needs.changes.outputs.changes 96 | runs-on: ubuntu-latest 97 | strategy: 98 | fail-fast: false 99 | matrix: 100 | NXF_VER: 101 | - "latest-everything" 102 | - "23.04" 103 | tags: ["${{ fromJson(needs.changes.outputs.tags) }}"] 104 | profile: 105 | - "docker" 106 | 107 | steps: 108 | - name: Check out pipeline code 109 | uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 110 | 111 | - name: Install Nextflow 112 | uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 113 | with: 114 | version: "${{ matrix.NXF_VER }}" 115 | 116 | - uses: actions/setup-python@v4 117 | with: 118 | python-version: "3.11" 119 | architecture: "x64" 120 | 121 | - name: Install pdiff to see diff between nf-test snapshots 122 | run: | 123 | python -m pip install --upgrade pip 124 | pip install pdiff 125 | 126 | - name: Cache nf-test installation 
127 | id: cache-software 128 | uses: actions/cache@v3 129 | with: 130 | path: | 131 | /usr/local/bin/nf-test 132 | /home/runner/.nf-test/nf-test.jar 133 | key: ${{ runner.os }}-${{ env.NFT_VER }}-nftest 134 | 135 | - name: Install nf-test 136 | if: steps.cache-software.outputs.cache-hit != 'true' 137 | run: | 138 | wget -qO- https://code.askimed.com/install/nf-test | bash 139 | sudo mv nf-test /usr/local/bin/ 140 | 141 | - name: Run nf-test 142 | run: | 143 | nf-test test --verbose --tag ${{ matrix.tags }} --profile "+${{ matrix.profile }}" --junitxml=test.xml --tap=test.tap 144 | 145 | - uses: pcolby/tap-summary@v1 146 | with: 147 | path: >- 148 | test.tap 149 | 150 | - name: Output log on failure 151 | if: failure() 152 | run: | 153 | sudo apt install bat > /dev/null 154 | batcat --decorations=always --color=always ${{ github.workspace }}/.nf-test/tests/*/meta/nextflow.log 155 | 156 | - name: Publish Test Report 157 | uses: mikepenz/action-junit-report@v3 158 | if: always() # always run even if the previous step fails 159 | with: 160 | report_paths: test.xml 161 | 162 | confirm-pass: 163 | runs-on: ubuntu-latest 164 | needs: 165 | - changes 166 | - test 167 | if: always() 168 | steps: 169 | - name: All tests ok 170 | if: ${{ !contains(needs.*.result, 'failure') }} 171 | run: exit 0 172 | - name: One or more tests failed 173 | if: ${{ contains(needs.*.result, 'failure') }} 174 | run: exit 1 175 | 176 | - name: debug-print 177 | if: always() 178 | run: | 179 | echo "toJSON(needs) = ${{ toJSON(needs) }}" 180 | echo "toJSON(needs.*.result) = ${{ toJSON(needs.*.result) }}" 181 | -------------------------------------------------------------------------------- /.github/workflows/clean-up.yml: -------------------------------------------------------------------------------- 1 | name: "Close user-tagged issues and PRs" 2 | on: 3 | schedule: 4 | - cron: "0 0 * * 0" # Once a week 5 | 6 | jobs: 7 | clean-up: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9 14 | with: 15 | stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." 16 | stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 17 | close-issue-message: "This issue was closed because it has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then staled for 20 days with no activity." 
18 | days-before-stale: 30 19 | days-before-close: 20 20 | days-before-pr-close: -1 21 | any-of-labels: "awaiting-changes,awaiting-feedback" 22 | exempt-issue-labels: "WIP" 23 | exempt-pr-labels: "WIP" 24 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 25 | -------------------------------------------------------------------------------- /.github/workflows/cloud_tests_full.yml: -------------------------------------------------------------------------------- 1 | name: full-sized tests on cloud providers 2 | run-name: Submitting workflow to all cloud providers using full sized data 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | inputs: 8 | platform: 9 | description: "Platform to run test" 10 | required: true 11 | default: "all" 12 | type: choice 13 | options: 14 | - all 15 | - aws 16 | - azure 17 | - gcp 18 | jobs: 19 | run-full-tests-on-aws: 20 | if: ${{ github.event.inputs.platform == 'all' || github.event.inputs.platform == 'aws' || !github.event.inputs }} 21 | runs-on: ubuntu-latest 22 | strategy: 23 | matrix: 24 | download_method: ["aspera", "ftp", "sratools"] 25 | steps: 26 | - uses: seqeralabs/action-tower-launch@v2 27 | with: 28 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 29 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 30 | compute_env: ${{ secrets.TOWER_CE_AWS_CPU }} 31 | workdir: "${{ secrets.TOWER_BUCKET_AWS }}/work/fetchngs/work-${{ github.sha }}" 32 | run_name: "aws_fetchngs_full_${{ matrix.download_method }}" 33 | revision: ${{ github.sha }} 34 | profiles: test_full 35 | parameters: | 36 | { 37 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 38 | "download_method": "${{ matrix.download_method }}", 39 | "outdir": "${{ secrets.TOWER_BUCKET_AWS }}/fetchngs/results-${{ github.sha }}/download_method_${{ matrix.download_method }}/" 40 | } 41 | - uses: actions/upload-artifact@v3 42 | with: 43 | name: Tower debug log file 44 | path: tower_action_*.log 45 | 46 | run-full-tests-on-azure: 47 | if: ${{ github.event.inputs.platform == 'all' || github.event.inputs.platform == 'azure' || !github.event.inputs }} 48 | runs-on: ubuntu-latest 49 | strategy: 50 | matrix: 51 | download_method: ["aspera", "ftp", "sratools"] 52 | steps: 53 | - uses: seqeralabs/action-tower-launch@v2 54 | with: 55 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 56 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 57 | compute_env: ${{ secrets.TOWER_CE_AZURE_CPU }} 58 | workdir: "${{ secrets.TOWER_BUCKET_AZURE }}/work/fetchngs/work-${{ github.sha }}" 59 | run_name: "azure_fetchngs_full_${{ matrix.download_method }}" 60 | revision: ${{ github.sha }} 61 | profiles: test_full 62 | parameters: | 63 | { 64 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 65 | "download_method": "${{ matrix.download_method }}", 66 | "outdir": "${{ secrets.TOWER_BUCKET_AZURE }}/fetchngs/results-${{ github.sha }}/download_method_${{ matrix.download_method }}/" 67 | } 68 | - uses: actions/upload-artifact@v3 69 | with: 70 | name: Tower debug log file 71 | path: tower_action_*.log 72 | 73 | run-full-tests-on-gcp: 74 | if: ${{ github.event.inputs.platform == 'gcp' || !github.event.inputs }} 75 | runs-on: ubuntu-latest 76 | strategy: 77 | matrix: 78 | download_method: ["aspera", "ftp", "sratools"] 79 | steps: 80 | - uses: seqeralabs/action-tower-launch@v2 81 | with: 82 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 83 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 84 | compute_env: ${{ secrets.TOWER_CE_GCP_CPU }} 85 | workdir: "${{ secrets.TOWER_BUCKET_GCP 
}}/work/fetchngs/work-${{ github.sha }}" 86 | run_name: "gcp_fetchngs_full_${{ matrix.download_method }}" 87 | revision: ${{ github.sha }} 88 | profiles: test_full 89 | parameters: | 90 | { 91 | "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}", 92 | "download_method": "${{ matrix.download_method }}", 93 | "outdir": "${{ secrets.TOWER_BUCKET_GCP }}/fetchngs/results-${{ github.sha }}/download_method_${{ matrix.download_method }}/" 94 | } 95 | - uses: actions/upload-artifact@v3 96 | with: 97 | name: Tower debug log file 98 | path: tower_action_*.log 99 | -------------------------------------------------------------------------------- /.github/workflows/cloud_tests_small.yml: -------------------------------------------------------------------------------- 1 | name: small-sized tests on cloud providers 2 | run-name: Submitting workflow to all cloud providers using small sized data 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | platform: 7 | description: "Platform to run test" 8 | required: true 9 | default: "all" 10 | type: choice 11 | options: 12 | - all 13 | - aws 14 | - azure 15 | - gcp 16 | jobs: 17 | run-small-tests-on-aws: 18 | if: ${{ github.event.inputs.platform == 'all' || github.event.inputs.platform == 'aws' }} 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: seqeralabs/action-tower-launch@v2 22 | with: 23 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 24 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 25 | compute_env: ${{ secrets.TOWER_CE_AWS_CPU }} 26 | workdir: "${{ secrets.TOWER_BUCKET_AWS }}/work/fetchngs/work-${{ github.sha }}" 27 | run_name: "aws_fetchngs_small" 28 | revision: ${{ github.sha }} 29 | profiles: test 30 | parameters: | 31 | { 32 | "outdir": "${{ secrets.TOWER_BUCKET_AWS }}/fetchngs/results-test-${{ github.sha }}/" 33 | } 34 | - uses: actions/upload-artifact@v3 35 | with: 36 | name: Tower debug log file 37 | path: tower_action_*.log 38 | 39 | run-small-tests-on-azure: 40 | if: ${{ github.event.inputs.platform == 'all' || github.event.inputs.platform == 'azure' }} 41 | runs-on: ubuntu-latest 42 | steps: 43 | - uses: seqeralabs/action-tower-launch@v2 44 | with: 45 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 46 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 47 | compute_env: ${{ secrets.TOWER_CE_AZURE_CPU }} 48 | workdir: "${{ secrets.TOWER_BUCKET_AZURE }}/work/fetchngs/work-${{ github.sha }}" 49 | run_name: "azure_fetchngs_small" 50 | revision: ${{ github.sha }} 51 | profiles: test 52 | parameters: | 53 | { 54 | "outdir": "${{ secrets.TOWER_BUCKET_AZURE }}/fetchngs/results-test-${{ github.sha }}/" 55 | } 56 | - uses: actions/upload-artifact@v3 57 | with: 58 | name: Tower debug log file 59 | path: tower_action_*.log 60 | 61 | run-small-tests-on-gcp: 62 | if: ${{ github.event.inputs.platform == 'gcp' }} 63 | runs-on: ubuntu-latest 64 | steps: 65 | - uses: seqeralabs/action-tower-launch@v2 66 | with: 67 | workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} 68 | access_token: ${{ secrets.TOWER_ACCESS_TOKEN }} 69 | compute_env: ${{ secrets.TOWER_CE_GCP_CPU }} 70 | workdir: "${{ secrets.TOWER_BUCKET_GCP }}/work/fetchngs/work-${{ github.sha }}" 71 | run_name: "gcp_fetchngs_small" 72 | revision: ${{ github.sha }} 73 | profiles: test 74 | parameters: | 75 | { 76 | "outdir": "${{ secrets.TOWER_BUCKET_GCP }}/fetchngs/results-test-${{ github.sha }}/" 77 | } 78 | - uses: actions/upload-artifact@v3 79 | with: 80 | name: Tower debug log file 81 | path: tower_action_*.log 82 | 
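The full-sized tests above sweep the pipeline's three supported download methods via a matrix; a hedged sketch of the equivalent local runs, assuming Docker and the bundled `test` profile (the output paths are illustrative, not part of the repository):

```bash
# Sketch: exercise each supported download method locally, mirroring the
# download_method matrix used by the full-sized cloud tests above.
for method in aspera ftp sratools; do
    nextflow run nf-core/fetchngs \
        -profile test,docker \
        --download_method "$method" \
        --outdir "results-${method}"
done
```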
-------------------------------------------------------------------------------- /.github/workflows/download_pipeline.yml: -------------------------------------------------------------------------------- 1 | name: Test successful pipeline download with 'nf-core download' 2 | 3 | # Run the workflow when: 4 | # - dispatched manually 5 | # - when a PR is opened or reopened to master branch 6 | # - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev. 7 | on: 8 | workflow_dispatch: 9 | inputs: 10 | testbranch: 11 | description: "The specific branch you wish to utilize for the test execution of nf-core download." 12 | required: true 13 | default: "dev" 14 | pull_request: 15 | types: 16 | - opened 17 | branches: 18 | - master 19 | pull_request_target: 20 | branches: 21 | - master 22 | 23 | env: 24 | NXF_ANSI_LOG: false 25 | 26 | jobs: 27 | download: 28 | runs-on: ubuntu-latest 29 | steps: 30 | - name: Install Nextflow 31 | uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1 32 | 33 | - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5 34 | with: 35 | python-version: "3.11" 36 | architecture: "x64" 37 | - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7 38 | with: 39 | singularity-version: 3.8.3 40 | 41 | - name: Install dependencies 42 | run: | 43 | python -m pip install --upgrade pip 44 | pip install git+https://github.com/nf-core/tools.git@dev 45 | 46 | - name: Get the repository name and current branch set as environment variable 47 | run: | 48 | echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV} 49 | echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV} 50 | echo "REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV} 51 | 52 | - name: Download the pipeline 53 | env: 54 | NXF_SINGULARITY_CACHEDIR: ./ 55 | run: | 56 | nf-core download ${{ env.REPO_LOWERCASE }} \ 57 | --revision ${{ env.REPO_BRANCH }} \ 58 | --outdir ./${{ env.REPOTITLE_LOWERCASE }} \ 59 | --compress "none" \ 60 | --container-system 'singularity' \ 61 | --container-library "quay.io" -l "docker.io" -l "ghcr.io" \ 62 | --container-cache-utilisation 'amend' \ 63 | --download-configuration 64 | 65 | - name: Inspect download 66 | run: tree ./${{ env.REPOTITLE_LOWERCASE }} 67 | 68 | - name: Run the downloaded pipeline 69 | env: 70 | NXF_SINGULARITY_CACHEDIR: ./ 71 | NXF_SINGULARITY_HOME_MOUNT: true 72 | run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results 73 | -------------------------------------------------------------------------------- /.github/workflows/fix-linting.yml: -------------------------------------------------------------------------------- 1 | name: Fix linting from a comment 2 | on: 3 | issue_comment: 4 | types: [created] 5 | 6 | jobs: 7 | fix-linting: 8 | # Only run if comment is on a PR with the main repo, and if it contains the magic keywords 9 | if: > 10 | contains(github.event.comment.html_url, '/pull/') && 11 | contains(github.event.comment.body, '@nf-core-bot fix linting') && 12 | github.repository == 'nf-core/fetchngs' 13 | runs-on: ubuntu-latest 14 | steps: 15 | # Use the @nf-core-bot token to check out so we can push later 16 | - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 17 | with: 18 | token: ${{ secrets.nf_core_bot_auth_token }} 19 | 20 | # indication that the linting is being fixed 21 | - name: 
React on comment
22 |         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
23 |         with:
24 |           comment-id: ${{ github.event.comment.id }}
25 |           reactions: eyes
26 | 
27 |       # Action runs on the issue comment, so we don't get the PR by default
28 |       # Use the gh cli to check out the PR
29 |       - name: Checkout Pull Request
30 |         run: gh pr checkout ${{ github.event.issue.number }}
31 |         env:
32 |           GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
33 | 
34 |       # Install and run pre-commit
35 |       - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
36 |         with:
37 |           python-version: 3.11
38 | 
39 |       - name: Install pre-commit
40 |         run: pip install pre-commit
41 | 
42 |       - name: Run pre-commit
43 |         id: pre-commit
44 |         run: pre-commit run --all-files
45 |         continue-on-error: true
46 | 
47 |       # indication that the linting has finished
48 |       - name: react if linting finished successfully
49 |         if: steps.pre-commit.outcome == 'success'
50 |         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
51 |         with:
52 |           comment-id: ${{ github.event.comment.id }}
53 |           reactions: "+1"
54 | 
55 |       - name: Commit & push changes
56 |         id: commit-and-push
57 |         if: steps.pre-commit.outcome == 'failure'
58 |         run: |
59 |           git config user.email "core@nf-co.re"
60 |           git config user.name "nf-core-bot"
61 |           git config push.default upstream
62 |           git add .
63 |           git status
64 |           git commit -m "[automated] Fix code linting"
65 |           git push
66 | 
67 |       - name: react if linting errors were fixed
68 |         id: react-if-fixed
69 |         if: steps.commit-and-push.outcome == 'success'
70 |         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
71 |         with:
72 |           comment-id: ${{ github.event.comment.id }}
73 |           reactions: hooray
74 | 
75 |       - name: react if linting errors were not fixed
76 |         if: steps.commit-and-push.outcome == 'failure'
77 |         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
78 |         with:
79 |           comment-id: ${{ github.event.comment.id }}
80 |           reactions: confused
81 | 
82 |       - name: comment if linting errors were not fixed
83 |         if: steps.commit-and-push.outcome == 'failure'
84 |         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
85 |         with:
86 |           issue-number: ${{ github.event.issue.number }}
87 |           body: |
88 |             @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually.
89 |             See [CI log](https://github.com/nf-core/fetchngs/actions/runs/${{ github.run_id }}) for more details.
-------------------------------------------------------------------------------- /.github/workflows/linting.yml: --------------------------------------------------------------------------------
1 | name: nf-core linting
2 | # This workflow is triggered on pushes and PRs to the repository.
3 | # It runs the `nf-core lint` and markdown lint tests to ensure
4 | # that the code meets the nf-core guidelines.
5 | on:
6 |   push:
7 |     branches:
8 |       - dev
9 |   pull_request:
10 |   release:
11 |     types: [published]
12 | 
13 | jobs:
14 |   pre-commit:
15 |     runs-on: ubuntu-latest
16 |     steps:
17 |       - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
18 | 
19 |       - name: Set up Python 3.11
20 |         uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
21 |         with:
22 |           python-version: 3.11
23 |           cache: "pip"
24 | 
25 |       - name: Install pre-commit
26 |         run: pip install pre-commit
27 | 
28 |       - name: Run pre-commit
29 |         run: pre-commit run --all-files
30 | 
31 |   nf-core:
32 |     runs-on: ubuntu-latest
33 |     steps:
34 |       - name: Check out pipeline code
35 |         uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
36 | 
37 |       - name: Install Nextflow
38 |         uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1
39 | 
40 |       - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
41 |         with:
42 |           python-version: "3.11"
43 |           architecture: "x64"
44 | 
45 |       - name: Install dependencies
46 |         run: |
47 |           python -m pip install --upgrade pip
48 |           pip install nf-core
49 | 
50 |       - name: Run nf-core lint
51 |         env:
52 |           GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }}
53 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
54 |           GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }}
55 |         run: nf-core -l lint_log.txt lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md
56 | 
57 |       - name: Save PR number
58 |         if: ${{ always() }}
59 |         run: echo ${{ github.event.pull_request.number }} > PR_number.txt
60 | 
61 |       - name: Upload linting log file artifact
62 |         if: ${{ always() }}
63 |         uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4
64 |         with:
65 |           name: linting-logs
66 |           path: |
67 |             lint_log.txt
68 |             lint_results.md
69 |             PR_number.txt
-------------------------------------------------------------------------------- /.github/workflows/linting_comment.yml: --------------------------------------------------------------------------------
1 | name: nf-core linting comment
2 | # This workflow is triggered after the linting action is complete
3 | # It posts an automated comment to the PR, even if the PR is coming from a fork
4 | 
5 | on:
6 |   workflow_run:
7 |     workflows: ["nf-core linting"]
8 | 
9 | jobs:
10 |   test:
11 |     runs-on: ubuntu-latest
12 |     steps:
13 |       - name: Download lint results
14 |         uses: dawidd6/action-download-artifact@f6b0bace624032e30a85a8fd9c1a7f8f611f5737 # v3
15 |         with:
16 |           workflow: linting.yml
17 |           workflow_conclusion: completed
18 | 
19 |       - name: Get PR number
20 |         id: pr_number
21 |         run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT
22 | 
23 |       - name: Post PR comment
24 |         uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31 # v2
25 |         with:
26 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
27 |           number: ${{ steps.pr_number.outputs.pr_number }}
28 |           path: linting-logs/lint_results.md
-------------------------------------------------------------------------------- /.github/workflows/release-announcements.yml: --------------------------------------------------------------------------------
1 | name: release-announcements
2 | # Automatic release toot and tweet announcements
3 | on:
4 |   release:
5 |     types: [published]
6 |   workflow_dispatch:
7 | 
8 | jobs:
9 |   toot:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       - name: get topics and convert to hashtags
13 |         id: get_topics
14 |         run: |
15 |           echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" >> $GITHUB_OUTPUT
16 | 
17 |       - uses: rzr/fediverse-action@master
18 |         with:
19 |           access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }}
20 |           host: "mstdn.science" # custom host if not "mastodon.social" (default)
21 |           # GitHub event payload
22 |           # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release
23 |           message: |
24 |             Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
25 | 
26 |             Please see the changelog: ${{ github.event.release.html_url }}
27 | 
28 |             ${{ steps.get_topics.outputs.topics }} #nfcore #openscience #nextflow #bioinformatics
29 | 
30 |   send-tweet:
31 |     runs-on: ubuntu-latest
32 | 
33 |     steps:
34 |       - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
35 |         with:
36 |           python-version: "3.10"
37 |       - name: Install dependencies
38 |         run: pip install tweepy==4.14.0
39 |       - name: Send tweet
40 |         shell: python
41 |         run: |
42 |           import os
43 |           import tweepy
44 | 
45 |           client = tweepy.Client(
46 |               access_token=os.getenv("TWITTER_ACCESS_TOKEN"),
47 |               access_token_secret=os.getenv("TWITTER_ACCESS_TOKEN_SECRET"),
48 |               consumer_key=os.getenv("TWITTER_CONSUMER_KEY"),
49 |               consumer_secret=os.getenv("TWITTER_CONSUMER_SECRET"),
50 |           )
51 |           tweet = os.getenv("TWEET")
52 |           client.create_tweet(text=tweet)
53 |         env:
54 |           TWEET: |
55 |             Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
56 | 
57 |             Please see the changelog: ${{ github.event.release.html_url }}
58 |           TWITTER_CONSUMER_KEY: ${{ secrets.TWITTER_CONSUMER_KEY }}
59 |           TWITTER_CONSUMER_SECRET: ${{ secrets.TWITTER_CONSUMER_SECRET }}
60 |           TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
61 |           TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
62 | 
63 |   bsky-post:
64 |     runs-on: ubuntu-latest
65 |     steps:
66 |       - uses: zentered/bluesky-post-action@80dbe0a7697de18c15ad22f4619919ceb5ccf597 # v0.1.0
67 |         with:
68 |           post: |
69 |             Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
70 | 71 | Please see the changelog: ${{ github.event.release.html_url }} 72 | env: 73 | BSKY_IDENTIFIER: ${{ secrets.BSKY_IDENTIFIER }} 74 | BSKY_PASSWORD: ${{ secrets.BSKY_PASSWORD }} 75 | # 76 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .nextflow* 2 | work/ 3 | data/ 4 | results/ 5 | .DS_Store 6 | testing/ 7 | testing* 8 | *.pyc 9 | .nf-test.log 10 | nf-test 11 | .nf-test* 12 | test.xml 13 | -------------------------------------------------------------------------------- /.gitpod.yml: -------------------------------------------------------------------------------- 1 | image: nfcore/gitpod:latest 2 | tasks: 3 | - name: Update Nextflow and setup pre-commit 4 | command: | 5 | pre-commit install --install-hooks 6 | nextflow self-update 7 | - name: unset JAVA_TOOL_OPTIONS 8 | command: | 9 | unset JAVA_TOOL_OPTIONS 10 | 11 | vscode: 12 | extensions: # based on nf-core.nf-core-extensionpack 13 | - codezombiech.gitignore # Language support for .gitignore files 14 | # - cssho.vscode-svgviewer # SVG viewer 15 | - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code 16 | - eamodio.gitlens # Quickly glimpse into whom, why, and when a line or code block was changed 17 | - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files 18 | - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar 19 | - mechatroner.rainbow-csv # Highlight columns in csv files in different colors 20 | # - nextflow.nextflow # Nextflow syntax highlighting 21 | - oderwat.indent-rainbow # Highlight indentation level 22 | - streetsidesoftware.code-spell-checker # Spelling checker for source code 23 | -------------------------------------------------------------------------------- /.nf-core.yml: -------------------------------------------------------------------------------- 1 | repository_type: pipeline 2 | lint: 3 | actions_ci: false 4 | files_exist: 5 | - conf/modules.config 6 | files_unchanged: 7 | - assets/sendmail_template.txt 8 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/mirrors-prettier 3 | rev: "v3.1.0" 4 | hooks: 5 | - id: prettier 6 | - repo: https://github.com/editorconfig-checker/editorconfig-checker.python 7 | rev: "2.7.3" 8 | hooks: 9 | - id: editorconfig-checker 10 | alias: ec 11 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | email_template.html 2 | adaptivecard.json 3 | slackreport.json 4 | .nextflow* 5 | work/ 6 | data/ 7 | results/ 8 | .DS_Store 9 | testing/ 10 | testing* 11 | *.pyc 12 | bin/ 13 | -------------------------------------------------------------------------------- /.prettierrc.yml: -------------------------------------------------------------------------------- 1 | printWidth: 120 2 | -------------------------------------------------------------------------------- /CITATIONS.md: -------------------------------------------------------------------------------- 1 | # nf-core/fetchngs: Citations 2 | 3 | ## [nf-core](https://pubmed.ncbi.nlm.nih.gov/32055031/) 4 | 5 | > Ewels PA, Peltzer A, Fillinger S, Patel H, Alneberg J, 
Wilm A, Garcia MU, Di Tommaso P, Nahnsen S. The nf-core framework for community-curated bioinformatics pipelines. Nat Biotechnol. 2020 Mar;38(3):276-278. doi: 10.1038/s41587-020-0439-x. PubMed PMID: 32055031. 6 | 7 | ## [Nextflow](https://pubmed.ncbi.nlm.nih.gov/28398311/) 8 | 9 | > Di Tommaso P, Chatzou M, Floden EW, Barja PP, Palumbo E, Notredame C. Nextflow enables reproducible computational workflows. Nat Biotechnol. 2017 Apr 11;35(4):316-319. doi: 10.1038/nbt.3820. PubMed PMID: 28398311. 10 | 11 | ## Pipeline tools 12 | 13 | - [Aspera CLI](https://github.com/IBM/aspera-cli) 14 | 15 | - [Python](http://www.python.org) 16 | 17 | - [Requests](https://docs.python-requests.org/) 18 | 19 | - [sra-tools](https://github.com/ncbi/sra-tools) 20 | 21 | ## Pipeline resources 22 | 23 | - [ENA](https://pubmed.ncbi.nlm.nih.gov/33175160/) 24 | 25 | > Harrison PW, Ahamed A, Aslam R, Alako BTF, Burgin J, Buso N, Courtot M, Fan J, Gupta D, Haseeb M, Holt S, Ibrahim T, Ivanov E, Jayathilaka S, Kadhirvelu VB, Kumar M, Lopez R, Kay S, Leinonen R, Liu X, O'Cathail C, Pakseresht A, Park Y, Pesant S, Rahman N, Rajan J, Sokolov A, Vijayaraja S, Waheed Z, Zyoud A, Burdett T, Cochrane G. The European Nucleotide Archive in 2020. Nucleic Acids Res. 2021 Jan 8;49(D1):D82-D85. doi: 10.1093/nar/gkaa1028. PubMed PMID: 33175160; PubMed Central PMCID: PMC7778925. 26 | 27 | - [SRA](https://pubmed.ncbi.nlm.nih.gov/21062823/) 28 | 29 | > Leinonen R, Sugawara H, Shumway M, International Nucleotide Sequence Database Collaboration. The sequence read archive. Nucleic Acids Res. 2011 Jan;39 (Database issue):D19-21. doi: 10.1093/nar/gkq1019. Epub 2010 Nov 9. PubMed PMID: 21062823; PubMed Central PMCID: PMC3013647. 30 | 31 | - [DDBJ](https://pubmed.ncbi.nlm.nih.gov/33156332/) 32 | 33 | > Fukuda A, Kodama Y, Mashima J, Fujisawa T, Ogasawara O. DDBJ update: streamlining submission and access of human data. Nucleic Acids Res. 2021 Jan 8;49(D1):D71-D75. doi: 10.1093/nar/gkaa982. PubMed PMID: 33156332; PubMed Central PMCID: PMC7779041. 34 | 35 | - [GEO](https://pubmed.ncbi.nlm.nih.gov/23193258/) 36 | 37 | > Barrett T, Wilhite SE, Ledoux P, Evangelista C, Kim IF, Tomashevsky M, Marshall KA, Phillippy KH, Sherman PM, Holko M, Yefanov A, Lee H, Zhang N, Robertson CL, Serova N, Davis S, Soboleva A. NCBI GEO: archive for functional genomics data sets--update. Nucleic Acids Res. 2013 Jan;41(Database issue):D991-5. doi: 10.1093/nar/gks1193. Epub 2012 Nov 27. PubMed PMID: 23193258; PubMed Central PMCID: PMC3531084. 38 | 39 | ## Software packaging/containerisation/testing tools 40 | 41 | - [Anaconda](https://anaconda.com) 42 | 43 | > Anaconda Software Distribution. Computer software. Vers. 2-2.4.0. Anaconda, Nov. 2016. Web. 44 | 45 | - [Bioconda](https://pubmed.ncbi.nlm.nih.gov/29967506/) 46 | 47 | > Grüning B, Dale R, Sjödin A, Chapman BA, Rowe J, Tomkins-Tinch CH, Valieris R, Köster J; Bioconda Team. Bioconda: sustainable and comprehensive software distribution for the life sciences. Nat Methods. 2018 Jul;15(7):475-476. doi: 10.1038/s41592-018-0046-7. PubMed PMID: 29967506. 48 | 49 | - [BioContainers](https://pubmed.ncbi.nlm.nih.gov/28379341/) 50 | 51 | > da Veiga Leprevost F, Grüning B, Aflitos SA, Röst HL, Uszkoreit J, Barsnes H, Vaudel M, Moreno P, Gatto L, Weber J, Bai M, Jimenez RC, Sachsenberg T, Pfeuffer J, Alvarez RV, Griss J, Nesvizhskii AI, Perez-Riverol Y. BioContainers: an open-source and community-driven framework for software standardization. Bioinformatics. 2017 Aug 15;33(16):2580-2582. doi: 10.1093/bioinformatics/btx192. 
PubMed PMID: 28379341; PubMed Central PMCID: PMC5870671. 52 | 53 | - [Docker](https://dl.acm.org/doi/10.5555/2600239.2600241) 54 | 55 | > Merkel, D. (2014). Docker: lightweight linux containers for consistent development and deployment. Linux Journal, 2014(239), 2. doi: 10.5555/2600239.2600241. 56 | 57 | - [nf-test](https://code.askimed.com/nf-test) 58 | 59 | - [Singularity](https://pubmed.ncbi.nlm.nih.gov/28494014/) 60 | 61 | > Kurtzer GM, Sochat V, Bauer MW. Singularity: Scientific containers for mobility of compute. PLoS One. 2017 May 11;12(5):e0177459. doi: 10.1371/journal.pone.0177459. eCollection 2017. PubMed PMID: 28494014; PubMed Central PMCID: PMC5426675. 62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Harshil Patel, Moritz E. Beber and Jose Espinosa-Carrasco 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /assets/adaptivecard.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "message", 3 | "attachments": [ 4 | { 5 | "contentType": "application/vnd.microsoft.card.adaptive", 6 | "contentUrl": null, 7 | "content": { 8 | "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json", 9 | "msteams": { 10 | "width": "Full" 11 | }, 12 | "type": "AdaptiveCard", 13 | "version": "1.2", 14 | "body": [ 15 | { 16 | "type": "TextBlock", 17 | "size": "Large", 18 | "weight": "Bolder", 19 | "color": "<% if (success) { %>Good<% } else { %>Attention<%} %>", 20 | "text": "nf-core/fetchngs v${version} - ${runName}", 21 | "wrap": true 22 | }, 23 | { 24 | "type": "TextBlock", 25 | "spacing": "None", 26 | "text": "Completed at ${dateComplete} (duration: ${duration})", 27 | "isSubtle": true, 28 | "wrap": true 29 | }, 30 | { 31 | "type": "TextBlock", 32 | "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors. 
The full error message was: ${errorReport}.<% } %>", 33 | "wrap": true 34 | }, 35 | { 36 | "type": "TextBlock", 37 | "text": "The command used to launch the workflow was as follows:", 38 | "wrap": true 39 | }, 40 | { 41 | "type": "TextBlock", 42 | "text": "${commandLine}", 43 | "isSubtle": true, 44 | "wrap": true 45 | } 46 | ], 47 | "actions": [ 48 | { 49 | "type": "Action.ShowCard", 50 | "title": "Pipeline Configuration", 51 | "card": { 52 | "type": "AdaptiveCard", 53 | "\$schema": "http://adaptivecards.io/schemas/adaptive-card.json", 54 | "body": [ 55 | { 56 | "type": "FactSet", 57 | "facts": [<% out << summary.collect{ k,v -> "{\"title\": \"$k\", \"value\" : \"$v\"}"}.join(",\n") %> 58 | ] 59 | } 60 | ] 61 | } 62 | } 63 | ] 64 | } 65 | } 66 | ] 67 | } 68 | -------------------------------------------------------------------------------- /assets/email_template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | nf-core/fetchngs Pipeline Report 9 | 10 | 11 |
12 | 13 | 14 | 15 |

nf-core/fetchngs ${version}

16 |

Run Name: $runName

17 | 18 | <% if (!success){ 19 | out << """ 20 |
21 |

nf-core/fetchngs execution completed unsuccessfully!

22 |

The exit status of the task that caused the workflow execution to fail was: $exitStatus.

23 |

The full error message was:

24 |
${errorReport}
25 |
26 | """ 27 | } else { 28 | out << """ 29 |
30 | nf-core/fetchngs execution completed successfully! 31 |
32 | """ 33 | } 34 | %> 35 | 36 |

The workflow was completed at $dateComplete (duration: $duration)

37 |

The command used to launch the workflow was as follows:

38 |
$commandLine
39 | 40 |

Pipeline Configuration:

41 | 42 | 43 | <% out << summary.collect{ k,v -> "" }.join("\n") %> 44 | 45 |
$k
$v
46 | 47 |

nf-core/fetchngs

48 |

https://github.com/nf-core/fetchngs

49 | 50 |
51 | 52 | 53 | 54 | -------------------------------------------------------------------------------- /assets/email_template.txt: -------------------------------------------------------------------------------- 1 | ---------------------------------------------------- 2 | ,--./,-. 3 | ___ __ __ __ ___ /,-._.--~\\ 4 | |\\ | |__ __ / ` / \\ |__) |__ } { 5 | | \\| | \\__, \\__/ | \\ |___ \\`-._,-`-, 6 | `._,._,' 7 | nf-core/fetchngs ${version} 8 | ---------------------------------------------------- 9 | Run Name: $runName 10 | 11 | <% if (success){ 12 | out << "## nf-core/fetchngs execution completed successfully! ##" 13 | } else { 14 | out << """#################################################### 15 | ## nf-core/fetchngs execution completed unsuccessfully! ## 16 | #################################################### 17 | The exit status of the task that caused the workflow execution to fail was: $exitStatus. 18 | The full error message was: 19 | 20 | ${errorReport} 21 | """ 22 | } %> 23 | 24 | 25 | The workflow was completed at $dateComplete (duration: $duration) 26 | 27 | The command used to launch the workflow was as follows: 28 | 29 | $commandLine 30 | 31 | 32 | 33 | Pipeline Configuration: 34 | ----------------------- 35 | <% out << summary.collect{ k,v -> " - $k: $v" }.join("\n") %> 36 | 37 | -- 38 | nf-core/fetchngs 39 | https://github.com/nf-core/fetchngs 40 | -------------------------------------------------------------------------------- /assets/methods_description_template.yml: -------------------------------------------------------------------------------- 1 | id: "nf-core-fetchngs-methods-description" 2 | description: "Suggested text and references to use when describing pipeline usage within the methods section of a publication." 3 | section_name: "nf-core/fetchngs Methods Description" 4 | section_href: "https://github.com/nf-core/fetchngs" 5 | plot_type: "html" 6 | data: | 7 |

  <h4>Methods</h4>
  <p>Data was processed using nf-core/fetchngs v${workflow.manifest.version} ${doi_text} of the nf-core collection of workflows (<a href="https://doi.org/10.1038/s41587-020-0439-x">Ewels <em>et al.</em>, 2020</a>), utilising reproducible software environments from the Bioconda (<a href="https://doi.org/10.1038/s41592-018-0046-7">Grüning <em>et al.</em>, 2018</a>) and Biocontainers (<a href="https://doi.org/10.1093/bioinformatics/btx192">da Veiga Leprevost <em>et al.</em>, 2017</a>) projects.</p>
  <p>The pipeline was executed with Nextflow v${workflow.nextflow.version} (<a href="https://doi.org/10.1038/nbt.3820">Di Tommaso <em>et al.</em>, 2017</a>) with the following command:</p>
  <pre><code>${workflow.commandLine}</code></pre>
  <p>${tool_citations}</p>
  <h4>References</h4>
  <ul>
    <li>Ewels, P. A., Peltzer, A., Fillinger, S., Patel, H., Alneberg, J., Wilm, A., Garcia, M. U., Di Tommaso, P., & Nahnsen, S. (2020). The nf-core framework for community-curated bioinformatics pipelines. Nature Biotechnology, 38(3), 276-278. https://doi.org/10.1038/s41587-020-0439-x</li>
    <li>Di Tommaso, P., Chatzou, M., Floden, E. W., Barja, P. P., Palumbo, E., & Notredame, C. (2017). Nextflow enables reproducible computational workflows. Nature Biotechnology, 35(4), 316-319. https://doi.org/10.1038/nbt.3820</li>
    <li>Grüning, B., Dale, R., Sjödin, A., Chapman, B. A., Rowe, J., Tomkins-Tinch, C. H., Valieris, R., Köster, J., & The Bioconda Team. (2018). Bioconda: sustainable and comprehensive software distribution for the life sciences. Nature Methods, 15(7), 475-476. https://doi.org/10.1038/s41592-018-0046-7</li>
    <li>da Veiga Leprevost, F., Grüning, B. A., Alves Aflitos, S., Röst, H. L., Uszkoreit, J., Barsnes, H., Vaudel, M., Moreno, P., Gatto, L., Weber, J., Bai, M., Jimenez, R. C., Sachsenberg, T., Pfeuffer, J., Vera Alvarez, R., Griss, J., Nesvizhskii, A. I., & Perez-Riverol, Y. (2017). BioContainers: an open-source and community-driven framework for software standardization. Bioinformatics, 33(16), 2580-2582. https://doi.org/10.1093/bioinformatics/btx192</li>
    ${tool_bibliography}
  </ul>
  <div class="alert alert-info">
    <h5>Notes:</h5>
    <ul>
      ${nodoi_text}
      <li>The command above does not include parameters contained in any configs or profiles that may have been used. Ensure the config file is also uploaded with your publication!</li>
      <li>You should also cite all software used within this run. Check the "Software Versions" of this report to get version information.</li>
    </ul>
  </div>
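# NB (illustrative note, not part of the template itself): nf-core pipelines
# substitute the ${...} placeholders in `data` with a Groovy template engine
# before the HTML is rendered, along the lines of:
#
#   def engine = new groovy.text.SimpleTemplateEngine()
#   def html   = engine.createTemplate(template_text).make(fields).toString()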
28 | -------------------------------------------------------------------------------- /assets/nf-core-fetchngs_logo_light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nf-core/fetchngs/8ec2d934f9301c818d961b1e4fdf7fc79610bdc5/assets/nf-core-fetchngs_logo_light.png -------------------------------------------------------------------------------- /assets/schema_input.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema", 3 | "$id": "https://raw.githubusercontent.com/nf-core/fetchngs/master/assets/schema_input.json", 4 | "title": "nf-core/fetchngs pipeline - params.input schema", 5 | "description": "Schema for the file provided with params.input", 6 | "type": "array", 7 | "items": { 8 | "type": "object", 9 | "properties": { 10 | "": { 11 | "type": "string", 12 | "pattern": "^(((SR|ER|DR)[APRSX])|(SAM(N|EA|D))|(PRJ(NA|EB|DB))|(GS[EM]))(\\d+)$", 13 | "errorMessage": "Please provide a valid SRA, ENA, DDBJ or GEO identifier" 14 | } 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /assets/sendmail_template.txt: -------------------------------------------------------------------------------- 1 | To: $email 2 | Subject: $subject 3 | Mime-Version: 1.0 4 | Content-Type: multipart/related;boundary="nfcoremimeboundary" 5 | 6 | --nfcoremimeboundary 7 | Content-Type: text/html; charset=utf-8 8 | 9 | $email_html 10 | 11 | --nfcoremimeboundary 12 | Content-Type: image/png;name="nf-core-fetchngs_logo.png" 13 | Content-Transfer-Encoding: base64 14 | Content-ID: 15 | Content-Disposition: inline; filename="nf-core-fetchngs_logo_light.png" 16 | 17 | <% out << new File("$projectDir/assets/nf-core-fetchngs_logo_light.png"). 18 | bytes. 19 | encodeBase64(). 20 | toString(). 21 | tokenize( '\n' )*. 22 | toList()*. 23 | collate( 76 )*. 24 | collect { it.join() }. 25 | flatten(). 26 | join( '\n' ) %> 27 | 28 | --nfcoremimeboundary-- 29 | -------------------------------------------------------------------------------- /assets/slackreport.json: -------------------------------------------------------------------------------- 1 | { 2 | "attachments": [ 3 | { 4 | "fallback": "Plain-text summary of the attachment.", 5 | "color": "<% if (success) { %>good<% } else { %>danger<%} %>", 6 | "author_name": "nf-core/fetchngs ${version} - ${runName}", 7 | "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", 8 | "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", 9 | "fields": [ 10 | { 11 | "title": "Command used to launch the workflow", 12 | "value": "```${commandLine}```", 13 | "short": false 14 | } 15 | <% 16 | if (!success) { %> 17 | , 18 | { 19 | "title": "Full error message", 20 | "value": "```${errorReport}```", 21 | "short": false 22 | }, 23 | { 24 | "title": "Pipeline configuration", 25 | "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? 
("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>", 26 | "short": false 27 | } 28 | <% } 29 | %> 30 | ], 31 | "footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})" 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /bin/multiqc_mappings_config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | 5 | with open(sys.argv[1], "r") as fin, open(sys.argv[2], "w") as fout: 6 | header = fin.readline().split(",") 7 | config = "sample_names_rename_buttons:\n" 8 | config += "\n".join([" - " + x.strip('"') for x in header]) 9 | config += "sample_names_rename:\n" 10 | rename = [] 11 | for line in fin: 12 | rename.append(f" - [{', '.join(line.strip().split(','))}]") 13 | fout.write(config + "\n".join(sorted(rename)) + "\n") 14 | -------------------------------------------------------------------------------- /conf/base.config: -------------------------------------------------------------------------------- 1 | /* 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | nf-core/fetchngs Nextflow base config file 4 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 5 | A 'blank slate' config file, appropriate for general use on most high performance 6 | compute environments. Assumes that all software is installed and available on 7 | the PATH. Runs in `local` mode - all jobs will be run on the logged in environment. 8 | ---------------------------------------------------------------------------------------- 9 | */ 10 | 11 | process { 12 | 13 | cpus = { check_max( 1 * task.attempt, 'cpus' ) } 14 | memory = { check_max( 6.GB * task.attempt, 'memory' ) } 15 | time = { check_max( 4.h * task.attempt, 'time' ) } 16 | 17 | publishDir = [ 18 | path: { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" }, 19 | mode: params.publish_dir_mode, 20 | saveAs: { filename -> filename.equals('versions.yml') ? null : filename } 21 | ] 22 | 23 | errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' } 24 | maxRetries = 1 25 | maxErrors = '-1' 26 | 27 | // Process-specific resource requirements 28 | // NOTE - Please try and re-use the labels below as much as possible. 29 | // These labels are used and recognised by default in DSL2 files hosted on nf-core/modules. 30 | // If possible, it would be nice to keep the same label naming convention when 31 | // adding in your local modules too. 
    // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors
    withLabel:process_single {
        cpus   = { check_max( 1                  , 'cpus'   ) }
        memory = { check_max( 6.GB * task.attempt, 'memory' ) }
        time   = { check_max( 4.h  * task.attempt, 'time'   ) }
    }
    withLabel:process_low {
        cpus   = { check_max( 2     * task.attempt, 'cpus'   ) }
        memory = { check_max( 12.GB * task.attempt, 'memory' ) }
        time   = { check_max( 4.h   * task.attempt, 'time'   ) }
    }
    withLabel:process_medium {
        cpus   = { check_max( 6     * task.attempt, 'cpus'   ) }
        memory = { check_max( 36.GB * task.attempt, 'memory' ) }
        time   = { check_max( 8.h   * task.attempt, 'time'   ) }
    }
    withLabel:process_high {
        cpus   = { check_max( 12    * task.attempt, 'cpus'   ) }
        memory = { check_max( 72.GB * task.attempt, 'memory' ) }
        time   = { check_max( 16.h  * task.attempt, 'time'   ) }
    }
    withLabel:process_long {
        time   = { check_max( 20.h  * task.attempt, 'time'   ) }
    }
    withLabel:process_high_memory {
        memory = { check_max( 200.GB * task.attempt, 'memory' ) }
    }
    withLabel:error_ignore {
        errorStrategy = 'ignore'
    }
    withLabel:error_retry {
        errorStrategy = 'retry'
        maxRetries    = 2
    }
}
--------------------------------------------------------------------------------
/conf/test.config:
--------------------------------------------------------------------------------
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Defines input files and everything required to run a fast and simple pipeline test.

    Use as follows:
        nextflow run nf-core/fetchngs -profile test,<docker/singularity> --outdir <OUTDIR>

----------------------------------------------------------------------------------------
*/

params {
    config_profile_name        = 'Test profile'
    config_profile_description = 'Minimal test dataset to check pipeline function'

    // Limit resources so that this can run on GitHub Actions
    max_cpus   = 2
    max_memory = '6.GB'
    max_time   = '6.h'

    // Input data
    input = 'https://raw.githubusercontent.com/nf-core/test-datasets/2732b911c57e607fa7aea5ba0c3d91b25bafb662/testdata/v1.12.0/sra_ids_test.csv'
}
--------------------------------------------------------------------------------
/conf/test_full.config:
--------------------------------------------------------------------------------
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Nextflow config file for running full-size tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Defines input files and everything required to run a full size pipeline test.

    Use as follows:
        nextflow run nf-core/fetchngs -profile test_full,<docker/singularity> --outdir <OUTDIR>

----------------------------------------------------------------------------------------
*/

params {
    config_profile_name        = 'Full test profile'
    config_profile_description = 'Full test dataset to check pipeline function'

    // File containing SRA ids from nf-core/rnaseq -profile test_full for full-sized test
    input = 'https://raw.githubusercontent.com/nf-core/test-datasets/100736c99d87667fb7c247c267bc8acfac647bed/testdata/v1.12.0/sra_ids_rnaseq_test_full.csv'
}
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
# nf-core/fetchngs: Documentation

The nf-core/fetchngs documentation is split into the following pages:

- [Usage](usage.md)
  - An overview of how the pipeline works, how to run it and a description of all of the different command-line flags.
- [Output](output.md)
  - An overview of the different results produced by the pipeline and how to interpret them.

You can find a lot more documentation about installing, configuring and running nf-core pipelines on the website: [https://nf-co.re](https://nf-co.re)
--------------------------------------------------------------------------------
/docs/images/nf-core-fetchngs_logo_dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nf-core/fetchngs/8ec2d934f9301c818d961b1e4fdf7fc79610bdc5/docs/images/nf-core-fetchngs_logo_dark.png
--------------------------------------------------------------------------------
/docs/images/nf-core-fetchngs_logo_light.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nf-core/fetchngs/8ec2d934f9301c818d961b1e4fdf7fc79610bdc5/docs/images/nf-core-fetchngs_logo_light.png
--------------------------------------------------------------------------------
/docs/images/nf-core-fetchngs_metro_map_grey.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nf-core/fetchngs/8ec2d934f9301c818d961b1e4fdf7fc79610bdc5/docs/images/nf-core-fetchngs_metro_map_grey.png
--------------------------------------------------------------------------------
/docs/output.md:
--------------------------------------------------------------------------------
# nf-core/fetchngs: Output

## Introduction

This document describes the output produced by the pipeline. The directories listed below will be created in the results directory after the pipeline has finished. All paths are relative to the top-level results directory.

## Pipeline overview

The pipeline is built using [Nextflow](https://www.nextflow.io/) and processes data depending on the type of ids provided:

- Download FastQ files and create samplesheet from [SRA / ENA / DDBJ / GEO ids](#sra--ena--ddbj--geo-ids)
- [Pipeline information](#pipeline-information) - Report metrics generated during the workflow execution

Please see the [usage documentation](https://nf-co.re/fetchngs/usage#introduction) for a list of supported public repository identifiers and how to provide them to the pipeline.

### SRA / ENA / DDBJ / GEO ids
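As a minimal illustration (using accessions that appear elsewhere in this repository's test files), the `--input` file is simply a list with one supported identifier per line, and id types can be mixed freely:

```csv
SRR13191702
SRX6725035
ERR1109373
ERX1188904
SAMEA3643867
```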
<details markdown="1">
<summary>Output files</summary>

- `fastq/`
  - `*.fastq.gz`: Paired-end/single-end reads downloaded from the SRA / ENA / DDBJ / GEO.
- `fastq/md5/`
  - `*.md5`: Files containing `md5` sums for FastQ files downloaded from the ENA.
- `samplesheet/`
  - `samplesheet.csv`: Auto-created samplesheet with collated metadata and paths to downloaded FastQ files; see the example command after this list.
  - `id_mappings.csv`: File with selected fields that can be used to rename samples to more informative names; see the [`--sample_mapping_fields`](https://nf-co.re/fetchngs/parameters#sample_mapping_fields) parameter to customise this behaviour.
  - `multiqc_config.yml`: [MultiQC](https://multiqc.info/docs/#bulk-sample-renaming) config file that can be passed to most nf-core pipelines via the `--multiqc_config` parameter for bulk renaming of sample names from database ids; see the [`--sample_mapping_fields`](https://nf-co.re/fetchngs/parameters#sample_mapping_fields) parameter to customise this behaviour.
- `metadata/`
  - `*.runinfo_ftp.tsv`: Re-formatted metadata file downloaded from the ENA.
  - `*.runinfo.tsv`: Original metadata file downloaded from the ENA.

</details>
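For example (an illustrative sketch; substitute whichever downstream pipeline, profile and output paths you actually use), the auto-generated files above can be passed straight to another nf-core pipeline:

```bash
nextflow run nf-core/rnaseq \
    --input ./results/samplesheet/samplesheet.csv \
    --multiqc_config ./results/samplesheet/multiqc_config.yml \
    --outdir ./results_rnaseq \
    -profile docker
```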

The final sample information for all identifiers is obtained from the ENA, which provides direct download links for FastQ files as well as their associated md5 sums. If download links exist, the files are downloaded in parallel via FTP; otherwise, they are downloaded using sra-tools.

### Pipeline information

<details markdown="1">
<summary>Output files</summary>

- `pipeline_info/`
  - Reports generated by Nextflow: `execution_report.html`, `execution_timeline.html`, `execution_trace.txt` and `pipeline_dag.dot`/`pipeline_dag.svg`.
  - Reports generated by the pipeline: `pipeline_report.html`, `pipeline_report.txt` and `software_versions.yml`. The `pipeline_report*` files will only be present if the `--email` / `--email_on_fail` parameters are used when running the pipeline; see the example command after this list.
  - Parameters used by the pipeline run: `params.json`.

</details>
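For instance (the input file and email address below are placeholders), requesting an email report at launch is what triggers creation of the `pipeline_report*` files:

```bash
nextflow run nf-core/fetchngs \
    --input ids.csv \
    --outdir ./results \
    --email you@example.com \
    -profile docker
```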
48 | 49 | [Nextflow](https://www.nextflow.io/docs/latest/tracing.html) provides excellent functionality for generating various reports relevant to the running and execution of the pipeline. This will allow you to troubleshoot errors with the running of the pipeline, and also provide you with other information such as launch commands, run times and resource usage. 50 | -------------------------------------------------------------------------------- /main.nf: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env nextflow 2 | /* 3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 4 | nf-core/fetchngs 5 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 6 | Github : https://github.com/nf-core/fetchngs 7 | Website: https://nf-co.re/fetchngs 8 | Slack : https://nfcore.slack.com/channels/fetchngs 9 | ---------------------------------------------------------------------------------------- 10 | */ 11 | 12 | nextflow.enable.dsl = 2 13 | 14 | /* 15 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 16 | IMPORT FUNCTIONS / MODULES / SUBWORKFLOWS / WORKFLOWS 17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 18 | */ 19 | 20 | include { SRA } from './workflows/sra' 21 | include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_fetchngs_pipeline' 22 | include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_fetchngs_pipeline' 23 | 24 | /* 25 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 26 | NAMED WORKFLOWS FOR PIPELINE 27 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 28 | */ 29 | 30 | // 31 | // WORKFLOW: Run main nf-core/fetchngs analysis pipeline depending on type of identifier provided 32 | // 33 | workflow NFCORE_FETCHNGS { 34 | 35 | take: 36 | ids // channel: database ids read in from --input 37 | 38 | main: 39 | 40 | // 41 | // WORKFLOW: Download FastQ files for SRA / ENA / GEO / DDBJ ids 42 | // 43 | SRA ( ids ) 44 | 45 | } 46 | 47 | /* 48 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 49 | RUN MAIN WORKFLOW 50 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 51 | */ 52 | 53 | workflow { 54 | 55 | // 56 | // SUBWORKFLOW: Run initialisation tasks 57 | // 58 | PIPELINE_INITIALISATION ( 59 | params.version, 60 | params.help, 61 | params.validate_params, 62 | params.monochrome_logs, 63 | args, 64 | params.outdir, 65 | params.input, 66 | params.ena_metadata_fields 67 | ) 68 | 69 | // 70 | // WORKFLOW: Run primary workflows for the pipeline 71 | // 72 | NFCORE_FETCHNGS ( 73 | PIPELINE_INITIALISATION.out.ids 74 | ) 75 | 76 | // 77 | // SUBWORKFLOW: Run completion tasks 78 | // 79 | PIPELINE_COMPLETION ( 80 | params.email, 81 | params.email_on_fail, 82 | params.plaintext_email, 83 | params.outdir, 84 | params.monochrome_logs, 85 | params.hook_url 86 | ) 87 | } 88 | 89 | /* 90 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 91 | THE END 92 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 93 | */ 94 | -------------------------------------------------------------------------------- /modules.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nf-core/fetchngs", 
3 | "homePage": "https://github.com/nf-core/fetchngs", 4 | "repos": { 5 | "https://github.com/nf-core/modules.git": { 6 | "modules": { 7 | "nf-core": { 8 | "custom/sratoolsncbisettings": { 9 | "branch": "master", 10 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 11 | "installed_by": ["fastq_download_prefetch_fasterqdump_sratools"] 12 | }, 13 | "sratools/fasterqdump": { 14 | "branch": "master", 15 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 16 | "installed_by": ["fastq_download_prefetch_fasterqdump_sratools"], 17 | "patch": "modules/nf-core/sratools/fasterqdump/sratools-fasterqdump.diff" 18 | }, 19 | "sratools/prefetch": { 20 | "branch": "master", 21 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 22 | "installed_by": ["fastq_download_prefetch_fasterqdump_sratools"] 23 | }, 24 | "untar": { 25 | "branch": "master", 26 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 27 | "installed_by": ["modules"] 28 | } 29 | } 30 | }, 31 | "subworkflows": { 32 | "nf-core": { 33 | "fastq_download_prefetch_fasterqdump_sratools": { 34 | "branch": "master", 35 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 36 | "installed_by": ["subworkflows"] 37 | }, 38 | "utils_nextflow_pipeline": { 39 | "branch": "master", 40 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 41 | "installed_by": ["subworkflows"] 42 | }, 43 | "utils_nfcore_pipeline": { 44 | "branch": "master", 45 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 46 | "installed_by": ["subworkflows"] 47 | }, 48 | "utils_nfvalidation_plugin": { 49 | "branch": "master", 50 | "git_sha": "5caf7640a9ef1d18d765d55339be751bb0969dfa", 51 | "installed_by": ["subworkflows"] 52 | } 53 | } 54 | } 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /modules/local/aspera_cli/environment.yml: -------------------------------------------------------------------------------- 1 | name: aspera_cli 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::aspera-cli=4.14.0 8 | -------------------------------------------------------------------------------- /modules/local/aspera_cli/main.nf: -------------------------------------------------------------------------------- 1 | process ASPERA_CLI { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/aspera-cli:4.14.0--hdfd78af_1' : 8 | 'biocontainers/aspera-cli:4.14.0--hdfd78af_1' }" 9 | 10 | input: 11 | tuple val(meta), val(fastq) 12 | val user 13 | 14 | output: 15 | tuple val(meta), path("*fastq.gz"), emit: fastq 16 | tuple val(meta), path("*md5") , emit: md5 17 | path "versions.yml" , emit: versions 18 | 19 | script: 20 | def args = task.ext.args ?: '' 21 | def conda_prefix = ['singularity', 'apptainer'].contains(workflow.containerEngine) ? 
"export CONDA_PREFIX=/usr/local" : "" 22 | if (meta.single_end) { 23 | """ 24 | $conda_prefix 25 | 26 | ascp \\ 27 | $args \\ 28 | -i \$CONDA_PREFIX/etc/aspera/aspera_bypass_dsa.pem \\ 29 | ${user}@${fastq[0]} \\ 30 | ${meta.id}.fastq.gz 31 | 32 | echo "${meta.md5_1} ${meta.id}.fastq.gz" > ${meta.id}.fastq.gz.md5 33 | md5sum -c ${meta.id}.fastq.gz.md5 34 | 35 | cat <<-END_VERSIONS > versions.yml 36 | "${task.process}": 37 | aspera_cli: \$(ascli --version) 38 | END_VERSIONS 39 | """ 40 | } else { 41 | """ 42 | $conda_prefix 43 | 44 | ascp \\ 45 | $args \\ 46 | -i \$CONDA_PREFIX/etc/aspera/aspera_bypass_dsa.pem \\ 47 | ${user}@${fastq[0]} \\ 48 | ${meta.id}_1.fastq.gz 49 | 50 | echo "${meta.md5_1} ${meta.id}_1.fastq.gz" > ${meta.id}_1.fastq.gz.md5 51 | md5sum -c ${meta.id}_1.fastq.gz.md5 52 | 53 | ascp \\ 54 | $args \\ 55 | -i \$CONDA_PREFIX/etc/aspera/aspera_bypass_dsa.pem \\ 56 | ${user}@${fastq[1]} \\ 57 | ${meta.id}_2.fastq.gz 58 | 59 | echo "${meta.md5_2} ${meta.id}_2.fastq.gz" > ${meta.id}_2.fastq.gz.md5 60 | md5sum -c ${meta.id}_2.fastq.gz.md5 61 | 62 | cat <<-END_VERSIONS > versions.yml 63 | "${task.process}": 64 | aspera_cli: \$(ascli --version) 65 | END_VERSIONS 66 | """ 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /modules/local/aspera_cli/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'ASPERA_CLI' { 3 | ext.args = '-QT -l 300m -P33001' 4 | publishDir = [ 5 | [ 6 | path: { "${params.outdir}/fastq" }, 7 | mode: params.publish_dir_mode, 8 | pattern: "*.fastq.gz" 9 | ], 10 | [ 11 | path: { "${params.outdir}/fastq/md5" }, 12 | mode: params.publish_dir_mode, 13 | pattern: "*.md5" 14 | ] 15 | ] 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /modules/local/aspera_cli/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test process: ASPERA_CLI" 4 | script "../main.nf" 5 | process "ASPERA_CLI" 6 | 7 | test("Should run without failures") { 8 | 9 | when { 10 | process { 11 | """ 12 | input[0] = [ 13 | [ id:'SRX9626017_SRR13191702', single_end:false, md5_1: '89c5be920021a035084d8aeb74f32df7', md5_2: '56271be38a80db78ef3bdfc5d9909b98' ], // meta map 14 | [ 15 | 'fasp.sra.ebi.ac.uk:/vol1/fastq/SRR131/002/SRR13191702/SRR13191702_1.fastq.gz', 16 | 'fasp.sra.ebi.ac.uk:/vol1/fastq/SRR131/002/SRR13191702/SRR13191702_2.fastq.gz' 17 | ] 18 | ] 19 | input[1] = 'era-fasp' 20 | """ 21 | } 22 | } 23 | 24 | then { 25 | assertAll( 26 | { assert process.success }, 27 | { assert snapshot(process.out).match() } 28 | ) 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /modules/local/aspera_cli/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "SRX9626017_SRR13191702", 9 | "single_end": false, 10 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 11 | "md5_2": "56271be38a80db78ef3bdfc5d9909b98" 12 | }, 13 | [ 14 | "SRX9626017_SRR13191702_1.fastq.gz:md5,baaaea61cba4294ec696fdfea1610848", 15 | "SRX9626017_SRR13191702_2.fastq.gz:md5,8e43ad99049fabb6526a4b846da01c32" 16 | ] 17 | ] 18 | ], 19 | "1": [ 20 | [ 21 | { 22 | "id": "SRX9626017_SRR13191702", 23 | "single_end": false, 24 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 25 | "md5_2": 
"56271be38a80db78ef3bdfc5d9909b98" 26 | }, 27 | [ 28 | "SRX9626017_SRR13191702_1.fastq.gz.md5:md5,055a6916ec9ee478e453d50651f87997", 29 | "SRX9626017_SRR13191702_2.fastq.gz.md5:md5,c30ac785f8d80ec563fabf604d8bf945" 30 | ] 31 | ] 32 | ], 33 | "2": [ 34 | "versions.yml:md5,a51a1dfc6308d71058ddc12c46101dd3" 35 | ], 36 | "fastq": [ 37 | [ 38 | { 39 | "id": "SRX9626017_SRR13191702", 40 | "single_end": false, 41 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 42 | "md5_2": "56271be38a80db78ef3bdfc5d9909b98" 43 | }, 44 | [ 45 | "SRX9626017_SRR13191702_1.fastq.gz:md5,baaaea61cba4294ec696fdfea1610848", 46 | "SRX9626017_SRR13191702_2.fastq.gz:md5,8e43ad99049fabb6526a4b846da01c32" 47 | ] 48 | ] 49 | ], 50 | "md5": [ 51 | [ 52 | { 53 | "id": "SRX9626017_SRR13191702", 54 | "single_end": false, 55 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 56 | "md5_2": "56271be38a80db78ef3bdfc5d9909b98" 57 | }, 58 | [ 59 | "SRX9626017_SRR13191702_1.fastq.gz.md5:md5,055a6916ec9ee478e453d50651f87997", 60 | "SRX9626017_SRR13191702_2.fastq.gz.md5:md5,c30ac785f8d80ec563fabf604d8bf945" 61 | ] 62 | ] 63 | ], 64 | "versions": [ 65 | "versions.yml:md5,a51a1dfc6308d71058ddc12c46101dd3" 66 | ] 67 | } 68 | ], 69 | "meta": { 70 | "nf-test": "0.8.4", 71 | "nextflow": "23.10.1" 72 | }, 73 | "timestamp": "2024-02-28T11:52:00.601018" 74 | } 75 | } -------------------------------------------------------------------------------- /modules/local/multiqc_mappings_config/main.nf: -------------------------------------------------------------------------------- 1 | 2 | process MULTIQC_MAPPINGS_CONFIG { 3 | 4 | conda "conda-forge::python=3.9.5" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/python:3.9--1' : 7 | 'biocontainers/python:3.9--1' }" 8 | 9 | input: 10 | path csv 11 | 12 | output: 13 | path "*yml" , emit: yml 14 | path "versions.yml", emit: versions 15 | 16 | script: 17 | """ 18 | multiqc_mappings_config.py \\ 19 | $csv \\ 20 | multiqc_config.yml 21 | 22 | cat <<-END_VERSIONS > versions.yml 23 | "${task.process}": 24 | python: \$(python --version | sed 's/Python //g') 25 | END_VERSIONS 26 | """ 27 | } 28 | -------------------------------------------------------------------------------- /modules/local/multiqc_mappings_config/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'MULTIQC_MAPPINGS_CONFIG' { 3 | publishDir = [ 4 | path: { "${params.outdir}/samplesheet" }, 5 | mode: params.publish_dir_mode, 6 | saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } 7 | ] 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /modules/local/multiqc_mappings_config/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test process: MULTIQC_MAPPINGS_CONFIG" 4 | script "../main.nf" 5 | process "MULTIQC_MAPPINGS_CONFIG" 6 | 7 | test("Should run without failures") { 8 | 9 | when { 10 | process { 11 | """ 12 | input[0] = file(params.pipelines_testdata_base_path + 'csv/SRX9626017_SRR13191702.mappings.csv', checkIfExists: true) 13 | """ 14 | } 15 | } 16 | 17 | then { 18 | assertAll( 19 | { assert process.success }, 20 | { assert snapshot(process.out).match() } 21 | ) 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /modules/local/multiqc_mappings_config/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | "multiqc_config.yml:md5,7f3cb10fff83ba9eb3e8fa6862d1290a", 8 | "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" 9 | ] 10 | ], 11 | "1": [ 12 | "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" 13 | ], 14 | "versions": [ 15 | "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" 16 | ], 17 | "yml": [ 18 | [ 19 | "multiqc_config.yml:md5,7f3cb10fff83ba9eb3e8fa6862d1290a", 20 | "versions.yml:md5,dd4c66f0551d15510b36bb2e2b2fdd73" 21 | ] 22 | ] 23 | } 24 | ], 25 | "meta": { 26 | "nf-test": "0.8.4", 27 | "nextflow": "23.10.1" 28 | }, 29 | "timestamp": "2024-02-28T11:52:12.65888" 30 | } 31 | } -------------------------------------------------------------------------------- /modules/local/sra_fastq_ftp/main.nf: -------------------------------------------------------------------------------- 1 | 2 | process SRA_FASTQ_FTP { 3 | tag "$meta.id" 4 | label 'process_low' 5 | label 'error_retry' 6 | 7 | conda "conda-forge::wget=1.20.1" 8 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
9 | 'https://depot.galaxyproject.org/singularity/wget:1.20.1' : 10 | 'biocontainers/wget:1.20.1' }" 11 | 12 | input: 13 | tuple val(meta), val(fastq) 14 | 15 | output: 16 | tuple val(meta), path("*fastq.gz"), emit: fastq 17 | tuple val(meta), path("*md5") , emit: md5 18 | path "versions.yml" , emit: versions 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | if (meta.single_end) { 23 | """ 24 | wget \\ 25 | $args \\ 26 | -O ${meta.id}.fastq.gz \\ 27 | ${fastq[0]} 28 | 29 | echo "${meta.md5_1} ${meta.id}.fastq.gz" > ${meta.id}.fastq.gz.md5 30 | md5sum -c ${meta.id}.fastq.gz.md5 31 | 32 | cat <<-END_VERSIONS > versions.yml 33 | "${task.process}": 34 | wget: \$(echo \$(wget --version | head -n 1 | sed 's/^GNU Wget //; s/ .*\$//')) 35 | END_VERSIONS 36 | """ 37 | } else { 38 | """ 39 | wget \\ 40 | $args \\ 41 | -O ${meta.id}_1.fastq.gz \\ 42 | ${fastq[0]} 43 | 44 | echo "${meta.md5_1} ${meta.id}_1.fastq.gz" > ${meta.id}_1.fastq.gz.md5 45 | md5sum -c ${meta.id}_1.fastq.gz.md5 46 | 47 | wget \\ 48 | $args \\ 49 | -O ${meta.id}_2.fastq.gz \\ 50 | ${fastq[1]} 51 | 52 | echo "${meta.md5_2} ${meta.id}_2.fastq.gz" > ${meta.id}_2.fastq.gz.md5 53 | md5sum -c ${meta.id}_2.fastq.gz.md5 54 | 55 | cat <<-END_VERSIONS > versions.yml 56 | "${task.process}": 57 | wget: \$(echo \$(wget --version | head -n 1 | sed 's/^GNU Wget //; s/ .*\$//')) 58 | END_VERSIONS 59 | """ 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /modules/local/sra_fastq_ftp/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'SRA_FASTQ_FTP' { 3 | ext.args = '-t 5 -nv -c -T 60' 4 | publishDir = [ 5 | [ 6 | path: { "${params.outdir}/fastq" }, 7 | mode: params.publish_dir_mode, 8 | pattern: "*.fastq.gz" 9 | ], 10 | [ 11 | path: { "${params.outdir}/fastq/md5" }, 12 | mode: params.publish_dir_mode, 13 | pattern: "*.md5" 14 | ] 15 | ] 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /modules/local/sra_fastq_ftp/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test process: SRA_FASTQ_FTP" 4 | script "../main.nf" 5 | process "SRA_FASTQ_FTP" 6 | 7 | test("Should run without failures") { 8 | 9 | when { 10 | process { 11 | """ 12 | input[0] = [ 13 | [ id:'SRX9626017_SRR13191702', single_end:false, md5_1: '89c5be920021a035084d8aeb74f32df7', md5_2: '56271be38a80db78ef3bdfc5d9909b98' ], // meta map 14 | [ 15 | 'ftp.sra.ebi.ac.uk/vol1/fastq/SRR131/002/SRR13191702/SRR13191702_1.fastq.gz', 16 | 'ftp.sra.ebi.ac.uk/vol1/fastq/SRR131/002/SRR13191702/SRR13191702_2.fastq.gz' 17 | ] 18 | ] 19 | """ 20 | } 21 | } 22 | 23 | then { 24 | assertAll( 25 | { assert process.success }, 26 | { assert snapshot(process.out).match() } 27 | ) 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /modules/local/sra_fastq_ftp/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "SRX9626017_SRR13191702", 9 | "single_end": false, 10 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 11 | "md5_2": "56271be38a80db78ef3bdfc5d9909b98" 12 | }, 13 | [ 14 | "SRX9626017_SRR13191702_1.fastq.gz:md5,baaaea61cba4294ec696fdfea1610848", 15 | "SRX9626017_SRR13191702_2.fastq.gz:md5,8e43ad99049fabb6526a4b846da01c32" 16 | ] 17 | ] 
18 | ], 19 | "1": [ 20 | [ 21 | { 22 | "id": "SRX9626017_SRR13191702", 23 | "single_end": false, 24 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 25 | "md5_2": "56271be38a80db78ef3bdfc5d9909b98" 26 | }, 27 | [ 28 | "SRX9626017_SRR13191702_1.fastq.gz.md5:md5,055a6916ec9ee478e453d50651f87997", 29 | "SRX9626017_SRR13191702_2.fastq.gz.md5:md5,c30ac785f8d80ec563fabf604d8bf945" 30 | ] 31 | ] 32 | ], 33 | "2": [ 34 | "versions.yml:md5,6b60ed6d5805271a1b97798e29c0635c" 35 | ], 36 | "fastq": [ 37 | [ 38 | { 39 | "id": "SRX9626017_SRR13191702", 40 | "single_end": false, 41 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 42 | "md5_2": "56271be38a80db78ef3bdfc5d9909b98" 43 | }, 44 | [ 45 | "SRX9626017_SRR13191702_1.fastq.gz:md5,baaaea61cba4294ec696fdfea1610848", 46 | "SRX9626017_SRR13191702_2.fastq.gz:md5,8e43ad99049fabb6526a4b846da01c32" 47 | ] 48 | ] 49 | ], 50 | "md5": [ 51 | [ 52 | { 53 | "id": "SRX9626017_SRR13191702", 54 | "single_end": false, 55 | "md5_1": "89c5be920021a035084d8aeb74f32df7", 56 | "md5_2": "56271be38a80db78ef3bdfc5d9909b98" 57 | }, 58 | [ 59 | "SRX9626017_SRR13191702_1.fastq.gz.md5:md5,055a6916ec9ee478e453d50651f87997", 60 | "SRX9626017_SRR13191702_2.fastq.gz.md5:md5,c30ac785f8d80ec563fabf604d8bf945" 61 | ] 62 | ] 63 | ], 64 | "versions": [ 65 | "versions.yml:md5,6b60ed6d5805271a1b97798e29c0635c" 66 | ] 67 | } 68 | ], 69 | "meta": { 70 | "nf-test": "0.8.4", 71 | "nextflow": "23.10.1" 72 | }, 73 | "timestamp": "2024-02-28T11:51:51.301654" 74 | } 75 | } -------------------------------------------------------------------------------- /modules/local/sra_ids_to_runinfo/main.nf: -------------------------------------------------------------------------------- 1 | 2 | process SRA_IDS_TO_RUNINFO { 3 | tag "$id" 4 | label 'error_retry' 5 | 6 | conda "conda-forge::python=3.9.5" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 8 | 'https://depot.galaxyproject.org/singularity/python:3.9--1' : 9 | 'biocontainers/python:3.9--1' }" 10 | 11 | input: 12 | val id 13 | val fields 14 | 15 | output: 16 | path "*.tsv" , emit: tsv 17 | path "versions.yml", emit: versions 18 | 19 | script: 20 | def metadata_fields = fields ? 
"--ena_metadata_fields ${fields}" : '' 21 | """ 22 | echo $id > id.txt 23 | sra_ids_to_runinfo.py \\ 24 | id.txt \\ 25 | ${id}.runinfo.tsv \\ 26 | $metadata_fields 27 | 28 | cat <<-END_VERSIONS > versions.yml 29 | "${task.process}": 30 | python: \$(python --version | sed 's/Python //g') 31 | END_VERSIONS 32 | """ 33 | } 34 | -------------------------------------------------------------------------------- /modules/local/sra_ids_to_runinfo/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'SRA_IDS_TO_RUNINFO' { 3 | publishDir = [ 4 | path: { "${params.outdir}/metadata" }, 5 | enabled: false 6 | ] 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /modules/local/sra_ids_to_runinfo/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test process: SRA_IDS_TO_RUNINFO" 4 | script "../main.nf" 5 | process "SRA_IDS_TO_RUNINFO" 6 | 7 | test("Should run without failures") { 8 | 9 | when { 10 | process { 11 | """ 12 | input[0] = 'SRR13191702' 13 | input[1] = '' 14 | """ 15 | } 16 | } 17 | 18 | then { 19 | assertAll( 20 | { assert process.success }, 21 | { assert snapshot(process.out).match() } 22 | ) 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /modules/local/sra_ids_to_runinfo/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | "SRR13191702.runinfo.tsv:md5,3a1be35781ca6e8a28d8fd4d2f3bbe85" 7 | ], 8 | "1": [ 9 | "versions.yml:md5,1c14442e9b494b586eafe41e77300fae" 10 | ], 11 | "tsv": [ 12 | "SRR13191702.runinfo.tsv:md5,3a1be35781ca6e8a28d8fd4d2f3bbe85" 13 | ], 14 | "versions": [ 15 | "versions.yml:md5,1c14442e9b494b586eafe41e77300fae" 16 | ] 17 | } 18 | ], 19 | "meta": { 20 | "nf-test": "0.8.4", 21 | "nextflow": "23.10.1" 22 | }, 23 | "timestamp": "2024-02-28T11:52:05.345153" 24 | } 25 | } -------------------------------------------------------------------------------- /modules/local/sra_runinfo_to_ftp/main.nf: -------------------------------------------------------------------------------- 1 | 2 | process SRA_RUNINFO_TO_FTP { 3 | 4 | conda "conda-forge::python=3.9.5" 5 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 6 | 'https://depot.galaxyproject.org/singularity/python:3.9--1' : 7 | 'biocontainers/python:3.9--1' }" 8 | 9 | input: 10 | path runinfo 11 | 12 | output: 13 | path "*.tsv" , emit: tsv 14 | path "versions.yml", emit: versions 15 | 16 | script: 17 | """ 18 | sra_runinfo_to_ftp.py \\ 19 | ${runinfo.join(',')} \\ 20 | ${runinfo.toString().tokenize(".")[0]}.runinfo_ftp.tsv 21 | 22 | cat <<-END_VERSIONS > versions.yml 23 | "${task.process}": 24 | python: \$(python --version | sed 's/Python //g') 25 | END_VERSIONS 26 | """ 27 | } 28 | -------------------------------------------------------------------------------- /modules/local/sra_runinfo_to_ftp/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: 'SRA_RUNINFO_TO_FTP' { 3 | publishDir = [ 4 | path: { "${params.outdir}/metadata" }, 5 | mode: params.publish_dir_mode, 6 | saveAs: { filename -> filename.equals('versions.yml') ? 
null : filename } 7 | ] 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /modules/local/sra_runinfo_to_ftp/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test process: SRA_RUNINFO_TO_FTP" 4 | script "../main.nf" 5 | process "SRA_RUNINFO_TO_FTP" 6 | 7 | test("Should run without failures") { 8 | 9 | when { 10 | process { 11 | """ 12 | input[0] = file(params.pipelines_testdata_base_path + 'tsv/SRR13191702.runinfo.tsv', checkIfExists: true) 13 | """ 14 | } 15 | } 16 | 17 | then { 18 | assertAll( 19 | { assert process.success }, 20 | { assert snapshot(process.out).match() } 21 | ) 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /modules/local/sra_runinfo_to_ftp/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | "SRR13191702.runinfo_ftp.tsv:md5,94378c448c044b3e20e5c54e442ab62e" 7 | ], 8 | "1": [ 9 | "versions.yml:md5,e95f8185f665127a73622a19d321bcca" 10 | ], 11 | "tsv": [ 12 | "SRR13191702.runinfo_ftp.tsv:md5,94378c448c044b3e20e5c54e442ab62e" 13 | ], 14 | "versions": [ 15 | "versions.yml:md5,e95f8185f665127a73622a19d321bcca" 16 | ] 17 | } 18 | ], 19 | "meta": { 20 | "nf-test": "0.8.4", 21 | "nextflow": "23.10.1" 22 | }, 23 | "timestamp": "2024-02-28T11:51:45.748227" 24 | } 25 | } -------------------------------------------------------------------------------- /modules/local/sra_to_samplesheet/main.nf: -------------------------------------------------------------------------------- 1 | 2 | process SRA_TO_SAMPLESHEET { 3 | tag "$meta.id" 4 | 5 | executor 'local' 6 | memory 100.MB 7 | 8 | input: 9 | val meta 10 | val pipeline 11 | val strandedness 12 | val mapping_fields 13 | 14 | output: 15 | tuple val(meta), path("*samplesheet.csv"), emit: samplesheet 16 | tuple val(meta), path("*mappings.csv") , emit: mappings 17 | 18 | exec: 19 | // 20 | // Create samplesheet containing metadata 21 | // 22 | 23 | // Remove custom keys needed to download the data 24 | def meta_clone = meta.clone() 25 | meta_clone.remove("id") 26 | meta_clone.remove("fastq_1") 27 | meta_clone.remove("fastq_2") 28 | meta_clone.remove("md5_1") 29 | meta_clone.remove("md5_2") 30 | meta_clone.remove("single_end") 31 | 32 | // Add relevant fields to the beginning of the map 33 | pipeline_map = [ 34 | sample : "${meta.id.split('_')[0..-2].join('_')}", 35 | fastq_1 : meta.fastq_1, 36 | fastq_2 : meta.fastq_2 37 | ] 38 | 39 | // Add nf-core pipeline specific entries 40 | if (pipeline) { 41 | if (pipeline == 'rnaseq') { 42 | pipeline_map << [ strandedness: strandedness ] 43 | } else if (pipeline == 'atacseq') { 44 | pipeline_map << [ replicate: 1 ] 45 | } else if (pipeline == 'taxprofiler') { 46 | pipeline_map << [ fasta: '' ] 47 | } 48 | } 49 | pipeline_map << meta_clone 50 | 51 | // Create a samplesheet 52 | samplesheet = pipeline_map.keySet().collect{ '"' + it + '"'}.join(",") + '\n' 53 | samplesheet += pipeline_map.values().collect{ '"' + it + '"'}.join(",") 54 | 55 | // Write samplesheet to file 56 | def samplesheet_file = task.workDir.resolve("${meta.id}.samplesheet.csv") 57 | samplesheet_file.text = samplesheet 58 | 59 | // 60 | // Create sample id mappings file 61 | // 62 | mappings_map = pipeline_map.clone() 63 | def fields = mapping_fields ? 
['sample'] + mapping_fields.split(',').collect{ it.trim().toLowerCase() } : [] 64 | if ((mappings_map.keySet() + fields).unique().size() != mappings_map.keySet().size()) { 65 | error("Invalid option for '--sample_mapping_fields': ${mapping_fields}.\nValid options: ${mappings_map.keySet().join(', ')}") 66 | } 67 | 68 | // Create mappings 69 | mappings = fields.collect{ '"' + it + '"'}.join(",") + '\n' 70 | mappings += mappings_map.subMap(fields).values().collect{ '"' + it + '"'}.join(",") 71 | 72 | // Write mappings to file 73 | def mappings_file = task.workDir.resolve("${meta.id}.mappings.csv") 74 | mappings_file.text = mappings 75 | } 76 | -------------------------------------------------------------------------------- /modules/local/sra_to_samplesheet/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: SRA_TO_SAMPLESHEET { 3 | publishDir = [ 4 | path: { "${params.outdir}/samplesheet" }, 5 | enabled: false 6 | ] 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /modules/local/sra_to_samplesheet/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test process: SRA_TO_SAMPLESHEET" 4 | script "../main.nf" 5 | process "SRA_TO_SAMPLESHEET" 6 | 7 | test("Should run without failures") { 8 | 9 | when { 10 | process { 11 | """ 12 | input[0] = [id:'ERX1188904_ERR1109373', run_accession:'ERR1109373', experiment_accession:'ERX1188904', sample_accession:'SAMEA3643867', experiment_alias:'ena-EXPERIMENT-CAM-03-11-2015-17:01:52:847-7', run_alias:'ena-RUN-CAM-03-11-2015-17:01:52:847-7', sample_alias:'sample_56', study_alias:'ena-STUDY-CAM-02-11-2015-17:42:24:189-13', library_layout:'PAIRED', experiment_title:'Illumina HiSeq 2500 paired end sequencing', sample_title:'RNA-Seq reads mapped onto L. Boulardi Toti-like virus genome', sample_description:'RNA-Seq reads mapped onto L. 
Boulardi Toti-like virus genome', fastq_md5:'8d7d7b854d0207d1226477a30103fade;9fd57225d6c07a31843276d6df9b15c0;5a62e8f785687dce890cfb4fe3e607f9', fastq_ftp:'ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_1.fastq.gz;ftp.sra.ebi.ac.uk/vol1/fastq/ERR110/003/ERR1109373/ERR1109373_2.fastq.gz', fastq_1:'./results/fastq/ERX1188904_ERR1109373_1.fastq.gz', fastq_2:'./results/fastq/ERX1188904_ERR1109373_2.fastq.gz', md5_1:'9fd57225d6c07a31843276d6df9b15c0', md5_2:'5a62e8f785687dce890cfb4fe3e607f9', single_end:false] 13 | input[1] = 'rnaseq' 14 | input[2] = 'auto' 15 | input[3] = 'experiment_accession,run_accession,sample_accession,experiment_alias,run_alias,sample_alias,experiment_title,sample_title,sample_description' 16 | """ 17 | } 18 | } 19 | 20 | then { 21 | assertAll( 22 | { assert process.success }, 23 | { assert snapshot(process.out).match() } 24 | ) 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /modules/nf-core/custom/sratoolsncbisettings/environment.yml: -------------------------------------------------------------------------------- 1 | name: custom_sratoolsncbisettings 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::sra-tools=3.0.8 8 | -------------------------------------------------------------------------------- /modules/nf-core/custom/sratoolsncbisettings/main.nf: -------------------------------------------------------------------------------- 1 | process CUSTOM_SRATOOLSNCBISETTINGS { 2 | tag 'ncbi-settings' 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/sra-tools:3.0.8--h9f5acd7_0' : 8 | 'biocontainers/sra-tools:3.0.8--h9f5acd7_0' }" 9 | 10 | input: 11 | val ids 12 | 13 | output: 14 | path('*.mkfg') , emit: ncbi_settings 15 | path 'versions.yml', emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | shell: 21 | config = "/LIBS/GUID = \"${UUID.randomUUID().toString()}\"\\n/libs/cloud/report_instance_identity = \"true\"\\n" 22 | template 'detect_ncbi_settings.sh' 23 | } 24 | -------------------------------------------------------------------------------- /modules/nf-core/custom/sratoolsncbisettings/meta.yml: -------------------------------------------------------------------------------- 1 | name: "custom_sratoolsncbisettings" 2 | description: Test for the presence of suitable NCBI settings or create them on the fly. 3 | keywords: 4 | - NCBI 5 | - settings 6 | - sra-tools 7 | - prefetch 8 | - fasterq-dump 9 | tools: 10 | - "sratools": 11 | description: "SRA Toolkit and SDK from NCBI" 12 | homepage: https://github.com/ncbi/sra-tools 13 | documentation: https://github.com/ncbi/sra-tools/wiki 14 | tool_dev_url: https://github.com/ncbi/sra-tools 15 | licence: ["Public Domain"] 16 | output: 17 | - versions: 18 | type: file 19 | description: File containing software versions 20 | pattern: "versions.yml" 21 | - ncbi_settings: 22 | type: file 23 | description: An NCBI user settings file. 
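      # Illustrative note (not part of this meta file): per the `config` string
      # constructed in main.nf above, a generated settings file contains two
      # entries, e.g.:
      #
      #   /LIBS/GUID = "<random-uuid>"
      #   /libs/cloud/report_instance_identity = "true"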
24 | pattern: "*.mkfg" 25 | authors: 26 | - "@Midnighter" 27 | maintainers: 28 | - "@Midnighter" 29 | -------------------------------------------------------------------------------- /modules/nf-core/custom/sratoolsncbisettings/templates/detect_ncbi_settings.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -u 4 | 5 | 6 | # Get the expected NCBI settings path and define the environment variable 7 | # `NCBI_SETTINGS`. 8 | eval "$(vdb-config -o n NCBI_SETTINGS | sed 's/[" ]//g')" 9 | 10 | # If the user settings do not exist yet, create a file suitable for `prefetch` 11 | # and `fasterq-dump`. If an existing settings file does not contain the required 12 | # values, error out with a helpful message. 13 | if [[ ! -f "${NCBI_SETTINGS}" ]]; then 14 | printf '!{config}' > 'user-settings.mkfg' 15 | else 16 | prefetch --help &> /dev/null 17 | if [[ $? = 78 ]]; then 18 | echo "You have an existing vdb-config at '${NCBI_SETTINGS}' but it is"\ 19 | "missing the required entries for /LIBS/GUID and"\ 20 | "/libs/cloud/report_instance_identity."\ 21 | "Feel free to add the following to your settings file:" >&2 22 | echo "$(printf '!{config}')" >&2 23 | exit 1 24 | fi 25 | fasterq-dump --help &> /dev/null 26 | if [[ $? = 78 ]]; then 27 | echo "You have an existing vdb-config at '${NCBI_SETTINGS}' but it is"\ 28 | "missing the required entries for /LIBS/GUID and"\ 29 | "/libs/cloud/report_instance_identity."\ 30 | "Feel free to add the following to your settings file:" >&2 31 | echo "$(printf '!{config}')" >&2 32 | exit 1 33 | fi 34 | if [[ "${NCBI_SETTINGS}" != *.mkfg ]]; then 35 | echo "The detected settings '${NCBI_SETTINGS}' do not have the required"\ 36 | "file extension '.mkfg'." >&2 37 | exit 1 38 | fi 39 | cp "${NCBI_SETTINGS}" ./ 40 | fi 41 | 42 | cat <<-END_VERSIONS > versions.yml 43 | "!{task.process}": 44 | sratools: $(vdb-config --version 2>&1 | grep -Eo '[0-9.]+') 45 | END_VERSIONS 46 | -------------------------------------------------------------------------------- /modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process CUSTOM_SRATOOLSNCBISETTINGS" 4 | script "../main.nf" 5 | process "CUSTOM_SRATOOLSNCBISETTINGS" 6 | config "modules/nf-core/custom/sratoolsncbisettings/tests/nextflow.config" 7 | 8 | test("Should run without failures") { 9 | 10 | when { 11 | params { 12 | settings_path = '/tmp/.ncbi' 13 | settings_file = "${params.settings_path}/user-settings.mkfg" 14 | } 15 | 16 | process { 17 | """ 18 | input[0] = ["SRX6725035"] 19 | file(params.settings_path).mkdirs() 20 | def settings = file(params.modules_testdata_base_path + 'generic/config/ncbi_user_settings.mkfg', checkIfExists: true) 21 | settings.copyTo(params.settings_file) 22 | """ 23 | } 24 | } 25 | 26 | then { 27 | assert process.success 28 | assert snapshot( 29 | process.out.versions 30 | ).match() 31 | 32 | with(process.out.ncbi_settings) { 33 | assert path(get(0)).readLines().any { it.contains('/LIBS/GUID') } 34 | assert path(get(0)).readLines().any { it.contains('/libs/cloud/report_instance_identity') } 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /modules/nf-core/custom/sratoolsncbisettings/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | 
"content": [ 4 | [ 5 | "versions.yml:md5,3d6ee88cce1ee517e198633f062589a8" 6 | ] 7 | ], 8 | "meta": { 9 | "nf-test": "0.8.4", 10 | "nextflow": "23.10.1" 11 | }, 12 | "timestamp": "2024-02-28T11:47:15.824443" 13 | } 14 | } -------------------------------------------------------------------------------- /modules/nf-core/custom/sratoolsncbisettings/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | params.settings_path = '/tmp/.ncbi' 2 | params.settings_file = "${params.settings_path}/user-settings.mkfg" 3 | 4 | env.NCBI_SETTINGS = params.settings_file 5 | 6 | process { 7 | withName: CUSTOM_SRATOOLSNCBISETTINGS { 8 | containerOptions = { 9 | (workflow.containerEngine == 'singularity') ? 10 | "-B ${params.settings_path}:${params.settings_path}" : 11 | "-v ${params.settings_path}:${params.settings_path}" 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/environment.yml: -------------------------------------------------------------------------------- 1 | name: sratools_fasterqdump 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::sra-tools=2.11.0 8 | - conda-forge::pigz=2.6 9 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/main.nf: -------------------------------------------------------------------------------- 1 | process SRATOOLS_FASTERQDUMP { 2 | tag "$meta.id" 3 | label 'process_medium' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' : 8 | 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' }" 9 | 10 | input: 11 | tuple val(meta), path(sra) 12 | path ncbi_settings 13 | path certificate 14 | 15 | output: 16 | tuple val(meta), path('*.fastq.gz'), emit: reads 17 | path "versions.yml" , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | script: 23 | def args = task.ext.args ?: '' 24 | def args2 = task.ext.args2 ?: '' 25 | def prefix = task.ext.prefix ?: "${meta.id}" 26 | def outfile = meta.single_end ? 
"${prefix}.fastq" : prefix 27 | def key_file = '' 28 | if (certificate.toString().endsWith('.jwt')) { 29 | key_file += " --perm ${certificate}" 30 | } else if (certificate.toString().endsWith('.ngc')) { 31 | key_file += " --ngc ${certificate}" 32 | } 33 | """ 34 | export NCBI_SETTINGS="\$PWD/${ncbi_settings}" 35 | 36 | fasterq-dump \\ 37 | $args \\ 38 | --threads $task.cpus \\ 39 | --outfile $outfile \\ 40 | ${key_file} \\ 41 | ${sra} 42 | 43 | pigz \\ 44 | $args2 \\ 45 | --no-name \\ 46 | --processes $task.cpus \\ 47 | *.fastq 48 | 49 | cat <<-END_VERSIONS > versions.yml 50 | "${task.process}": 51 | sratools: \$(fasterq-dump --version 2>&1 | grep -Eo '[0-9.]+') 52 | pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) 53 | END_VERSIONS 54 | """ 55 | } 56 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/meta.yml: -------------------------------------------------------------------------------- 1 | name: sratools_fasterqdump 2 | description: Extract sequencing reads in FASTQ format from a given NCBI Sequence Read Archive (SRA). 3 | keywords: 4 | - sequencing 5 | - FASTQ 6 | - dump 7 | tools: 8 | - sratools: 9 | description: SRA Toolkit and SDK from NCBI 10 | homepage: https://github.com/ncbi/sra-tools 11 | documentation: https://github.com/ncbi/sra-tools/wiki 12 | tool_dev_url: https://github.com/ncbi/sra-tools 13 | licence: ["Public Domain"] 14 | input: 15 | - meta: 16 | type: map 17 | description: > 18 | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] 19 | 20 | - sra: 21 | type: directory 22 | description: Directory containing ETL data for the given SRA. 23 | pattern: "*/*.sra" 24 | - ncbi_settings: 25 | type: file 26 | description: > 27 | An NCBI user settings file. 28 | 29 | pattern: "*.mkfg" 30 | - certificate: 31 | type: file 32 | description: > 33 | Path to a JWT cart file used to access protected dbGAP data on SRA using the sra-toolkit 34 | 35 | pattern: "*.cart" 36 | output: 37 | - meta: 38 | type: map 39 | description: > 40 | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] 41 | 42 | - versions: 43 | type: file 44 | description: File containing software versions 45 | pattern: "versions.yml" 46 | - reads: 47 | type: file 48 | description: Extracted FASTQ file or files if the sequencing reads are paired-end. 49 | pattern: "*.fastq.gz" 50 | authors: 51 | - "@Midnighter" 52 | maintainers: 53 | - "@Midnighter" 54 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: SRATOOLS_FASTERQDUMP { 3 | ext.args = '--split-files --include-technical' 4 | publishDir = [ 5 | path: { "${params.outdir}/fastq" }, 6 | mode: params.publish_dir_mode, 7 | pattern: "*.fastq.gz" 8 | ] 9 | } 10 | } -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/sratools-fasterqdump.diff: -------------------------------------------------------------------------------- 1 | Changes in module 'nf-core/sratools/fasterqdump' 2 | --- modules/nf-core/sratools/fasterqdump/main.nf 3 | +++ modules/nf-core/sratools/fasterqdump/main.nf 4 | @@ -4,8 +4,8 @@ 5 | 6 | conda "${moduleDir}/environment.yml" 7 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
8 | - 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:2f4a4c900edd6801ff0068c2b3048b4459d119eb-0' : 9 | - 'biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:2f4a4c900edd6801ff0068c2b3048b4459d119eb-0' }" 10 | + 'https://depot.galaxyproject.org/singularity/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' : 11 | + 'quay.io/biocontainers/mulled-v2-5f89fe0cd045cb1d615630b9261a1d17943a9b6a:6a9ff0e76ec016c3d0d27e0c0d362339f2d787e6-0' }" 12 | 13 | input: 14 | tuple val(meta), path(sra) 15 | 16 | --- /dev/null 17 | +++ modules/nf-core/sratools/fasterqdump/nextflow.config 18 | @@ -0,0 +1,10 @@ 19 | +process { 20 | + withName: SRATOOLS_FASTERQDUMP { 21 | + ext.args = '--split-files --include-technical' 22 | + publishDir = [ 23 | + path: { "${params.outdir}/fastq" }, 24 | + mode: params.publish_dir_mode, 25 | + pattern: "*.fastq.gz" 26 | + ] 27 | + } 28 | +} 29 | --- modules/nf-core/sratools/fasterqdump/environment.yml 30 | +++ modules/nf-core/sratools/fasterqdump/environment.yml 31 | @@ -4,5 +4,5 @@ 32 | - bioconda 33 | - defaults 34 | dependencies: 35 | - - bioconda::sra-tools=3.0.8 36 | + - bioconda::sra-tools=2.11.0 37 | - conda-forge::pigz=2.6 38 | 39 | --- modules/nf-core/sratools/fasterqdump/tests/main.nf.test 40 | +++ modules/nf-core/sratools/fasterqdump/tests/main.nf.test 41 | @@ -3,11 +3,8 @@ 42 | script "../main.nf" 43 | config "./nextflow.config" 44 | process "SRATOOLS_FASTERQDUMP" 45 | - tag "modules" 46 | - tag "modules_nfcore" 47 | - tag "untar" 48 | - tag "sratools" 49 | - tag "sratools/fasterqdump" 50 | + 51 | + tag "UNTAR" 52 | 53 | test("Single-end") { 54 | 55 | 56 | --- modules/nf-core/sratools/fasterqdump/tests/tags.yml 57 | +++ /dev/null 58 | @@ -1,2 +0,0 @@ 59 | -sratools/fasterqdump: 60 | - - modules/nf-core/sratools/fasterqdump/** 61 | 62 | ************************************************************ 63 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | name "Test Process SRATOOLS_FASTERQDUMP" 3 | script "../main.nf" 4 | config "./nextflow.config" 5 | process "SRATOOLS_FASTERQDUMP" 6 | 7 | tag "UNTAR" 8 | 9 | test("Single-end") { 10 | 11 | setup { 12 | run("UNTAR") { 13 | script "modules/nf-core/untar/main.nf" 14 | process { 15 | """ 16 | input[0] = Channel.of([ [], file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/sra/SRR13255544.tar.gz', checkIfExists: true) ]) 17 | """ 18 | } 19 | } 20 | } 21 | 22 | when { 23 | process { 24 | """ 25 | input[0] = UNTAR.out.untar.map{ meta, files -> [ [ id:'test_single_end', single_end:true ], files]} 26 | input[1] = file(params.modules_testdata_base_path + 'generic/config/ncbi_user_settings.mkfg', checkIfExists: true) 27 | input[2] = [] 28 | """ 29 | } 30 | } 31 | 32 | then { 33 | assertAll( 34 | { assert process.success }, 35 | { assert snapshot(process.out).match() } 36 | ) 37 | } 38 | } 39 | 40 | test("Paired-end") { 41 | 42 | setup { 43 | run("UNTAR") { 44 | script "modules/nf-core/untar/main.nf" 45 | process { 46 | """ 47 | input[0] = Channel.of([ [], file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/sra/SRR11140744.tar.gz', checkIfExists: true) ]) 48 | """ 49 | } 50 | } 51 | } 52 | 53 | when { 54 | process { 55 | """ 56 | input[0] = UNTAR.out.untar.map{ meta, files -> [ [ id:'test_paired_end', 
single_end:false ], files]} 57 | input[1] = file(params.modules_testdata_base_path + 'generic/config/ncbi_user_settings.mkfg', checkIfExists: true) 58 | input[2] = [] 59 | """ 60 | } 61 | } 62 | 63 | then { 64 | assertAll( 65 | { assert process.success }, 66 | { assert snapshot(process.out).match() } 67 | ) 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Single-end": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test_single_end", 9 | "single_end": true 10 | }, 11 | "test_single_end.fastq.gz:md5,674d78c1cc3c1308d6d39d6369a42887" 12 | ] 13 | ], 14 | "1": [ 15 | "versions.yml:md5,6ff2d50b15c3f0eb9c72cd13a4a20295" 16 | ], 17 | "reads": [ 18 | [ 19 | { 20 | "id": "test_single_end", 21 | "single_end": true 22 | }, 23 | "test_single_end.fastq.gz:md5,674d78c1cc3c1308d6d39d6369a42887" 24 | ] 25 | ], 26 | "versions": [ 27 | "versions.yml:md5,6ff2d50b15c3f0eb9c72cd13a4a20295" 28 | ] 29 | } 30 | ], 31 | "meta": { 32 | "nf-test": "0.8.4", 33 | "nextflow": "24.01.0" 34 | }, 35 | "timestamp": "2024-02-28T15:25:52.837288" 36 | }, 37 | "Paired-end": { 38 | "content": [ 39 | { 40 | "0": [ 41 | [ 42 | { 43 | "id": "test_paired_end", 44 | "single_end": false 45 | }, 46 | [ 47 | "test_paired_end_1.fastq.gz:md5,8573015c91d099b6e30789f8bab2f43c", 48 | "test_paired_end_2.fastq.gz:md5,37e6f719a022dc3c9994c80fbc20c311" 49 | ] 50 | ] 51 | ], 52 | "1": [ 53 | "versions.yml:md5,6ff2d50b15c3f0eb9c72cd13a4a20295" 54 | ], 55 | "reads": [ 56 | [ 57 | { 58 | "id": "test_paired_end", 59 | "single_end": false 60 | }, 61 | [ 62 | "test_paired_end_1.fastq.gz:md5,8573015c91d099b6e30789f8bab2f43c", 63 | "test_paired_end_2.fastq.gz:md5,37e6f719a022dc3c9994c80fbc20c311" 64 | ] 65 | ] 66 | ], 67 | "versions": [ 68 | "versions.yml:md5,6ff2d50b15c3f0eb9c72cd13a4a20295" 69 | ] 70 | } 71 | ], 72 | "meta": { 73 | "nf-test": "0.8.4", 74 | "nextflow": "24.01.0" 75 | }, 76 | "timestamp": "2024-02-28T15:26:42.466223" 77 | } 78 | } -------------------------------------------------------------------------------- /modules/nf-core/sratools/fasterqdump/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: SRATOOLS_FASTERQDUMP { 3 | ext.args = '' 4 | } 5 | } -------------------------------------------------------------------------------- /modules/nf-core/sratools/prefetch/environment.yml: -------------------------------------------------------------------------------- 1 | name: sratools_prefetch 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | - defaults 6 | dependencies: 7 | - bioconda::sra-tools=3.0.8 8 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/prefetch/main.nf: -------------------------------------------------------------------------------- 1 | process SRATOOLS_PREFETCH { 2 | tag "$id" 3 | label 'process_low' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
7 | 'https://depot.galaxyproject.org/singularity/sra-tools:3.0.8--h9f5acd7_0' : 8 | 'biocontainers/sra-tools:3.0.8--h9f5acd7_0' }" 9 | 10 | input: 11 | tuple val(meta), val(id) 12 | path ncbi_settings 13 | path certificate 14 | 15 | output: 16 | tuple val(meta), path(id), emit: sra 17 | path 'versions.yml' , emit: versions 18 | 19 | when: 20 | task.ext.when == null || task.ext.when 21 | 22 | shell: 23 | args = task.ext.args ?: '' 24 | args2 = task.ext.args2 ?: '5 1 100' // <max_attempts> <delay> <max_time>, consumed by retry_with_backoff.sh 25 | if (certificate) { 26 | if (certificate.toString().endsWith('.jwt')) { 27 | args += " --perm ${certificate}" 28 | } 29 | else if (certificate.toString().endsWith('.ngc')) { 30 | args += " --ngc ${certificate}" 31 | } 32 | } 33 | 34 | template 'retry_with_backoff.sh' 35 | } 36 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/prefetch/meta.yml: -------------------------------------------------------------------------------- 1 | name: sratools_prefetch 2 | description: Download sequencing data from the NCBI Sequence Read Archive (SRA). 3 | keywords: 4 | - sequencing 5 | - fastq 6 | - prefetch 7 | tools: 8 | - sratools: 9 | description: SRA Toolkit and SDK from NCBI 10 | homepage: https://github.com/ncbi/sra-tools 11 | documentation: https://github.com/ncbi/sra-tools/wiki 12 | tool_dev_url: https://github.com/ncbi/sra-tools 13 | licence: ["Public Domain"] 14 | input: 15 | - meta: 16 | type: map 17 | description: > 18 | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] 19 | 20 | - id: 21 | type: string 22 | description: > 23 | A string denoting an SRA id. 24 | 25 | - ncbi_settings: 26 | type: file 27 | description: > 28 | An NCBI user settings file. 29 | 30 | pattern: "*.mkfg" 31 | - certificate: 32 | type: file 33 | description: > 34 | Path to a JWT cart file used to access protected dbGAP data on SRA using the sra-toolkit 35 | 36 | pattern: "*.cart" 37 | output: 38 | - meta: 39 | type: map 40 | description: > 41 | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] 42 | 43 | - sra: 44 | type: directory 45 | description: > 46 | Directory containing the ETL data for the given SRA id. 47 | 48 | pattern: "*/*.sra" 49 | - versions: 50 | type: file 51 | description: File containing software versions 52 | pattern: "versions.yml" 53 | authors: 54 | - "@Midnighter" 55 | maintainers: 56 | - "@Midnighter" 57 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/prefetch/nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: SRATOOLS_PREFETCH { 3 | publishDir = [ 4 | path: { "${params.outdir}/sra" }, 5 | enabled: false 6 | ] 7 | } 8 | } -------------------------------------------------------------------------------- /modules/nf-core/sratools/prefetch/templates/retry_with_backoff.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -u 4 | 5 | retry_with_backoff() { 6 | local max_attempts=${1} 7 | local delay=${2} 8 | local max_time=${3} 9 | local attempt=1 10 | local output= 11 | local status= 12 | 13 | # Remove the first three arguments to this function in order to access 14 | # the 'real' command with `${@}`.
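# With the default arguments '5 1 100' set in prefetch/main.nf above, a persistently failing command is attempted five times with sleeps of 1, 2, 4 and 8 s in between: the delay doubles after each failure and is capped at max_time.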
15 | shift 3 16 | 17 | while [ ${attempt} -le ${max_attempts} ]; do 18 | output=$("${@}") 19 | status=${?} 20 | 21 | if [ ${status} -eq 0 ]; then 22 | break 23 | fi 24 | 25 | if [ ${attempt} -lt ${max_attempts} ]; then 26 | echo "Failed attempt ${attempt} of ${max_attempts}. Retrying in ${delay} s." >&2 27 | sleep ${delay} 28 | elif [ ${attempt} -eq ${max_attempts} ]; then 29 | echo "Failed after ${attempt} attempts." >&2 30 | return ${status} 31 | fi 32 | 33 | attempt=$(( ${attempt} + 1 )) 34 | delay=$(( ${delay} * 2 )) 35 | if [ ${delay} -ge ${max_time} ]; then 36 | delay=${max_time} 37 | fi 38 | done 39 | 40 | echo "${output}" 41 | } 42 | 43 | export NCBI_SETTINGS="$PWD/!{ncbi_settings}" 44 | 45 | retry_with_backoff !{args2} \ 46 | prefetch \ 47 | !{args} \ 48 | !{id} 49 | 50 | [ -f !{id}.sralite ] && vdb-validate !{id}.sralite || vdb-validate !{id} 51 | 52 | cat <<-END_VERSIONS > versions.yml 53 | "!{task.process}": 54 | sratools: $(prefetch --version 2>&1 | grep -Eo '[0-9.]+') 55 | END_VERSIONS 56 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/prefetch/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | name "Test Process SRATOOLS_PREFETCH" 3 | script "../main.nf" 4 | process "SRATOOLS_PREFETCH" 5 | 6 | test("sratools/prefetch") { 7 | 8 | when { 9 | process { 10 | """ 11 | input[0] = Channel.of([ [ id:'test', single_end:false ], 'DRR000774' ]) 12 | input[1] = file(params.modules_testdata_base_path + 'generic/config/ncbi_user_settings.mkfg', checkIfExists: true) 13 | input[2] = [] 14 | """ 15 | } 16 | } 17 | 18 | then { 19 | assertAll ( 20 | { assert process.success }, 21 | { assert snapshot(process.out).match() } 22 | ) 23 | } 24 | } 25 | 26 | test("sratools/prefetch with sralite") { 27 | 28 | when { 29 | process { 30 | """ 31 | input[0] = Channel.of([ [ id:'test', single_end:false ], 'SRR1170046' ]) 32 | input[1] = file(params.modules_testdata_base_path + 'generic/config/ncbi_user_settings.mkfg', checkIfExists: true) 33 | input[2] = [] 34 | """ 35 | } 36 | } 37 | 38 | then { 39 | assertAll ( 40 | { assert process.success }, 41 | { assert snapshot(process.out).match() } 42 | ) 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /modules/nf-core/sratools/prefetch/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "sratools/prefetch with sralite": { 3 | "content": [ 4 | { 5 | "0": [ 6 | [ 7 | { 8 | "id": "test", 9 | "single_end": false 10 | }, 11 | [ 12 | "SRR1170046.sralite:md5,7acfce556ca0951aff49d780899c105b" 13 | ] 14 | ] 15 | ], 16 | "1": [ 17 | "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" 18 | ], 19 | "sra": [ 20 | [ 21 | { 22 | "id": "test", 23 | "single_end": false 24 | }, 25 | [ 26 | "SRR1170046.sralite:md5,7acfce556ca0951aff49d780899c105b" 27 | ] 28 | ] 29 | ], 30 | "versions": [ 31 | "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" 32 | ] 33 | } 34 | ], 35 | "meta": { 36 | "nf-test": "0.8.4", 37 | "nextflow": "23.10.1" 38 | }, 39 | "timestamp": "2024-02-28T11:49:02.309737" 40 | }, 41 | "sratools/prefetch": { 42 | "content": [ 43 | { 44 | "0": [ 45 | [ 46 | { 47 | "id": "test", 48 | "single_end": false 49 | }, 50 | [ 51 | "DRR000774.sra:md5,7647dba20c89c0e3d7ad13842f060eb0" 52 | ] 53 | ] 54 | ], 55 | "1": [ 56 | "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" 57 | ], 58 | "sra": [ 59 | [ 60 | { 61 | "id": 
"test", 62 | "single_end": false 63 | }, 64 | [ 65 | "DRR000774.sra:md5,7647dba20c89c0e3d7ad13842f060eb0" 66 | ] 67 | ] 68 | ], 69 | "versions": [ 70 | "versions.yml:md5,c967dea4135cb75490e1e801c4639efc" 71 | ] 72 | } 73 | ], 74 | "meta": { 75 | "nf-test": "0.8.4", 76 | "nextflow": "23.10.1" 77 | }, 78 | "timestamp": "2024-02-28T11:48:37.428307" 79 | } 80 | } -------------------------------------------------------------------------------- /modules/nf-core/untar/environment.yml: -------------------------------------------------------------------------------- 1 | name: untar 2 | 3 | channels: 4 | - conda-forge 5 | - bioconda 6 | - defaults 7 | 8 | dependencies: 9 | - conda-forge::grep=3.11 10 | - conda-forge::sed=4.7 11 | - conda-forge::tar=1.34 12 | -------------------------------------------------------------------------------- /modules/nf-core/untar/main.nf: -------------------------------------------------------------------------------- 1 | process UNTAR { 2 | tag "$archive" 3 | label 'process_single' 4 | 5 | conda "${moduleDir}/environment.yml" 6 | container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 7 | 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : 8 | 'nf-core/ubuntu:20.04' }" 9 | 10 | input: 11 | tuple val(meta), path(archive) 12 | 13 | output: 14 | tuple val(meta), path("$prefix"), emit: untar 15 | path "versions.yml" , emit: versions 16 | 17 | when: 18 | task.ext.when == null || task.ext.when 19 | 20 | script: 21 | def args = task.ext.args ?: '' 22 | def args2 = task.ext.args2 ?: '' 23 | prefix = task.ext.prefix ?: ( meta.id ? "${meta.id}" : archive.baseName.toString().replaceFirst(/\.tar$/, "")) 24 | 25 | """ 26 | mkdir $prefix 27 | 28 | ## Ensures --strip-components only applied when top level of tar contents is a directory 29 | ## If just files or multiple directories, place all in prefix 30 | if [[ \$(tar -taf ${archive} | grep -o -P "^.*?\\/" | uniq | wc -l) -eq 1 ]]; then 31 | tar \\ 32 | -C $prefix --strip-components 1 \\ 33 | -xavf \\ 34 | $args \\ 35 | $archive \\ 36 | $args2 37 | else 38 | tar \\ 39 | -C $prefix \\ 40 | -xavf \\ 41 | $args \\ 42 | $archive \\ 43 | $args2 44 | fi 45 | 46 | cat <<-END_VERSIONS > versions.yml 47 | "${task.process}": 48 | untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') 49 | END_VERSIONS 50 | """ 51 | 52 | stub: 53 | prefix = task.ext.prefix ?: ( meta.id ? "${meta.id}" : archive.toString().replaceFirst(/\.[^\.]+(.gz)?$/, "")) 54 | """ 55 | mkdir $prefix 56 | touch ${prefix}/file.txt 57 | 58 | cat <<-END_VERSIONS > versions.yml 59 | "${task.process}": 60 | untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') 61 | END_VERSIONS 62 | """ 63 | } 64 | -------------------------------------------------------------------------------- /modules/nf-core/untar/meta.yml: -------------------------------------------------------------------------------- 1 | name: untar 2 | description: Extract files. 3 | keywords: 4 | - untar 5 | - uncompress 6 | - extract 7 | tools: 8 | - untar: 9 | description: | 10 | Extract tar.gz files. 11 | documentation: https://www.gnu.org/software/tar/manual/ 12 | licence: ["GPL-3.0-or-later"] 13 | input: 14 | - meta: 15 | type: map 16 | description: | 17 | Groovy Map containing sample information 18 | e.g. 
[ id:'test', single_end:false ] 19 | - archive: 20 | type: file 21 | description: File to be untarred 22 | pattern: "*.{tar}.{gz}" 23 | output: 24 | - meta: 25 | type: map 26 | description: | 27 | Groovy Map containing sample information 28 | e.g. [ id:'test', single_end:false ] 29 | - untar: 30 | type: directory 31 | description: Directory containing contents of archive 32 | pattern: "*/" 33 | - versions: 34 | type: file 35 | description: File containing software versions 36 | pattern: "versions.yml" 37 | authors: 38 | - "@joseespinosa" 39 | - "@drpatelh" 40 | - "@matthdsm" 41 | - "@jfy133" 42 | maintainers: 43 | - "@joseespinosa" 44 | - "@drpatelh" 45 | - "@matthdsm" 46 | - "@jfy133" 47 | -------------------------------------------------------------------------------- /modules/nf-core/untar/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_process { 2 | 3 | name "Test Process UNTAR" 4 | script "../main.nf" 5 | process "UNTAR" 6 | 7 | test("test_untar") { 8 | 9 | when { 10 | process { 11 | """ 12 | input[0] = [ [], file(params.modules_testdata_base_path + 'genomics/sarscov2/genome/db/kraken2.tar.gz', checkIfExists: true) ] 13 | """ 14 | } 15 | } 16 | 17 | then { 18 | assertAll ( 19 | { assert process.success }, 20 | { assert snapshot(process.out.untar).match("test_untar") }, 21 | ) 22 | } 23 | 24 | } 25 | 26 | test("test_untar_onlyfiles") { 27 | 28 | when { 29 | process { 30 | """ 31 | input[0] = [ [], file(params.modules_testdata_base_path + 'generic/tar/hello.tar.gz', checkIfExists: true) ] 32 | """ 33 | } 34 | } 35 | 36 | then { 37 | assertAll ( 38 | { assert process.success }, 39 | { assert snapshot(process.out.untar).match("test_untar_onlyfiles") }, 40 | ) 41 | } 42 | 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /modules/nf-core/untar/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "test_untar_onlyfiles": { 3 | "content": [ 4 | [ 5 | [ 6 | [ 7 | 8 | ], 9 | [ 10 | "hello.txt:md5,e59ff97941044f85df5297e1c302d260" 11 | ] 12 | ] 13 | ] 14 | ], 15 | "meta": { 16 | "nf-test": "0.8.4", 17 | "nextflow": "23.10.1" 18 | }, 19 | "timestamp": "2024-02-28T11:49:41.320643" 20 | }, 21 | "test_untar": { 22 | "content": [ 23 | [ 24 | [ 25 | [ 26 | 27 | ], 28 | [ 29 | "hash.k2d:md5,8b8598468f54a7087c203ad0190555d9", 30 | "opts.k2d:md5,a033d00cf6759407010b21700938f543", 31 | "taxo.k2d:md5,094d5891cdccf2f1468088855c214b2c" 32 | ] 33 | ] 34 | ] 35 | ], 36 | "meta": { 37 | "nf-test": "0.8.4", 38 | "nextflow": "23.10.1" 39 | }, 40 | "timestamp": "2024-02-28T11:49:33.795172" 41 | } 42 | } -------------------------------------------------------------------------------- /nf-test.config: -------------------------------------------------------------------------------- 1 | config { 2 | // Location of nf-tests 3 | testsDir "." 4 | 5 | // nf-test directory used to create temporary files for each test 6 | workDir System.getenv("NFT_WORKDIR") ?: ".nf-test" 7 | 8 | // Location of an optional nextflow.config file specific for executing pipeline tests 9 | configFile "tests/nextflow.config" 10 | } 11 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # Config file for Python. Mostly used to configure linting of bin/*.py with Ruff.
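# Ruff reads the [tool.ruff] tables below automatically when invoked from the repository root, e.g. with `ruff check bin/`.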
2 | # Should be kept the same as nf-core/tools to avoid fighting with template synchronisation. 3 | [tool.ruff] 4 | line-length = 120 5 | target-version = "py38" 6 | cache-dir = "~/.cache/ruff" 7 | 8 | [tool.ruff.lint] 9 | select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"] 10 | 11 | [tool.ruff.lint.isort] 12 | known-first-party = ["nf_core"] 13 | 14 | [tool.ruff.lint.per-file-ignores] 15 | "__init__.py" = ["E402", "F401"] 16 | -------------------------------------------------------------------------------- /subworkflows/local/utils_nfcore_fetchngs_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf" 6 | tag "UTILS_NFCORE_FETCHNGS_PIPELINE" 7 | 8 | test("Function isSraId") { 9 | 10 | function "isSraId" 11 | 12 | when { 13 | function { 14 | """ 15 | input[0] = 'DRR000774' 16 | """ 17 | } 18 | } 19 | 20 | then { 21 | assertAll( 22 | { assert function.success }, 23 | { assert snapshot(function.result).match() } 24 | ) 25 | } 26 | } 27 | 28 | test("Function sraCheckENAMetadataFields [success]") { 29 | 30 | function "sraCheckENAMetadataFields" 31 | 32 | when { 33 | function { 34 | """ 35 | input[0] = 'run_accession,experiment_accession,library_layout,fastq_ftp,fastq_md5' 36 | """ 37 | } 38 | } 39 | 40 | then { 41 | assertAll( 42 | { assert function.success }, 43 | { assert snapshot(function.result).match() } 44 | ) 45 | } 46 | } 47 | 48 | test("Function sraCheckENAMetadataFields [failure]") { 49 | 50 | function "sraCheckENAMetadataFields" 51 | 52 | when { 53 | function { 54 | """ 55 | input[0] = 'run_accession,experiment_accession,library_layout,fastq_ftp' 56 | """ 57 | } 58 | } 59 | 60 | then { 61 | assertAll( 62 | { assert !function.success } 63 | ) 64 | } 65 | } 66 | 67 | test("Function sraCurateSamplesheetWarn") { 68 | 69 | function "sraCurateSamplesheetWarn" 70 | 71 | then { 72 | assertAll( 73 | { assert function.success }, 74 | { assert snapshot(function.result).match() } 75 | ) 76 | } 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /subworkflows/local/utils_nfcore_fetchngs_pipeline/tests/main.function.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Function sraCurateSamplesheetWarn": { 3 | "content": null, 4 | "meta": { 5 | "nf-test": "0.8.4", 6 | "nextflow": "23.10.1" 7 | }, 8 | "timestamp": "2024-02-28T11:55:41.001798" 9 | }, 10 | "Function sraCheckENAMetadataFields [success]": { 11 | "content": null, 12 | "meta": { 13 | "nf-test": "0.8.4", 14 | "nextflow": "23.10.1" 15 | }, 16 | "timestamp": "2024-02-28T11:55:33.679255" 17 | }, 18 | "Function isSraId": { 19 | "content": [ 20 | true 21 | ], 22 | "meta": { 23 | "nf-test": "0.8.4", 24 | "nextflow": "23.10.1" 25 | }, 26 | "timestamp": "2024-02-28T11:55:29.999289" 27 | } 28 | } -------------------------------------------------------------------------------- /subworkflows/local/utils_nfcore_fetchngs_pipeline/tests/main.workflow_pipeline_completion.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow PIPELINE_COMPLETION" 4 | script "subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf" 5 | workflow "PIPELINE_COMPLETION" 6 | tag "UTILS_NFCORE_FETCHNGS_PIPELINE" 7 | 8 | test("Should run") { 9 | 10 | when { 11 | workflow { 12 | """ 13 | email = 
null 14 | email_on_fail = null 15 | plaintext_email = false 16 | outdir = 'results' 17 | monochrome_logs = false 18 | hook_url = null 19 | 20 | input[0] = email 21 | input[1] = email_on_fail 22 | input[2] = plaintext_email 23 | input[3] = outdir 24 | input[4] = monochrome_logs 25 | input[5] = hook_url 26 | """ 27 | } 28 | } 29 | 30 | then { 31 | assertAll( 32 | { assert workflow.success } 33 | ) 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /subworkflows/local/utils_nfcore_fetchngs_pipeline/tests/main.workflow_pipeline_initialisation.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow PIPELINE_INITIALISATION" 4 | script "subworkflows/local/utils_nfcore_fetchngs_pipeline/main.nf" 5 | workflow "PIPELINE_INITIALISATION" 6 | tag "UTILS_NFCORE_FETCHNGS_PIPELINE" 7 | 8 | test("Should run") { 9 | 10 | when { 11 | workflow { 12 | """ 13 | version = false 14 | help = false 15 | validate_params = false 16 | monochrome_logs = false 17 | nextflow_cli_args = [] 18 | outdir = 'results' 19 | ena_metadata_fields = null 20 | 21 | input[0] = version 22 | input[1] = help 23 | input[2] = validate_params 24 | input[3] = monochrome_logs 25 | input[4] = nextflow_cli_args 26 | input[5] = outdir 27 | input[6] = 'https://raw.githubusercontent.com/nf-core/test-datasets/2732b911c57e607fa7aea5ba0c3d91b25bafb662/testdata/v1.12.0/sra_ids_test.csv' 28 | input[7] = ena_metadata_fields 29 | """ 30 | } 31 | } 32 | 33 | then { 34 | assertAll( 35 | { assert workflow.success } 36 | ) 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/main.nf: -------------------------------------------------------------------------------- 1 | include { CUSTOM_SRATOOLSNCBISETTINGS } from '../../../modules/nf-core/custom/sratoolsncbisettings/main' 2 | include { SRATOOLS_PREFETCH } from '../../../modules/nf-core/sratools/prefetch/main' 3 | include { SRATOOLS_FASTERQDUMP } from '../../../modules/nf-core/sratools/fasterqdump/main' 4 | 5 | // 6 | // Download FASTQ sequencing reads from the NCBI's Sequence Read Archive (SRA). 7 | // 8 | workflow FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS { 9 | take: 10 | ch_sra_ids // channel: [ val(meta), val(id) ] 11 | ch_dbgap_key // channel: [ path(dbgap_key) ] 12 | 13 | main: 14 | 15 | ch_versions = Channel.empty() 16 | 17 | // 18 | // Detect existing NCBI user settings or create new ones. 19 | // 20 | CUSTOM_SRATOOLSNCBISETTINGS ( ch_sra_ids.collect() ) 21 | ch_ncbi_settings = CUSTOM_SRATOOLSNCBISETTINGS.out.ncbi_settings 22 | ch_versions = ch_versions.mix(CUSTOM_SRATOOLSNCBISETTINGS.out.versions) 23 | 24 | // 25 | // Prefetch sequencing reads in SRA format. 26 | // 27 | SRATOOLS_PREFETCH ( ch_sra_ids, ch_ncbi_settings, ch_dbgap_key ) 28 | ch_versions = ch_versions.mix(SRATOOLS_PREFETCH.out.versions.first()) 29 | 30 | // 31 | // Convert the SRA format into one or more compressed FASTQ files. 
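// (fasterq-dump writes a single FASTQ for single-end runs and a *_1/*_2 pair for paired-end runs; pigz then compresses the output. See modules/nf-core/sratools/fasterqdump/main.nf.)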
32 | // 33 | SRATOOLS_FASTERQDUMP ( SRATOOLS_PREFETCH.out.sra, ch_ncbi_settings, ch_dbgap_key ) 34 | ch_versions = ch_versions.mix(SRATOOLS_FASTERQDUMP.out.versions.first()) 35 | 36 | emit: 37 | reads = SRATOOLS_FASTERQDUMP.out.reads // channel: [ val(meta), [ reads ] ] 38 | versions = ch_versions // channel: [ versions.yml ] 39 | } 40 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: fastq_download_prefetch_fasterqdump_sratools 3 | description: Download FASTQ sequencing reads from the NCBI's Sequence Read Archive (SRA). 4 | keywords: 5 | - SRA 6 | - NCBI 7 | - sequencing 8 | - fastq 9 | - prefetch 10 | - fasterq-dump 11 | components: 12 | - custom/sratoolsncbisettings 13 | - sratools/prefetch 14 | - sratools/fasterqdump 15 | input: 16 | - meta: 17 | type: map 18 | description: > 19 | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] 20 | 21 | - id: 22 | type: string 23 | description: > 24 | SRA run identifier. 25 | 26 | - certificate: 27 | type: file 28 | description: > 29 | Path to a JWT cart file used to access protected dbGAP data on SRA using the sra-toolkit 30 | 31 | pattern: "*.cart" 32 | # TODO Update when we decide on a standard for subworkflow docs 33 | output: 34 | - meta: 35 | type: map 36 | description: > 37 | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] 38 | 39 | - reads: 40 | type: file 41 | description: Extracted FASTQ file or files if the sequencing reads are paired-end. 
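# i.e. "<id>.fastq.gz" for single-end runs, or "<id>_1.fastq.gz" plus "<id>_2.fastq.gz" for paired-end runs (cf. the test snapshot below)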
42 | pattern: "*.fastq.gz" 43 | - versions: 44 | type: file 45 | description: File containing software versions 46 | pattern: "versions.yml" 47 | authors: 48 | - "@Midnighter" 49 | - "@drpatelh" 50 | maintainers: 51 | - "@Midnighter" 52 | - "@drpatelh" 53 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/nextflow.config: -------------------------------------------------------------------------------- 1 | includeConfig '../../../modules/nf-core/sratools/prefetch/nextflow.config' 2 | includeConfig '../../../modules/nf-core/sratools/fasterqdump/nextflow.config' 3 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test workflow: fastq_download_prefetch_fasterqdump_sratools/main.nf" 4 | script "../main.nf" 5 | workflow "FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS" 6 | 7 | tag "CUSTOM_SRATOOLSNCBISETTINGS" 8 | tag "SRATOOLS_PREFETCH" 9 | tag "SRATOOLS_FASTERQDUMP" 10 | 11 | test("Parameters: default") { 12 | 13 | when { 14 | workflow { 15 | """ 16 | input[0] = Channel.of( 17 | [[ id:'test_single_end', single_end:true ], 'DRR000774'], 18 | [[ id:'test_paired_end', single_end:false ], 'SRR11140744'] 19 | ) 20 | input[1] = [] 21 | """ 22 | } 23 | } 24 | 25 | then { 26 | def pelines1 = path(workflow.out.reads[0][1][0]).linesGzip 27 | def pelines2 = path(workflow.out.reads[0][1][1]).linesGzip 28 | def selines = path(workflow.out.reads[1][1]).linesGzip 29 | assertAll( 30 | { assert workflow.success }, 31 | { assert snapshot(pelines1[0..5]).match("test_pe_reads_1_lines") }, 32 | { assert snapshot(pelines1.size()).match("test_pe_reads_1_size") }, 33 | { assert snapshot(pelines2[0..5]).match("test_pe_reads_2_lines") }, 34 | { assert snapshot(pelines2.size()).match("test_pe_reads_2_size") }, 35 | { assert snapshot(selines[0..5]).match("test_se_reads_lines") }, 36 | { assert snapshot(selines.size()).match("test_se_reads_size") }, 37 | { assert snapshot(workflow.out.versions).match("versions") } 38 | ) 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/tests/main.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "test_se_reads_size": { 3 | "content": [ 4 | 19996 5 | ], 6 | "meta": { 7 | "nf-test": "0.8.4", 8 | "nextflow": "23.10.1" 9 | }, 10 | "timestamp": "2024-02-28T12:02:56.176292" 11 | }, 12 | "test_pe_reads_2_lines": { 13 | "content": [ 14 | [ 15 | "@SRR11140744.1 M01472:285:000000000-CYHNP:1:1101:12117:3295 length=251", 16 | "ACAGGACACGAGTAACTCGTCTATCTTCTGCTGGCTGCTTACGGTTTCGTCCGTGTTGCAGCCGATCATCAGCACATCTAGGTTTCGTCCGGGTGTGACCGAAAGGTAAGATGGAGAGCCTTGTCCCTGGTTTCAACGAGAAAACACACGTCCAACTCAGTTTGCCTGTTTTACAGGTTCGCGACGTGCTCGTACGTGGCTTTGGAGACTCCGTGGAGGAGGTCTTATCAGAGGCACGTCAACATCTTAAA", 17 | "+SRR11140744.1 M01472:285:000000000-CYHNP:1:1101:12117:3295 length=251", 18 | "ABAAAFBFFBDBGGGGGGGGGGHHHHHHHHHHCHGHGGGHHHGGHGGHGHGGGHFHHHHHHHHGGGGGHHHHHHHHHFHHHHGHHHGHGGGGGEFGDGHHGFGGGHHHHHGHHGGHHFHHHHGHHHHHHHHHHHHHHGFFGGHHHHHHGGHHGGHHHHHEGHHHHHHHGHHGHHFHHHHHGGGGGGGGGGGGAGGG9BEFFFFFFFFFFFFFFEEFFFFFFFA.FFFFFFFEFEFFFFFFF.BFFFFFFFB", 19 | "@SRR11140744.2 M01472:285:000000000-CYHNP:1:1101:20752:3564 length=238", 20 | 
"GTGCACTCACGCAGTATAATTAATAACTAATTACTGTCGTTGACAGGACACGAGTAACTCGTCTATCTTCTGCAGGCTGCTTACGGTTTCGTCCGTGTTGCAGCCGATCATCAGCACATCTAGGTTTCGTCCGGGTGTGACCGAAAGGTAAGATGGAGAGCCTTGTCCCTGGTTTCAACGAGAAAACACACGTCCAACTCAGTTTGCCTGTTTTACAGGTTCGCGACGTGCTCGTACG" 21 | ] 22 | ], 23 | "meta": { 24 | "nf-test": "0.8.4", 25 | "nextflow": "23.10.1" 26 | }, 27 | "timestamp": "2024-02-28T12:02:56.166207" 28 | }, 29 | "test_pe_reads_2_size": { 30 | "content": [ 31 | 2011460 32 | ], 33 | "meta": { 34 | "nf-test": "0.8.4", 35 | "nextflow": "23.10.1" 36 | }, 37 | "timestamp": "2024-02-28T12:02:56.168869" 38 | }, 39 | "versions": { 40 | "content": [ 41 | [ 42 | "versions.yml:md5,1a2218ff913fc33408bffccb081b5048", 43 | "versions.yml:md5,2f3b3a13b36dabf13f09327613d5558d", 44 | "versions.yml:md5,98d78bba9f3da39a0b7db6e9c7dcc224" 45 | ] 46 | ], 47 | "meta": { 48 | "nf-test": "0.8.4", 49 | "nextflow": "24.01.0" 50 | }, 51 | "timestamp": "2024-02-28T15:19:18.755939" 52 | }, 53 | "test_pe_reads_1_size": { 54 | "content": [ 55 | 2013376 56 | ], 57 | "meta": { 58 | "nf-test": "0.8.4", 59 | "nextflow": "24.01.0" 60 | }, 61 | "timestamp": "2024-02-28T15:19:18.677234" 62 | }, 63 | "test_se_reads_lines": { 64 | "content": [ 65 | [ 66 | "@DRR000774.1 1 length=421", 67 | "ACGCAGGTGCCAGCAGCCGCGGTAATACGTAGGATCCGAGCGTTGTCCGGATTTATTGGGCGTAAAGGGTGCGTAGGCGGCTTGTCAAGTCTCATGTGAAATCTCCCGGCTCAACTGGGAGGGTCATGGGAAACTGATGAGCTCGAGGGCAGTAGAGGGAAGCGGAATTCCGAGAGTAGTGGTGAAATGCGTAGATACTCGGAGGAACACCAGTGGCGAAAGCGGCTTCCTGGACTGTACCTGACGCTGAGGCACGAAAGCGTGGGGAGCAAACCGGATTAGATACCCGGGTAGTCCACGCCCTAAACGATGGATACTAGATATAGGGGGTATCGACCCTCTGTGTCGAAGCTAACGCATTAAGTATCCCGCCTGAGGAGTACGGCCGCAAGGCTAAAACTTAAGGAATTGACGGCTGCGT", 68 | "+DRR000774.1 1 length=421", 69 | "FFFFFFFFFFFIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIHHFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:88FFF888???DBBBBB666F222ADDDFFF::;FFFFFFFFFFFFFFFFFFFFFFFFFFFF9:::FFFFCCCFFFFDDDFFFFF<<<<<8888886623//38><83238@B@@<;855557,,,,,,,0/0;;8:==DDDDDDDDD9:", 70 | "@DRR000774.2 2 length=126", 71 | "ACGCAGGTGCCAGCAGCCGCGGTAATACGGAGGGAGCTAGCGTTGTTCGGAATTACTGGGCGTAAAGCGCACGTAGGCGGCTTTTCAAGTCAGGGGTGGAAATACCCGGGGCCGTCAACCCGACCG" 72 | ] 73 | ], 74 | "meta": { 75 | "nf-test": "0.8.4", 76 | "nextflow": "23.10.1" 77 | }, 78 | "timestamp": "2024-02-28T12:02:56.171227" 79 | }, 80 | "test_pe_reads_1_lines": { 81 | "content": [ 82 | [ 83 | "@SRR11140744.1 M01472:285:000000000-CYHNP:1:1101:12117:3295 length=251", 84 | "ACATAGGGCTGTTCAAGTTGAGGCAAAACGCCTTTTTCAACTTCTACTAAGCCACAAGTGCCATCTTTAAGATGTTGACGTGCCTCTGATAAGACCTCCTCCACGGAGTCTCCAAAGCCACGTACGAGCACGTCGCGAACCTGTAAAACAGGCAAACTGAGTTGGACGTGTGTTTTCTCGTTGAAACCAGGGACAAGGCTCTCCATCTTACCTTTCGGTCACACCCGGACGAAACCTAGATGTGCTGATGA", 85 | "+SRR11140744.1 M01472:285:000000000-CYHNP:1:1101:12117:3295 length=251", 86 | "BCCCCFFFFFCFGGGGGGGGGGHGGHHHHGGGHGHHHHHHHHHHHHHHHHHHHHHHHGHHHHHHHHHHHHHHHHHHHHHGGGHHHHHGHHGHHHHHHHHHHHHHGGGGGHHHHHHHHHHHHGHHHGGGGGHGHHGGGGGGGHHHHHHHHHHHGGHHHHHFHHHHHHHGGGHHHHHHHHHGGGHHHHHHHHGGGGGGGFGGGGGGGGGGGGGGGGGGGGGGFFFFFFFFFDFFFFFFFFFFFFFFFFFFFFB", 87 | "@SRR11140744.2 M01472:285:000000000-CYHNP:1:1101:20752:3564 length=238", 88 | "CGTACGAGCACGTCGCGAACCTGTAAAACAGGCAAACTGAGTTGGACGTGTGTTTTCTCGTTGAAACCAGGGACAAGGCTCTCCATCTTACCTTTCGGTCACACCCGGACGAAACCTAGATGTGCTGATGATCGGCTGCAACACGGACGAAACCGTAAGCAGCCTGCAGAAGATAGACGAGTTACTCGTGTCCTGTCAACGACAGTAATTAGTTATTAATTATACTGCGTGAGTGCAC" 89 | ] 90 | ], 91 | "meta": { 92 | "nf-test": "0.8.4", 93 | "nextflow": "23.10.1" 94 | }, 95 | "timestamp": "2024-02-28T12:02:56.161354" 96 | } 97 | } 
-------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Subworkflow with functionality that may be useful for any Nextflow pipeline 3 | // 4 | 5 | import org.yaml.snakeyaml.Yaml 6 | import groovy.json.JsonOutput 7 | import nextflow.extension.FilesEx 8 | 9 | /* 10 | ======================================================================================== 11 | SUBWORKFLOW DEFINITION 12 | ======================================================================================== 13 | */ 14 | 15 | workflow UTILS_NEXTFLOW_PIPELINE { 16 | 17 | take: 18 | print_version // boolean: print version 19 | dump_parameters // boolean: dump parameters 20 | outdir // path: base directory used to publish pipeline results 21 | check_conda_channels // boolean: check conda channels 22 | 23 | main: 24 | 25 | // 26 | // Print workflow version and exit on --version 27 | // 28 | if (print_version) { 29 | log.info "${workflow.manifest.name} ${getWorkflowVersion()}" 30 | System.exit(0) 31 | } 32 | 33 | // 34 | // Dump pipeline parameters to a JSON file 35 | // 36 | if (dump_parameters && outdir) { 37 | dumpParametersToJSON(outdir) 38 | } 39 | 40 | // 41 | // When running with Conda, warn if channels have not been set up appropriately 42 | // 43 | if (check_conda_channels) { 44 | checkCondaChannels() 45 | } 46 | 47 | emit: 48 | dummy_emit = true 49 | } 50 | 51 | /* 52 | ======================================================================================== 53 | FUNCTIONS 54 | ======================================================================================== 55 | */ 56 | 57 | // 58 | // Generate version string 59 | // 60 | def getWorkflowVersion() { 61 | String version_string = "" 62 | if (workflow.manifest.version) { 63 | def prefix_v = workflow.manifest.version[0] != 'v' ? 'v' : '' 64 | version_string += "${prefix_v}${workflow.manifest.version}" 65 | } 66 | 67 | if (workflow.commitId) { 68 | def git_shortsha = workflow.commitId.substring(0, 7) 69 | version_string += "-g${git_shortsha}" 70 | } 71 | 72 | return version_string 73 | } 74 | 75 | // 76 | // Dump pipeline parameters to a JSON file 77 | // 78 | def dumpParametersToJSON(outdir) { 79 | def timestamp = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss') 80 | def filename = "params_${timestamp}.json" 81 | def temp_pf = new File(workflow.launchDir.toString(), ".${filename}") 82 | def jsonStr = JsonOutput.toJson(params) 83 | temp_pf.text = JsonOutput.prettyPrint(jsonStr) 84 | 85 | FilesEx.copyTo(temp_pf.toPath(), "${outdir}/pipeline_info/params_${timestamp}.json") 86 | temp_pf.delete() 87 | } 88 | 89 | // 90 | // When running with -profile conda, warn if channels have not been set up appropriately 91 | // 92 | def checkCondaChannels() { 93 | Yaml parser = new Yaml() 94 | def channels = [] 95 | try { 96 | def config = parser.load("conda config --show channels".execute().text) 97 | channels = config.channels 98 | } catch(NullPointerException | IOException e) { 99 | log.warn "Could not verify conda channel configuration." 100 | return 101 | } 102 | 103 | // Check that all channels are present 104 | // This channel list is ordered by required channel priority.
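// e.g. channels == ['conda-forge', 'bioconda', 'defaults'] satisfies both checks, while ['bioconda', 'conda-forge', 'defaults'] trips the priority check and emits the warning below.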
105 | def required_channels_in_order = ['conda-forge', 'bioconda', 'defaults'] 106 | def channels_missing = ((required_channels_in_order as Set) - (channels as Set)) as Boolean 107 | 108 | // Check that they are in the right order 109 | def channel_priority_violation = false 110 | def n = required_channels_in_order.size() 111 | for (int i = 0; i < n - 1; i++) { 112 | channel_priority_violation |= !(channels.indexOf(required_channels_in_order[i]) < channels.indexOf(required_channels_in_order[i+1])) 113 | } 114 | 115 | if (channels_missing | channel_priority_violation) { 116 | log.warn "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + 117 | " There is a problem with your Conda configuration!\n\n" + 118 | " You will need to set up the conda-forge and bioconda channels correctly.\n" + 119 | " Please refer to https://bioconda.github.io/\n" + 120 | " The observed channel order is \n" + 121 | " ${channels}\n" + 122 | " but the following channel order is required:\n" + 123 | " ${required_channels_in_order}\n" + 124 | "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NEXTFLOW_PIPELINE" 3 | description: Subworkflow with functionality that may be useful for any Nextflow pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - print_version: 12 | type: boolean 13 | description: | 14 | Print the version of the pipeline and exit 15 | - dump_parameters: 16 | type: boolean 17 | description: | 18 | Dump the parameters of the pipeline to a JSON file 19 | - output_directory: 20 | type: directory 21 | description: Path to output dir to write JSON file to. 22 | pattern: "results/" 23 | - check_conda_channel: 24 | type: boolean 25 | description: | 26 | Check if the conda channel priority is correct. 27 | output: 28 | - dummy_emit: 29 | type: boolean 30 | description: | 31 | Dummy emit to make nf-core subworkflows lint happy 32 | authors: 33 | - "@adamrtalbot" 34 | - "@drpatelh" 35 | maintainers: 36 | - "@adamrtalbot" 37 | - "@drpatelh" 38 | - "@maxulysse" 39 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "subworkflows/nf-core/utils_nextflow_pipeline/main.nf" 6 | config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" 7 | tag "UTILS_NEXTFLOW_PIPELINE" 8 | 9 | test("Test Function getWorkflowVersion") { 10 | 11 | function "getWorkflowVersion" 12 | 13 | then { 14 | assertAll( 15 | { assert function.success }, 16 | { assert snapshot(function.result).match() } 17 | ) 18 | } 19 | } 20 | 21 | test("Test Function dumpParametersToJSON") { 22 | 23 | function "dumpParametersToJSON" 24 | 25 | when { 26 | function { 27 | """ 28 | // define inputs of the function here.
Example: 29 | input[0] = "$outputDir" 30 | """.stripIndent() 31 | } 32 | } 33 | 34 | then { 35 | assertAll( 36 | { assert function.success } 37 | ) 38 | } 39 | } 40 | 41 | test("Test Function checkCondaChannels") { 42 | 43 | function "checkCondaChannels" 44 | 45 | then { 46 | assertAll( 47 | { assert function.success }, 48 | { assert snapshot(function.result).match() } 49 | ) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.function.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Test Function getWorkflowVersion": { 3 | "content": [ 4 | "v9.9.9" 5 | ], 6 | "meta": { 7 | "nf-test": "0.8.4", 8 | "nextflow": "23.10.1" 9 | }, 10 | "timestamp": "2024-02-28T12:02:05.308243" 11 | }, 12 | "Test Function checkCondaChannels": { 13 | "content": null, 14 | "meta": { 15 | "nf-test": "0.8.4", 16 | "nextflow": "23.10.1" 17 | }, 18 | "timestamp": "2024-02-28T12:02:12.425833" 19 | } 20 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NEXTFLOW_PIPELINE" 4 | script "../main.nf" 5 | config "subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config" 6 | workflow "UTILS_NEXTFLOW_PIPELINE" 7 | 8 | test("Should run no inputs") { 9 | 10 | when { 11 | workflow { 12 | """ 13 | print_version = false 14 | dump_parameters = false 15 | outdir = null 16 | check_conda_channels = false 17 | 18 | input[0] = print_version 19 | input[1] = dump_parameters 20 | input[2] = outdir 21 | input[3] = check_conda_channels 22 | """ 23 | } 24 | } 25 | 26 | then { 27 | assertAll( 28 | { assert workflow.success } 29 | ) 30 | } 31 | } 32 | 33 | test("Should print version") { 34 | 35 | when { 36 | workflow { 37 | """ 38 | print_version = true 39 | dump_parameters = false 40 | outdir = null 41 | check_conda_channels = false 42 | 43 | input[0] = print_version 44 | input[1] = dump_parameters 45 | input[2] = outdir 46 | input[3] = check_conda_channels 47 | """ 48 | } 49 | } 50 | 51 | then { 52 | assertAll( 53 | { assert workflow.success }, 54 | { assert workflow.stdout.contains("nextflow_workflow v9.9.9") } 55 | ) 56 | } 57 | } 58 | 59 | test("Should dump params") { 60 | 61 | when { 62 | workflow { 63 | """ 64 | print_version = false 65 | dump_parameters = true 66 | outdir = 'results' 67 | check_conda_channels = false 68 | 69 | input[0] = false 70 | input[1] = true 71 | input[2] = outdir 72 | input[3] = false 73 | """ 74 | } 75 | } 76 | 77 | then { 78 | assertAll( 79 | { assert workflow.success } 80 | ) 81 | } 82 | } 83 | 84 | test("Should not create params JSON if no output directory") { 85 | 86 | when { 87 | workflow { 88 | """ 89 | print_version = false 90 | dump_parameters = true 91 | outdir = null 92 | check_conda_channels = false 93 | 94 | input[0] = false 95 | input[1] = true 96 | input[2] = outdir 97 | input[3] = false 98 | """ 99 | } 100 | } 101 | 102 | then { 103 | assertAll( 104 | { assert workflow.success } 105 | ) 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nextflow_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 
'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFCORE_PIPELINE" 3 | description: Subworkflow with utility functions specific to the nf-core pipeline template 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - version 9 | components: [] 10 | input: 11 | - nextflow_cli_args: 12 | type: list 13 | description: | 14 | Nextflow CLI positional arguments 15 | output: 16 | - success: 17 | type: boolean 18 | description: | 19 | Dummy output to indicate success 20 | authors: 21 | - "@adamrtalbot" 22 | maintainers: 23 | - "@adamrtalbot" 24 | - "@maxulysse" 25 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test: -------------------------------------------------------------------------------- 1 | 2 | nextflow_function { 3 | 4 | name "Test Functions" 5 | script "../main.nf" 6 | config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "utils_nfcore_pipeline" 10 | tag "subworkflows/utils_nfcore_pipeline" 11 | 12 | test("Test Function checkConfigProvided") { 13 | 14 | function "checkConfigProvided" 15 | 16 | then { 17 | assertAll( 18 | { assert function.success }, 19 | { assert snapshot(function.result).match() } 20 | ) 21 | } 22 | } 23 | 24 | test("Test Function checkProfileProvided") { 25 | 26 | function "checkProfileProvided" 27 | 28 | when { 29 | function { 30 | """ 31 | input[0] = [] 32 | """ 33 | } 34 | } 35 | 36 | then { 37 | assertAll( 38 | { assert function.success }, 39 | { assert snapshot(function.result).match() } 40 | ) 41 | } 42 | } 43 | 44 | test("Test Function workflowCitation") { 45 | 46 | function "workflowCitation" 47 | 48 | then { 49 | assertAll( 50 | { assert function.success }, 51 | { assert snapshot(function.result).match() } 52 | ) 53 | } 54 | } 55 | 56 | test("Test Function nfCoreLogo") { 57 | 58 | function "nfCoreLogo" 59 | 60 | when { 61 | function { 62 | """ 63 | input[0] = false 64 | """ 65 | } 66 | } 67 | 68 | then { 69 | assertAll( 70 | { assert function.success }, 71 | { assert snapshot(function.result).match() } 72 | ) 73 | } 74 | } 75 | 76 | test("Test Function dashedLine") { 77 | 78 | function "dashedLine" 79 | 80 | when { 81 | function { 82 | """ 83 | input[0] = false 84 | """ 85 | } 86 | } 87 | 88 | then { 89 | assertAll( 90 | { assert function.success }, 91 | { assert snapshot(function.result).match() } 92 | ) 93 | } 94 | } 95 | 96 | test("Test Function without logColours") { 97 | 98 | function "logColours" 99 | 100 | when { 101 | function { 102 | """ 103 | input[0] = true 104 | """ 105 | } 106 | } 107 | 108 | then { 109 | assertAll( 110 | { assert function.success }, 111 | { assert snapshot(function.result).match() } 112 | ) 113 | } 114 | } 115 | 116 | test("Test Function with logColours") { 117 | function "logColours" 118 | 119 | when { 120 | function { 121 | """ 122 | input[0] = false 123 | """ 124 | } 125 | } 126 | 127 | then { 128 | 
assertAll( 129 | { assert function.success }, 130 | { assert snapshot(function.result).match() } 131 | ) 132 | } 133 | } 134 | } 135 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NFCORE_PIPELINE" 4 | script "../main.nf" 5 | config "subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config" 6 | workflow "UTILS_NFCORE_PIPELINE" 7 | tag "subworkflows" 8 | tag "subworkflows_nfcore" 9 | tag "utils_nfcore_pipeline" 10 | tag "subworkflows/utils_nfcore_pipeline" 11 | 12 | test("Should run without failures") { 13 | 14 | when { 15 | workflow { 16 | """ 17 | input[0] = [] 18 | """ 19 | } 20 | } 21 | 22 | then { 23 | assertAll( 24 | { assert workflow.success }, 25 | { assert snapshot(workflow.out).match() } 26 | ) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/main.workflow.nf.test.snap: -------------------------------------------------------------------------------- 1 | { 2 | "Should run without failures": { 3 | "content": [ 4 | { 5 | "0": [ 6 | true 7 | ], 8 | "valid_config": [ 9 | true 10 | ] 11 | } 12 | ], 13 | "meta": { 14 | "nf-test": "0.8.4", 15 | "nextflow": "23.10.1" 16 | }, 17 | "timestamp": "2024-02-28T12:03:25.726491" 18 | } 19 | } -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfcore_pipeline/tests/nextflow.config: -------------------------------------------------------------------------------- 1 | manifest { 2 | name = 'nextflow_workflow' 3 | author = """nf-core""" 4 | homePage = 'https://127.0.0.1' 5 | description = """Dummy pipeline""" 6 | nextflowVersion = '!>=23.04.0' 7 | version = '9.9.9' 8 | doi = 'https://doi.org/10.5281/zenodo.5070524' 9 | } 10 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/main.nf: -------------------------------------------------------------------------------- 1 | // 2 | // Subworkflow that uses the nf-validation plugin to render help text and parameter summary 3 | // 4 | 5 | /* 6 | ======================================================================================== 7 | IMPORT NF-VALIDATION PLUGIN 8 | ======================================================================================== 9 | */ 10 | 11 | include { paramsHelp } from 'plugin/nf-validation' 12 | include { paramsSummaryLog } from 'plugin/nf-validation' 13 | include { validateParameters } from 'plugin/nf-validation' 14 | 15 | /* 16 | ======================================================================================== 17 | SUBWORKFLOW DEFINITION 18 | ======================================================================================== 19 | */ 20 | 21 | workflow UTILS_NFVALIDATION_PLUGIN { 22 | 23 | take: 24 | print_help // boolean: print help 25 | workflow_command // string: default command used to run pipeline 26 | pre_help_text // string: string to be printed before help text and summary log 27 | post_help_text // string: string to be printed after help text and summary log 28 | validate_params // boolean: validate parameters 29 | schema_filename // path: JSON schema file, null to use default value 30 | 31 | main: 32 | 33 | log.debug "Using schema file: ${schema_filename}" 34 | 35 |
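// Groovy's Elvis operator (?:) below keeps the caller's value when it is non-null and non-empty, and otherwise falls back to ''.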
// Default values for strings 36 | pre_help_text = pre_help_text ?: '' 37 | post_help_text = post_help_text ?: '' 38 | workflow_command = workflow_command ?: '' 39 | 40 | // 41 | // Print help message if needed 42 | // 43 | if (print_help) { 44 | log.info pre_help_text + paramsHelp(workflow_command, parameters_schema: schema_filename) + post_help_text 45 | System.exit(0) 46 | } 47 | 48 | // 49 | // Print parameter summary to stdout 50 | // 51 | log.info pre_help_text + paramsSummaryLog(workflow, parameters_schema: schema_filename) + post_help_text 52 | 53 | // 54 | // Validate parameters relative to the parameter JSON schema 55 | // 56 | if (validate_params){ 57 | validateParameters(parameters_schema: schema_filename) 58 | } 59 | 60 | emit: 61 | dummy_emit = true 62 | } 63 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/meta.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json 2 | name: "UTILS_NFVALIDATION_PLUGIN" 3 | description: Use nf-validation to initiate and validate a pipeline 4 | keywords: 5 | - utility 6 | - pipeline 7 | - initialise 8 | - validation 9 | components: [] 10 | input: 11 | - print_help: 12 | type: boolean 13 | description: | 14 | Print help message and exit 15 | - workflow_command: 16 | type: string 17 | description: | 18 | The command to run the workflow e.g. "nextflow run main.nf" 19 | - pre_help_text: 20 | type: string 21 | description: | 22 | Text to print before the help message 23 | - post_help_text: 24 | type: string 25 | description: | 26 | Text to print after the help message 27 | - validate_params: 28 | type: boolean 29 | description: | 30 | Validate the parameters and error if invalid. 31 | - schema_filename: 32 | type: string 33 | description: | 34 | The filename of the schema to validate against. 
35 | output: 36 | - dummy_emit: 37 | type: boolean 38 | description: | 39 | Dummy emit to make nf-core subworkflows lint happy 40 | authors: 41 | - "@adamrtalbot" 42 | maintainers: 43 | - "@adamrtalbot" 44 | - "@maxulysse" 45 | -------------------------------------------------------------------------------- /subworkflows/nf-core/utils_nfvalidation_plugin/tests/main.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test Workflow UTILS_NFVALIDATION_PLUGIN" 4 | script "../main.nf" 5 | workflow "UTILS_NFVALIDATION_PLUGIN" 6 | 7 | test("Should run nothing") { 8 | 9 | when { 10 | 11 | params { 12 | monochrome_logs = true 13 | test_data = '' 14 | } 15 | 16 | workflow { 17 | """ 18 | help = false 19 | workflow_command = null 20 | pre_help_text = null 21 | post_help_text = null 22 | validate_params = false 23 | schema_filename = "$moduleTestDir/nextflow_schema.json" 24 | 25 | input[0] = help 26 | input[1] = workflow_command 27 | input[2] = pre_help_text 28 | input[3] = post_help_text 29 | input[4] = validate_params 30 | input[5] = schema_filename 31 | """ 32 | } 33 | } 34 | 35 | then { 36 | assertAll( 37 | { assert workflow.success } 38 | ) 39 | } 40 | } 41 | 42 | test("Should run help") { 43 | 44 | 45 | when { 46 | 47 | params { 48 | monochrome_logs = true 49 | test_data = '' 50 | } 51 | workflow { 52 | """ 53 | help = true 54 | workflow_command = null 55 | pre_help_text = null 56 | post_help_text = null 57 | validate_params = false 58 | schema_filename = "$moduleTestDir/nextflow_schema.json" 59 | 60 | input[0] = help 61 | input[1] = workflow_command 62 | input[2] = pre_help_text 63 | input[3] = post_help_text 64 | input[4] = validate_params 65 | input[5] = schema_filename 66 | """ 67 | } 68 | } 69 | 70 | then { 71 | assertAll( 72 | { assert workflow.success }, 73 | { assert workflow.exitStatus == 0 }, 74 | { assert workflow.stdout.any { it.contains('Input/output options') } }, 75 | { assert workflow.stdout.any { it.contains('--outdir') } } 76 | ) 77 | } 78 | } 79 | 80 | test("Should run help with command") { 81 | 82 | when { 83 | 84 | params { 85 | monochrome_logs = true 86 | test_data = '' 87 | } 88 | workflow { 89 | """ 90 | help = true 91 | workflow_command = "nextflow run noorg/doesntexist" 92 | pre_help_text = null 93 | post_help_text = null 94 | validate_params = false 95 | schema_filename = "$moduleTestDir/nextflow_schema.json" 96 | 97 | input[0] = help 98 | input[1] = workflow_command 99 | input[2] = pre_help_text 100 | input[3] = post_help_text 101 | input[4] = validate_params 102 | input[5] = schema_filename 103 | """ 104 | } 105 | } 106 | 107 | then { 108 | assertAll( 109 | { assert workflow.success }, 110 | { assert workflow.exitStatus == 0 }, 111 | { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } }, 112 | { assert workflow.stdout.any { it.contains('Input/output options') } }, 113 | { assert workflow.stdout.any { it.contains('--outdir') } } 114 | ) 115 | } 116 | } 117 | 118 | test("Should run help with extra text") { 119 | 120 | 121 | when { 122 | 123 | params { 124 | monochrome_logs = true 125 | test_data = '' 126 | } 127 | workflow { 128 | """ 129 | help = true 130 | workflow_command = "nextflow run noorg/doesntexist" 131 | pre_help_text = "pre-help-text" 132 | post_help_text = "post-help-text" 133 | validate_params = false 134 | schema_filename = "$moduleTestDir/nextflow_schema.json" 135 | 136 | input[0] = help 137 | input[1] = workflow_command 138 | input[2] = pre_help_text 
139 |                 input[3] = post_help_text
140 |                 input[4] = validate_params
141 |                 input[5] = schema_filename
142 |                 """
143 |             }
144 |         }
145 | 
146 |         then {
147 |             assertAll(
148 |                 { assert workflow.success },
149 |                 { assert workflow.exitStatus == 0 },
150 |                 { assert workflow.stdout.any { it.contains('pre-help-text') } },
151 |                 { assert workflow.stdout.any { it.contains('nextflow run noorg/doesntexist') } },
152 |                 { assert workflow.stdout.any { it.contains('Input/output options') } },
153 |                 { assert workflow.stdout.any { it.contains('--outdir') } },
154 |                 { assert workflow.stdout.any { it.contains('post-help-text') } }
155 |             )
156 |         }
157 |     }
158 | 
159 |     test("Should validate params") {
160 | 
161 |         when {
162 | 
163 |             params {
164 |                 monochrome_logs = true
165 |                 test_data = ''
166 |                 outdir = 1
167 |             }
168 |             workflow {
169 |                 """
170 |                 help = false
171 |                 workflow_command = null
172 |                 pre_help_text = null
173 |                 post_help_text = null
174 |                 validate_params = true
175 |                 schema_filename = "$moduleTestDir/nextflow_schema.json"
176 | 
177 |                 input[0] = help
178 |                 input[1] = workflow_command
179 |                 input[2] = pre_help_text
180 |                 input[3] = post_help_text
181 |                 input[4] = validate_params
182 |                 input[5] = schema_filename
183 |                 """
184 |             }
185 |         }
186 | 
187 |         then {
188 |             assertAll(
189 |                 { assert workflow.failed },
190 |                 { assert workflow.stdout.any { it.contains('ERROR ~ ERROR: Validation of pipeline parameters failed!') } }
191 |             )
192 |         }
193 |     }
194 | }
195 | 
--------------------------------------------------------------------------------
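These tests drive the subworkflow's six positional inputs (help, workflow_command, pre_help_text, post_help_text, validate_params, schema_filename) directly. For orientation, a pipeline would typically wire up the same inputs during initialisation along the following lines; this is a minimal sketch rather than code from this repository, and the run-command string is illustrative:

    include { UTILS_NFVALIDATION_PLUGIN } from './subworkflows/nf-core/utils_nfvalidation_plugin'

    workflow PIPELINE_INITIALISATION {
        main:
        // Print help text and/or validate params against the schema in the project root.
        UTILS_NFVALIDATION_PLUGIN (
            params.help,              // help
            "nextflow run nf-core/fetchngs --input ids.csv --outdir <OUTDIR>", // workflow_command (illustrative)
            null,                     // pre_help_text
            null,                     // post_help_text
            params.validate_params,   // validate_params
            "nextflow_schema.json"    // schema_filename
        )
    }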
/subworkflows/nf-core/utils_nfvalidation_plugin/tests/nextflow_schema.json:
--------------------------------------------------------------------------------
1 | {
2 |   "$schema": "http://json-schema.org/draft-07/schema",
3 |   "$id": "https://raw.githubusercontent.com/./master/nextflow_schema.json",
4 |   "title": ". pipeline parameters",
5 |   "description": "",
6 |   "type": "object",
7 |   "definitions": {
8 |     "input_output_options": {
9 |       "title": "Input/output options",
10 |       "type": "object",
11 |       "fa_icon": "fas fa-terminal",
12 |       "description": "Define where the pipeline should find input data and save output data.",
13 |       "required": ["outdir"],
14 |       "properties": {
15 |         "validate_params": {
16 |           "type": "boolean",
17 |           "description": "Validate parameters?",
18 |           "default": true,
19 |           "hidden": true
20 |         },
21 |         "outdir": {
22 |           "type": "string",
23 |           "format": "directory-path",
24 |           "description": "The output directory where the results will be saved. You have to use absolute paths to storage on Cloud infrastructure.",
25 |           "fa_icon": "fas fa-folder-open"
26 |         },
27 |         "test_data_base": {
28 |           "type": "string",
29 |           "default": "https://raw.githubusercontent.com/nf-core/test-datasets/modules",
30 |           "description": "Base path for the test data directory.",
31 |           "hidden": true
32 |         },
33 |         "test_data": {
34 |           "type": "string",
35 |           "description": "Fake test data param",
36 |           "hidden": true
37 |         }
38 |       }
39 |     },
40 |     "generic_options": {
41 |       "title": "Generic options",
42 |       "type": "object",
43 |       "fa_icon": "fas fa-file-import",
44 |       "description": "Less common options for the pipeline, typically set in a config file.",
45 |       "help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.",
46 |       "properties": {
47 |         "help": {
48 |           "type": "boolean",
49 |           "description": "Display help text.",
50 |           "fa_icon": "fas fa-question-circle",
51 |           "hidden": true
52 |         },
53 |         "version": {
54 |           "type": "boolean",
55 |           "description": "Display version and exit.",
56 |           "fa_icon": "fas fa-question-circle",
57 |           "hidden": true
58 |         },
59 |         "logo": {
60 |           "type": "boolean",
61 |           "default": true,
62 |           "description": "Display nf-core logo in console output.",
63 |           "fa_icon": "fas fa-image",
64 |           "hidden": true
65 |         },
66 |         "singularity_pull_docker_container": {
67 |           "type": "boolean",
68 |           "description": "Pull a Docker container and convert it for use with Singularity, instead of downloading a pre-built Singularity image.",
69 |           "hidden": true
70 |         },
71 |         "publish_dir_mode": {
72 |           "type": "string",
73 |           "default": "copy",
74 |           "description": "Method used to save pipeline results to output directory.",
75 |           "help_text": "The Nextflow `publishDir` option specifies which intermediate files should be saved to the output directory. This option tells the pipeline what method should be used to move these files. See [Nextflow docs](https://www.nextflow.io/docs/latest/process.html#publishdir) for details.",
76 |           "fa_icon": "fas fa-copy",
77 |           "enum": ["symlink", "rellink", "link", "copy", "copyNoFollow", "move"],
78 |           "hidden": true
79 |         },
80 |         "monochrome_logs": {
81 |           "type": "boolean",
82 |           "description": "Do not use coloured log outputs.",
83 |           "hidden": true
84 |         }
85 |       }
86 |     }
87 |   },
88 |   "allOf": [
89 |     {
90 |       "$ref": "#/definitions/input_output_options"
91 |     },
92 |     {
93 |       "$ref": "#/definitions/generic_options"
94 |     }
95 |   ]
96 | }
97 | 
--------------------------------------------------------------------------------
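In this fixture schema, `outdir` is the only required parameter and must be a string with `format: directory-path`; that is why the "Should validate params" test above, which sets `outdir = 1`, is expected to fail validation. A parameter set that would pass could look like this in a config file (values are illustrative):

    params {
        outdir           = '/absolute/path/to/results' // required; use absolute paths on cloud storage
        validate_params  = true                        // defaults to true in the schema
        publish_dir_mode = 'copy'                      // one of: symlink, rellink, link, copy, copyNoFollow, move
        monochrome_logs  = false
    }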
/tests/nextflow.config:
--------------------------------------------------------------------------------
1 | params {
2 |     // Base directory for nf-core/modules test data
3 |     modules_testdata_base_path = 's3://ngi-igenomes/testdata/nf-core/modules/'
4 | 
5 |     // Base directory for nf-core/fetchngs test data
6 |     pipelines_testdata_base_path = 's3://ngi-igenomes/testdata/nf-core/pipelines/fetchngs/1.15.0/'
7 | }
8 | 
9 | // Impose sensible resource limits for testing
10 | process {
11 |     withName: '.*' {
12 |         cpus   = 2
13 |         memory = 3.GB
14 |         time   = 2.h
15 |     }
16 | }
17 | 
18 | // Impose same minimum Nextflow version as the pipeline for testing
19 | manifest {
20 |     nextflowVersion = '!>=23.04.0'
21 | }
22 | 
23 | // Disable all Nextflow reporting options
24 | timeline { enabled = false }
25 | report { enabled = false }
26 | trace { enabled = false }
27 | dag { enabled = false }
28 | 
--------------------------------------------------------------------------------
/tower.yml:
--------------------------------------------------------------------------------
1 | reports:
2 |   samplesheet.csv:
3 |     display: "Auto-created samplesheet with collated metadata and FASTQ paths"
4 |   id_mappings.csv:
5 |     display: "File with database identifier mappings that can be used to rename samples"
6 |   multiqc_config.yml:
7 |     display: "MultiQC config file for bulk renaming of sample names from database ids"
--------------------------------------------------------------------------------
/workflows/sra/nextflow.config:
--------------------------------------------------------------------------------
1 | includeConfig "../../modules/local/multiqc_mappings_config/nextflow.config"
2 | includeConfig "../../modules/local/aspera_cli/nextflow.config"
3 | includeConfig "../../modules/local/sra_fastq_ftp/nextflow.config"
4 | includeConfig "../../modules/local/sra_ids_to_runinfo/nextflow.config"
5 | includeConfig "../../modules/local/sra_runinfo_to_ftp/nextflow.config"
6 | includeConfig "../../modules/local/sra_to_samplesheet/nextflow.config"
7 | includeConfig "../../modules/nf-core/sratools/prefetch/nextflow.config"
8 | includeConfig "../../subworkflows/nf-core/fastq_download_prefetch_fasterqdump_sratools/nextflow.config"
9 | 
--------------------------------------------------------------------------------
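This workflow-level config does nothing except compose the `nextflow.config` of every module the SRA workflow uses, so each module keeps its process settings next to its code. A module-level config included this way conventionally carries process-scoped options; the snippet below is a generic sketch of that convention, not a copy of any module config in this repository:

    process {
        // Scope options to a single module's process via its name.
        withName: 'SRA_IDS_TO_RUNINFO' {
            publishDir = [
                path: { "${params.outdir}/metadata" }, // hypothetical output subdirectory
                mode: params.publish_dir_mode
            ]
        }
    }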
/workflows/sra/tests/main.nf.test:
--------------------------------------------------------------------------------
1 | nextflow_workflow {
2 | 
3 |     name "Test workflow: sra/main.nf"
4 |     script "../main.nf"
5 |     workflow "SRA"
6 |     tag "SRA_DEFAULT"
7 | 
8 |     // Dependencies
9 |     tag "SRA_IDS_TO_RUNINFO"
10 |     tag "SRA_RUNINFO_TO_FTP"
11 |     tag "SRA_FASTQ_FTP"
12 |     tag "SRA_TO_SAMPLESHEET"
13 |     tag "MULTIQC_MAPPINGS_CONFIG"
14 | 
15 |     test("Parameters: default") {
16 | 
17 |         when {
18 |             workflow {
19 |                 """
20 |                 input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
21 |                 """
22 |             }
23 |         }
24 | 
25 |         then {
26 |             assert workflow.success
27 | 
28 |             assertAll(
29 |                 {
30 |                     with(workflow.out.samplesheet) {
31 |                         assert path(get(0)).readLines().size() == 4
32 |                         assert path(get(0)).readLines()*.split(',')[0].take(4) == ['"sample"', '"fastq_1"', '"fastq_2"', '"run_accession"']
33 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
34 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
35 |                     }
36 |                 },
37 |                 {
38 |                     with(workflow.out.mappings) {
39 |                         assert path(get(0)).readLines().size() == 4
40 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
41 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
42 |                     }
43 |                 },
44 |                 {
45 |                     with(workflow.out.sample_mappings) {
46 |                         assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768"
47 |                     }
48 |                 }
49 |             )
50 |         }
51 |     }
52 | }
53 | 
--------------------------------------------------------------------------------
/workflows/sra/tests/sra_custom_ena_metadata_fields.nf.test:
--------------------------------------------------------------------------------
1 | nextflow_workflow {
2 | 
3 |     name "Test workflow: sra/main.nf"
4 |     script "../main.nf"
5 |     workflow "SRA"
6 |     tag "SRA_CUSTOM_ENA_METADATA_FIELDS"
7 | 
8 |     // Dependencies
9 |     tag "SRA_IDS_TO_RUNINFO"
10 |     tag "SRA_RUNINFO_TO_FTP"
11 |     tag "SRA_FASTQ_FTP"
12 |     tag "SRA_TO_SAMPLESHEET"
13 |     tag "MULTIQC_MAPPINGS_CONFIG"
14 | 
15 |     test("Parameters: --nf_core_pipeline rnaseq --ena_metadata_fields ... --sample_mapping_fields ...") {
16 | 
17 |         when {
18 |             workflow {
19 |                 """
20 |                 input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
21 |                 """
22 |             }
23 |             params {
24 |                 nf_core_pipeline = "rnaseq"
25 |                 ena_metadata_fields = "run_accession,experiment_accession,library_layout,fastq_ftp,fastq_md5"
26 |                 sample_mapping_fields = "run_accession,library_layout"
27 |             }
28 |         }
29 | 
30 |         then {
31 |             assert workflow.success
32 | 
33 |             assertAll(
34 |                 {
35 |                     with(workflow.out.samplesheet) {
36 |                         assert path(get(0)).readLines().size() == 4
37 |                         assert path(get(0)).readLines()*.split(',')[0].take(5) == ['"sample"', '"fastq_1"', '"fastq_2"', '"strandedness"', '"run_accession"']
38 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
39 |                         assert path(get(0)).text.contains('SINGLE')
40 |                     }
41 |                 },
42 |                 {
43 |                     with(workflow.out.mappings) {
44 |                         assert path(get(0)).readLines().size() == 4
45 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
46 |                         assert path(get(0)).text.contains('SINGLE')
47 |                     }
48 |                 },
49 |                 {
50 |                     with(workflow.out.sample_mappings) {
51 |                         assert path(get(0)[0]).md5 == "3b70bc9658eab4ba2f4ec98cb749ac9d"
52 |                     }
53 |                 }
54 |             )
55 |         }
56 |     }
57 | }
58 | 
--------------------------------------------------------------------------------
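The test above overrides both which ENA fields are fetched and which of them feed the id-mapping file, and the assertions change accordingly: an extra strandedness column from the rnaseq samplesheet layout, 'SINGLE' from the requested library_layout field, and a different md5 for the mappings file. At run time the same override is just three parameters:

    params {
        nf_core_pipeline      = 'rnaseq'
        ena_metadata_fields   = 'run_accession,experiment_accession,library_layout,fastq_ftp,fastq_md5'
        sample_mapping_fields = 'run_accession,library_layout'
    }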
"ERX1234253", "SRX6725035") 21 | """ 22 | } 23 | params { 24 | download_method = 'aspera' 25 | } 26 | } 27 | 28 | then { 29 | assert workflow.success 30 | 31 | assertAll( 32 | { 33 | with(workflow.out.samplesheet) { 34 | assert path(get(0)).readLines().size() == 4 35 | assert path(get(0)).readLines()*.split(',')[0].take(4) == ['"sample"', '"fastq_1"', '"fastq_2"', '"run_accession"'] 36 | assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"'] 37 | assert path(get(0)).text.contains('Illumina HiSeq 2500') 38 | } 39 | }, 40 | { 41 | with(workflow.out.mappings) { 42 | assert path(get(0)).readLines().size() == 4 43 | assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"'] 44 | assert path(get(0)).text.contains('Illumina HiSeq 2500') 45 | } 46 | }, 47 | { 48 | with(workflow.out.sample_mappings) { 49 | assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768" 50 | } 51 | } 52 | ) 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /workflows/sra/tests/sra_download_method_sratools.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test workflow: sra/main.nf" 4 | script "../main.nf" 5 | workflow "SRA" 6 | tag "SRA_DOWNLOAD_METHOD_SRATOOLS" 7 | 8 | // Dependencies 9 | tag "FASTQ_DOWNLOAD_PREFETCH_FASTERQDUMP_SRATOOLS" 10 | tag "SRA_IDS_TO_RUNINFO" 11 | tag "SRA_RUNINFO_TO_FTP" 12 | tag "SRA_TO_SAMPLESHEET" 13 | tag "MULTIQC_MAPPINGS_CONFIG" 14 | 15 | test("Parameters: --download_method sratools") { 16 | 17 | when { 18 | workflow { 19 | """ 20 | input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035") 21 | """ 22 | } 23 | params { 24 | download_method = 'sratools' 25 | } 26 | } 27 | 28 | then { 29 | assert workflow.success 30 | 31 | assertAll( 32 | { 33 | with(workflow.out.samplesheet) { 34 | assert path(get(0)).readLines().size() == 4 35 | assert path(get(0)).readLines()*.split(',')[0].take(4) == ['"sample"', '"fastq_1"', '"fastq_2"', '"run_accession"'] 36 | assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"'] 37 | assert path(get(0)).text.contains('Illumina HiSeq 2500') 38 | } 39 | }, 40 | { 41 | with(workflow.out.mappings) { 42 | assert path(get(0)).readLines().size() == 4 43 | assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"'] 44 | assert path(get(0)).text.contains('Illumina HiSeq 2500') 45 | } 46 | }, 47 | { 48 | with(workflow.out.sample_mappings) { 49 | assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768" 50 | } 51 | } 52 | ) 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test: -------------------------------------------------------------------------------- 1 | nextflow_workflow { 2 | 3 | name "Test workflow: sra/main.nf" 4 | script "../main.nf" 5 | workflow "SRA" 6 | tag "SRA_NF_CORE_PIPELINE_ATACSEQ" 7 | 8 | // Dependencies 9 | tag "SRA_IDS_TO_RUNINFO" 10 | tag "SRA_RUNINFO_TO_FTP" 11 | tag "SRA_FASTQ_FTP" 12 | tag "SRA_TO_SAMPLESHEET" 13 | tag "MULTIQC_MAPPINGS_CONFIG" 14 | 15 | test("Parameters: --nf_core_pipeline atacseq") { 16 | 17 | when { 18 | workflow { 19 | """ 20 | input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035") 21 | """ 
/workflows/sra/tests/sra_nf_core_pipeline_atacseq.nf.test:
--------------------------------------------------------------------------------
1 | nextflow_workflow {
2 | 
3 |     name "Test workflow: sra/main.nf"
4 |     script "../main.nf"
5 |     workflow "SRA"
6 |     tag "SRA_NF_CORE_PIPELINE_ATACSEQ"
7 | 
8 |     // Dependencies
9 |     tag "SRA_IDS_TO_RUNINFO"
10 |     tag "SRA_RUNINFO_TO_FTP"
11 |     tag "SRA_FASTQ_FTP"
12 |     tag "SRA_TO_SAMPLESHEET"
13 |     tag "MULTIQC_MAPPINGS_CONFIG"
14 | 
15 |     test("Parameters: --nf_core_pipeline atacseq") {
16 | 
17 |         when {
18 |             workflow {
19 |                 """
20 |                 input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
21 |                 """
22 |             }
23 |             params {
24 |                 nf_core_pipeline = "atacseq"
25 |             }
26 |         }
27 | 
28 |         then {
29 |             assert workflow.success
30 | 
31 |             assertAll(
32 |                 {
33 |                     with(workflow.out.samplesheet) {
34 |                         assert path(get(0)).readLines().size() == 4
35 |                         assert path(get(0)).readLines()*.split(',')[0].take(5) == ['"sample"', '"fastq_1"', '"fastq_2"', '"replicate"', '"run_accession"']
36 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
37 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
38 |                     }
39 |                 },
40 |                 {
41 |                     with(workflow.out.mappings) {
42 |                         assert path(get(0)).readLines().size() == 4
43 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
44 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
45 |                     }
46 |                 },
47 |                 {
48 |                     with(workflow.out.sample_mappings) {
49 |                         assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768"
50 |                     }
51 |                 }
52 |             )
53 |         }
54 |     }
55 | }
56 | 
--------------------------------------------------------------------------------
/workflows/sra/tests/sra_nf_core_pipeline_rnaseq.nf.test:
--------------------------------------------------------------------------------
1 | nextflow_workflow {
2 | 
3 |     name "Test workflow: sra/main.nf"
4 |     script "../main.nf"
5 |     workflow "SRA"
6 |     tag "SRA_NF_CORE_PIPELINE_RNASEQ"
7 | 
8 |     // Dependencies
9 |     tag "SRA_IDS_TO_RUNINFO"
10 |     tag "SRA_RUNINFO_TO_FTP"
11 |     tag "SRA_FASTQ_FTP"
12 |     tag "SRA_TO_SAMPLESHEET"
13 |     tag "MULTIQC_MAPPINGS_CONFIG"
14 | 
15 |     test("Parameters: --nf_core_pipeline rnaseq") {
16 | 
17 |         when {
18 |             workflow {
19 |                 """
20 |                 input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
21 |                 """
22 |             }
23 |             params {
24 |                 nf_core_pipeline = "rnaseq"
25 |             }
26 |         }
27 | 
28 |         then {
29 |             assert workflow.success
30 | 
31 |             assertAll(
32 |                 {
33 |                     with(workflow.out.samplesheet) {
34 |                         assert path(get(0)).readLines().size() == 4
35 |                         assert path(get(0)).readLines()*.split(',')[0].take(5) == ['"sample"', '"fastq_1"', '"fastq_2"', '"strandedness"', '"run_accession"']
36 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
37 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
38 |                     }
39 |                 },
40 |                 {
41 |                     with(workflow.out.mappings) {
42 |                         assert path(get(0)).readLines().size() == 4
43 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
44 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
45 |                     }
46 |                 },
47 |                 {
48 |                     with(workflow.out.sample_mappings) {
49 |                         assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768"
50 |                     }
51 |                 }
52 |             )
53 |         }
54 |     }
55 | }
56 | 
--------------------------------------------------------------------------------
/workflows/sra/tests/sra_nf_core_pipeline_taxprofiler.nf.test:
--------------------------------------------------------------------------------
1 | nextflow_workflow {
2 | 
3 |     name "Test workflow: sra/main.nf"
4 |     script "../main.nf"
5 |     workflow "SRA"
6 |     tag "SRA_NF_CORE_PIPELINE_TAXPROFILER"
7 | 
8 |     // Dependencies
9 |     tag "SRA_IDS_TO_RUNINFO"
10 |     tag "SRA_RUNINFO_TO_FTP"
11 |     tag "SRA_FASTQ_FTP"
12 |     tag "SRA_TO_SAMPLESHEET"
13 |     tag "MULTIQC_MAPPINGS_CONFIG"
14 | 
15 |     test("Parameters: --nf_core_pipeline taxprofiler") {
16 | 
17 |         when {
18 |             workflow {
19 |                 """
20 |                 input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
21 |                 """
22 |             }
23 |             params {
24 |                 nf_core_pipeline = "taxprofiler"
25 |             }
26 |         }
27 | 
28 |         then {
29 |             assert workflow.success
30 | 
31 |             assertAll(
32 |                 {
33 |                     with(workflow.out.samplesheet) {
34 |                         assert path(get(0)).readLines().size() == 4
35 |                         assert path(get(0)).readLines()*.split(',')[0].take(5) == ['"sample"', '"fastq_1"', '"fastq_2"', '"fasta"', '"run_accession"']
36 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
37 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
38 |                     }
39 |                 },
40 |                 {
41 |                     with(workflow.out.mappings) {
42 |                         assert path(get(0)).readLines().size() == 4
43 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
44 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
45 |                     }
46 |                 },
47 |                 {
48 |                     with(workflow.out.sample_mappings) {
49 |                         assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768"
50 |                     }
51 |                 }
52 |             )
53 |         }
54 |     }
55 | }
56 | 
--------------------------------------------------------------------------------
/workflows/sra/tests/sra_nf_core_pipeline_viralrecon.nf.test:
--------------------------------------------------------------------------------
1 | nextflow_workflow {
2 | 
3 |     name "Test workflow: sra/main.nf"
4 |     script "../main.nf"
5 |     workflow "SRA"
6 |     tag "SRA_NF_CORE_PIPELINE_VIRALRECON"
7 | 
8 |     // Dependencies
9 |     tag "SRA_IDS_TO_RUNINFO"
10 |     tag "SRA_RUNINFO_TO_FTP"
11 |     tag "SRA_FASTQ_FTP"
12 |     tag "SRA_TO_SAMPLESHEET"
13 |     tag "MULTIQC_MAPPINGS_CONFIG"
14 | 
15 |     test("Parameters: --nf_core_pipeline viralrecon") {
16 | 
17 |         when {
18 |             workflow {
19 |                 """
20 |                 input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
21 |                 """
22 |             }
23 |             params {
24 |                 nf_core_pipeline = "viralrecon"
25 |             }
26 |         }
27 | 
28 |         then {
29 |             assert workflow.success
30 | 
31 |             assertAll(
32 |                 {
33 |                     with(workflow.out.samplesheet) {
34 |                         assert path(get(0)).readLines().size() == 4
35 |                         assert path(get(0)).readLines()*.split(',')[0].take(4) == ['"sample"', '"fastq_1"', '"fastq_2"', '"run_accession"']
36 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
37 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
38 |                     }
39 |                 },
40 |                 {
41 |                     with(workflow.out.mappings) {
42 |                         assert path(get(0)).readLines().size() == 4
43 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
44 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
45 |                     }
46 |                 },
47 |                 {
48 |                     with(workflow.out.sample_mappings) {
49 |                         assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768"
50 |                     }
51 |                 }
52 |             )
53 |         }
54 |     }
55 | }
56 | 
--------------------------------------------------------------------------------
/workflows/sra/tests/sra_skip_fastq_download.nf.test:
--------------------------------------------------------------------------------
1 | nextflow_workflow {
2 | 
3 |     name "Test workflow: sra/main.nf"
4 |     script "../main.nf"
5 |     workflow "SRA"
6 |     tag "SRA_SKIP_FASTQ_DOWNLOAD"
7 | 
8 |     // Dependencies
9 |     tag "SRA_IDS_TO_RUNINFO"
10 |     tag "SRA_RUNINFO_TO_FTP"
11 |     tag "SRA_TO_SAMPLESHEET"
12 |     tag "MULTIQC_MAPPINGS_CONFIG"
13 | 
14 |     test("Parameters: --skip_fastq_download") {
15 | 
16 |         when {
17 |             workflow {
18 |                 """
19 |                 input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
20 |                 """
21 |             }
22 |             params {
23 |                 skip_fastq_download = true
24 |             }
25 |         }
26 | 
27 |         then {
28 |             assert workflow.success
29 | 
30 |             assertAll(
31 |                 {
32 |                     with(workflow.out.samplesheet) {
33 |                         assert path(get(0)).readLines().size() == 4
34 |                         assert path(get(0)).readLines()*.split(',')[0].take(4) == ['"sample"', '"fastq_1"', '"fastq_2"', '"run_accession"']
35 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
36 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
37 |                     }
38 |                 },
39 |                 {
40 |                     with(workflow.out.mappings) {
41 |                         assert path(get(0)).readLines().size() == 4
42 |                         assert path(get(0)).readLines()*.split(',').collect { it[0] } == ['"sample"', '"DRX026011"', '"ERX1234253"', '"SRX6725035"']
43 |                         assert path(get(0)).text.contains('Illumina HiSeq 2500')
44 |                     }
45 |                 },
46 |                 {
47 |                     with(workflow.out.sample_mappings) {
48 |                         assert path(get(0)[0]).md5 == "1ac06bb95b503703430e74660bbdd768"
49 |                     }
50 |                 }
51 |             )
52 |         }
53 |     }
54 | }
55 | 
--------------------------------------------------------------------------------
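Every workflow test above declares a primary tag plus one tag per dependency module, so a single scenario, or everything touching one module, can be selected at once; with a standard nf-test setup that is typically a command of the form `nf-test test --tag SRA_DEFAULT` (the exact invocation depends on the local setup). A new per-pipeline variant only needs its own tag, `nf_core_pipeline` value, and expected header columns; sketched below with placeholder values, not an existing test:

    nextflow_workflow {

        name "Test workflow: sra/main.nf"
        script "../main.nf"
        workflow "SRA"
        tag "SRA_NF_CORE_PIPELINE_EXAMPLE" // placeholder tag

        test("Parameters: --nf_core_pipeline example") {

            when {
                workflow {
                    """
                    input[0] = Channel.from("DRX026011", "ERX1234253", "SRX6725035")
                    """
                }
                params {
                    nf_core_pipeline = "example" // placeholder; not a supported value
                }
            }

            then {
                assert workflow.success
                // Assert on workflow.out.samplesheet, mappings and sample_mappings as above.
            }
        }
    }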