├── .github └── workflows │ ├── build-sphinx.yml │ ├── deploy-sphinx.yml │ ├── docker-builds.yml │ └── tests-python.yml ├── .gitignore ├── CITATION.cff ├── LICENSE ├── README.md ├── bin ├── __init__.py ├── build_config.py ├── build_config_multimodal.py ├── consolidate_md.py ├── constants │ ├── constants.py │ └── suffixes.py ├── generate_image.py ├── integrate_anndata.py ├── integrate_image.py ├── ome_zarr_metadata.py ├── process_h5ad.py ├── process_merscope.py ├── process_molecules.py ├── process_spaceranger.py ├── process_xenium.py ├── router.py └── write_spatialdata.py ├── docs ├── .buildinfo ├── .nojekyll ├── _sources │ ├── citing.rst.txt │ ├── configuration.rst.txt │ ├── examples │ │ ├── visium.rst.txt │ │ └── xenium.rst.txt │ ├── index.rst.txt │ ├── installation.rst.txt │ ├── modules.rst.txt │ ├── multimodal │ │ ├── configuration.rst.txt │ │ ├── overview.rst.txt │ │ ├── run.rst.txt │ │ └── visualise.rst.txt │ ├── run.rst.txt │ ├── testing.rst.txt │ └── visualise.rst.txt ├── _static │ ├── _sphinx_javascript_frameworks_compat.js │ ├── basic.css │ ├── css │ │ ├── badge_only.css │ │ ├── custom.css │ │ ├── fonts │ │ │ ├── Roboto-Slab-Bold.woff │ │ │ ├── Roboto-Slab-Bold.woff2 │ │ │ ├── Roboto-Slab-Regular.woff │ │ │ ├── Roboto-Slab-Regular.woff2 │ │ │ ├── fontawesome-webfont.eot │ │ │ ├── fontawesome-webfont.svg │ │ │ ├── fontawesome-webfont.ttf │ │ │ ├── fontawesome-webfont.woff │ │ │ ├── fontawesome-webfont.woff2 │ │ │ ├── lato-bold-italic.woff │ │ │ ├── lato-bold-italic.woff2 │ │ │ ├── lato-bold.woff │ │ │ ├── lato-bold.woff2 │ │ │ ├── lato-normal-italic.woff │ │ │ ├── lato-normal-italic.woff2 │ │ │ ├── lato-normal.woff │ │ │ └── lato-normal.woff2 │ │ └── theme.css │ ├── doctools.js │ ├── documentation_options.js │ ├── file.png │ ├── jquery.js │ ├── js │ │ ├── badge_only.js │ │ ├── html5shiv-printshiv.min.js │ │ ├── html5shiv.min.js │ │ └── theme.js │ ├── language_data.js │ ├── minus.png │ ├── plus.png │ ├── pygments.css │ ├── searchtools.js │ └── sphinx_highlight.js ├── citing.html ├── configuration.html ├── examples │ ├── visium.html │ └── xenium.html ├── genindex.html ├── index.html ├── installation.html ├── modules.html ├── multimodal │ ├── configuration.html │ ├── overview.html │ ├── run.html │ └── visualise.html ├── objects.inv ├── py-modindex.html ├── run.html ├── search.html ├── searchindex.js ├── testing.html └── visualise.html ├── envs ├── Dockerfile ├── build-docker-imgs.sh ├── build_config │ ├── Dockerfile │ ├── environment.yaml │ └── requirements.txt ├── dev │ └── requirements.txt ├── environment.yaml └── requirements.txt ├── main.nf ├── multimodal.nf ├── nextflow.config ├── notebooks ├── generate_xenium_label.ipynb ├── preprocess_cell2location.ipynb └── preprocess_merscope.ipynb ├── sphinx ├── Makefile ├── README.md ├── _static │ └── css │ │ └── custom.css ├── citing.rst ├── conf.py ├── configuration.rst ├── examples │ ├── visium.rst │ └── xenium.rst ├── index.rst ├── installation.rst ├── make.bat ├── modules.rst ├── multimodal │ ├── configuration.rst │ ├── overview.rst │ ├── run.rst │ └── visualise.rst ├── run.rst ├── testing.rst └── visualise.rst ├── templates ├── examples │ ├── CytAssist_FFPE_Human_Breast_Cancer.yaml │ └── Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.yaml ├── iss_template.yaml ├── merscope_template.yaml ├── multimodal-template.yaml ├── scrnaseq_s3_template.yaml ├── scrnaseq_template.yaml ├── template.yaml ├── visium_template.yaml └── xenium_template.yaml └── tests ├── expected_output ├── advanced_test_config.json ├── custom_test_config.json ├── 
minimal_test_config.json └── simple_test_config.json ├── input ├── advanced_config.json ├── custom_config.json ├── minimal_config.json └── simple_config.json ├── test_class.py └── test_class_multimodal.py /.github/workflows/build-sphinx.yml: -------------------------------------------------------------------------------- 1 | name: build-docs 2 | 3 | on: 4 | push: 5 | branches: [dev] 6 | 7 | pull_request: 8 | branches: [main, dev] 9 | 10 | jobs: 11 | run: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v2 16 | - name: Set up Python 3.10 17 | uses: actions/setup-python@v2 18 | with: 19 | python-version: "3.10" 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | pip install -r ./envs/requirements.txt 24 | pip install -r ./envs/dev/requirements.txt 25 | pip install -r ./envs/build_config/requirements.txt 26 | - name: Sphinx Build 27 | working-directory: ./sphinx 28 | run: | 29 | make html 30 | touch _build/html/.nojekyll 31 | -------------------------------------------------------------------------------- /.github/workflows/deploy-sphinx.yml: -------------------------------------------------------------------------------- 1 | name: deploy-docs 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | tags: ["v*"] 7 | 8 | jobs: 9 | run: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v2 14 | - name: Set up Python 3.10 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: "3.10" 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install -r ./envs/requirements.txt 22 | pip install -r ./envs/dev/requirements.txt 23 | pip install -r ./envs/build_config/requirements.txt 24 | - name: Sphinx Build 25 | working-directory: ./sphinx 26 | run: | 27 | make html 28 | touch _build/html/.nojekyll 29 | - name: Deploy to GitHub Pages 30 | uses: JamesIves/github-pages-deploy-action@v4.4.1 31 | with: 32 | branch: gh-pages 33 | folder: sphinx/_build/html 34 | target-folder: docs 35 | clean: true 36 | -------------------------------------------------------------------------------- /.github/workflows/docker-builds.yml: -------------------------------------------------------------------------------- 1 | name: docker-builds 2 | 3 | on: 4 | push: 5 | tags: ["v*"] 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v2 13 | - name: Get tag 14 | uses: oprypin/find-latest-tag@v1 15 | with: 16 | repository: haniffalab/webatlas-pipeline 17 | releases-only: true 18 | id: find-latest-tag 19 | - name: Set version 20 | run: | 21 | # TAG=${{ steps.find-latest-tag.outputs.tag }} 22 | TAG=${GITHUB_REF#refs/*/} 23 | VERSION=${TAG#v} 24 | echo Version: $VERSION 25 | echo "VERSION=$VERSION" >> $GITHUB_ENV 26 | - name: Login to Docker Hub 27 | uses: docker/login-action@v3 28 | with: 29 | username: ${{ secrets.DOCKERHUB_USERNAME }} 30 | password: ${{ secrets.DOCKERHUB_TOKEN }} 31 | - name: Build Docker (webatlas-pipeline) 32 | working-directory: ./envs 33 | run: | 34 | docker build --platform=linux/amd64 -t haniffalab/webatlas-pipeline:${VERSION} -f ./Dockerfile . 35 | - name: Build Docker (webatlas-pipeline-build-config) 36 | working-directory: ./envs/build_config 37 | run: | 38 | docker build --platform=linux/amd64 -t haniffalab/webatlas-pipeline-build-config:${VERSION} -f ./Dockerfile . 
39 | - name: Push Docker images 40 | run: | 41 | docker push haniffalab/webatlas-pipeline:${VERSION} 42 | docker push haniffalab/webatlas-pipeline-build-config:${VERSION} 43 | -------------------------------------------------------------------------------- /.github/workflows/tests-python.yml: -------------------------------------------------------------------------------- 1 | name: python-tests 2 | 3 | on: 4 | push: 5 | branches: [main, dev] 6 | 7 | pull_request: 8 | branches: [main, dev] 9 | 10 | jobs: 11 | run: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Install libvips 15 | run: | 16 | sudo apt-get update 17 | sudo apt-get install -y --no-install-recommends libvips 18 | - name: Checkout 19 | uses: actions/checkout@v2 20 | - name: Set up Python 3.10 21 | uses: actions/setup-python@v2 22 | with: 23 | python-version: "3.10" 24 | - name: Clone ome-zarr-metadata 25 | uses: actions/checkout@v2 26 | with: 27 | repository: ome/ome-zarr-metadata 28 | path: ./ome-zarr-metadata 29 | submodules: recursive 30 | - name: Install dependencies 31 | run: | 32 | python -m pip install --upgrade pip 33 | pip install -r ./envs/requirements.txt 34 | pip install -r ./envs/dev/requirements.txt 35 | cd ./ome-zarr-metadata && pre-commit install && pip install -e . && cd ../ 36 | - name: Run tests 37 | run: python -m pytest --cov=bin tests/test_class.py 38 | env: 39 | PYTHONPATH: ./bin 40 | - name: Upload coverage to Codecov 41 | uses: codecov/codecov-action@v3 42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | *.DS_Store 3 | .vscode 4 | .snakemake_timestamp 5 | 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest
37 | *.spec
38 | 
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 | 
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .coverage
47 | .coverage.*
48 | .cache
49 | nosetests.xml
50 | coverage.xml
51 | *.cover
52 | .hypothesis/
53 | .pytest_cache/
54 | 
55 | # Translations
56 | *.mo
57 | *.pot
58 | 
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | 
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 | 
68 | # Scrapy stuff:
69 | .scrapy
70 | 
71 | # Sphinx documentation
72 | docs/_build/
73 | sphinx/_build/
74 | 
75 | # PyBuilder
76 | target/
77 | 
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 | 
81 | # pyenv
82 | .python-version
83 | 
84 | # celery beat schedule file
85 | celerybeat-schedule
86 | 
87 | # SageMath parsed files
88 | *.sage.py
89 | 
90 | # Environments
91 | .env
92 | .venv
93 | env/
94 | venv/
95 | ENV/
96 | env.bak/
97 | venv.bak/
98 | .envrc
99 | 
100 | # Spyder project settings
101 | .spyderproject
102 | .spyproject
103 | 
104 | # Rope project settings
105 | .ropeproject
106 | 
107 | # mkdocs documentation
108 | /site
109 | 
110 | # mypy
111 | .mypy_cache/
112 | 
113 | # IDE
114 | .idea/
115 | 
116 | # Vim
117 | *.swp
118 | *.un~
119 | 
120 | # Nextflow
121 | nextflow
122 | .nextflow.log*
123 | .nextflow/
124 | work/
125 | input/
126 | output/
-------------------------------------------------------------------------------- /CITATION.cff: --------------------------------------------------------------------------------
1 | cff-version: 1.2.0
2 | type: software
3 | message: "If you use this repo, please cite it"
4 | title: "WebAtlas"
5 | url: "https://github.com/haniffalab/webatlas-pipeline"
6 | doi: 10.5281/zenodo.7405818
7 | authors:
8 |   - family-names: "Li"
9 |     given-names: "Tong"
10 |     orcid: "https://orcid.org/0000-0002-8240-4476"
11 |   - family-names: "Horsfall"
12 |     given-names: "David"
13 |     orcid: "https://orcid.org/0000-0002-8086-812X"
14 |   - family-names: "Basurto-Lozada"
15 |     given-names: "Daniela"
16 |     orcid: "https://orcid.org/0000-0003-3943-8424"
17 |   - family-names: "Prete"
18 |     given-names: "Martin"
19 |     orcid: "https://orcid.org/0000-0002-5946-821X"
20 |   - family-names: "Cox"
21 |     given-names: "Jessica"
22 |     orcid: "https://orcid.org/0000-0002-1109-1679"
23 |   - family-names: "Squires"
24 |     given-names: "Iolo"
25 |     orcid: "https://orcid.org/0000-0002-3424-6354"
-------------------------------------------------------------------------------- /LICENSE: --------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2023
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | [![python-tests](https://github.com/haniffalab/webatlas-pipeline/actions/workflows/tests-python.yml/badge.svg)](https://github.com/haniffalab/webatlas-pipeline/actions/workflows/tests-python.yml)
2 | [![codecov](https://codecov.io/gh/haniffalab/webatlas-pipeline/branch/main/graph/badge.svg?token=7HQVFH08WJ)](https://app.codecov.io/gh/haniffalab/webatlas-pipeline)
3 | 
4 | # WebAtlas Pipeline
5 | 
6 | [![docs](https://img.shields.io/badge/Documentation-online-blue)](https://haniffalab.github.io/webatlas-pipeline)
7 | [![demo](https://img.shields.io/badge/Demos-view-blue)](https://cellatlas.io/webatlas)
8 | [![doi](https://zenodo.org/badge/DOI/10.5281/zenodo.7405818.svg)](https://doi.org/10.5281/zenodo.7405818)
9 | 
10 | The WebAtlas Pipeline uses Nextflow to process spatial and single-cell experiment data for visualisation in a web browser.
11 | 
12 | ## Usage
13 | 
14 | The pipeline can handle data from `h5ad` files, `tif` image files, SpaceRanger, Xenium and MERSCOPE output. It can also generate label image files from data files.
15 | 
16 | Running the pipeline requires a parameters file that defines configuration options and the data to be processed.
17 | Full instructions and parameter definitions for this file are available in the [documentation](https://haniffalab.com/webatlas-pipeline/configuration.html).
18 | 
19 | A parameters file looks like:
20 | 
21 | ```yaml
22 | outdir: "/path/to/output/"
23 | 
24 | args:
25 |   h5ad:
26 |     compute_embeddings: "True"
27 | 
28 | projects:
29 |   - project: project_1
30 |     datasets:
31 |       - dataset: dataset_1
32 |         data:
33 |           - data_type: h5ad
34 |             data_path: /path/to/project_1/dataset_1/anndata.h5ad
35 |           - data_type: raw_image
36 |             data_path: /path/to/project_1/dataset_1/raw_image.tif
37 |           - data_type: label_image
38 |             data_path: /path/to/project_1/dataset_1/label_image.tif
39 | 
40 | vitessce_options:
41 |   spatial:
42 |     xy: "obsm/spatial"
43 |   mappings:
44 |     obsm/X_umap: [0, 1]
45 |   layout: "simple"
46 | ```
47 | 
48 | The pipeline can then be run like:
49 | 
50 | ```sh
51 | nextflow run main.nf -params-file /path/to/run-params.yaml -entry Full_pipeline
52 | ```
53 | 
54 | Parameters file templates are available in the `templates` directory.
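55 | 
56 | Once the pipeline has run, you can sanity-check the generated AnnData Zarr output from Python. This is an illustrative sketch, not part of the pipeline; the path below is hypothetical and depends on your `outdir`, project and dataset names:
57 | 
58 | ```python
59 | import anndata as ad
60 | 
61 | # Open an AnnData Zarr store written by the pipeline
62 | # (hypothetical path; adjust to your outdir/project/dataset naming)
63 | adata = ad.read_zarr("/path/to/output/project_1/dataset_1-anndata.zarr")
64 | 
65 | # The coordinates referenced in vitessce_options should be present in obsm,
66 | # e.g. adata.obsm["spatial"] and adata.obsm["X_umap"]
67 | print(adata)
68 | print(list(adata.obsm.keys()))
69 | ```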
55 | -------------------------------------------------------------------------------- /bin/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os, sys 3 | 4 | sys.path.append(os.path.dirname(os.path.realpath(__file__))) 5 | -------------------------------------------------------------------------------- /bin/consolidate_md.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | consolidate_md.py 4 | ==================================== 5 | Consolidates Zarr metadata 6 | """ 7 | 8 | import logging 9 | import fire 10 | from zarr import consolidate_metadata 11 | from pathlib import Path 12 | 13 | 14 | def consolidate(file_in: str) -> None: 15 | """Function to consolidate the metadata of a Zarr file 16 | 17 | Args: 18 | file_in (str): Path to Zarr file 19 | """ 20 | log = logging.getLogger() 21 | log.setLevel(logging.INFO) 22 | stem = Path(file_in).stem 23 | logging.info(stem) 24 | consolidated = consolidate_metadata(file_in) 25 | logging.info(consolidated.info) 26 | 27 | 28 | if __name__ == "__main__": 29 | fire.Fire(consolidate) 30 | -------------------------------------------------------------------------------- /bin/constants/constants.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from vitessce import ( 3 | DataType as dt, 4 | FileType as ft, 5 | Component as cm, 6 | CoordinationType as ct, 7 | ) 8 | 9 | SINGLE_ZARR = "anndata.zarr" 10 | OBS = "obs" 11 | 12 | # @TODO: remove deprecated and unused data types 13 | # Data types with ordered file types 14 | DATA_TYPES = { 15 | OBS: [ 16 | (SINGLE_ZARR, ft.ANNDATA_ZARR), 17 | ], 18 | # dt.OBS_EMBEDDING: [ 19 | # ("obsEmbedding.anndata.zarr", ft.OBS_EMBEDDING_ANNDATA_ZARR), 20 | # ("obsEmbedding.csv", ft.OBS_EMBEDDING_CSV), 21 | # ], 22 | # dt.OBS_LABELS: [ 23 | # ("obsLabels.anndata.zarr", ft.OBS_LABELS_ANNDATA_ZARR), 24 | # ("obsLabels.csv", ft.OBS_LABELS_CSV), 25 | # ], 26 | # dt.OBS_LOCATIONS: [ 27 | # ("obsLocations.anndata.zarr", ft.OBS_LOCATIONS_ANNDATA_ZARR), 28 | # ("obsLocations.csv", ft.OBS_LOCATIONS_CSV), 29 | # ], 30 | dt.MOLECULES: [ 31 | ("molecules.json", ft.MOLECULES_JSON), 32 | ], 33 | # dt.OBS_SETS: [ 34 | # ("cell-sets.json", ft.CELL_SETS_JSON), 35 | # ("obsSets.anndata.zarr", ft.OBS_SETS_ANNDATA_ZARR), 36 | # ("anndata-cell-sets.zarr", ft.ANNDATA_CELL_SETS_ZARR), 37 | # (SINGLE_ZARR, ft.OBS_SETS_ANNDATA_ZARR), 38 | # ], 39 | dt.RASTER: [ 40 | ("raster.ome-zarr", "raster.ome-zarr"), 41 | ("raster.json", ft.RASTER_JSON), 42 | ], 43 | # dt.OBS_FEATURE_MATRIX: [ 44 | # ("obsFeatureMatrix.anndata.zarr", ft.OBS_FEATURE_MATRIX_ANNDATA_ZARR), 45 | # ("expression-matrix.zarr", ft.EXPRESSION_MATRIX_ZARR), 46 | # ("anndata-expression-matrix.zarr", ft.ANNDATA_EXPRESSION_MATRIX_ZARR), 47 | # ("clusters.json", ft.CLUSTERS_JSON), 48 | # ("genes.json", ft.GENES_JSON), 49 | # (SINGLE_ZARR, ft.OBS_FEATURE_MATRIX_ANNDATA_ZARR), 50 | # ], 51 | dt.NEIGHBORHOODS: [ 52 | ("neighborhoods.json", ft.NEIGHBORHOODS_JSON), 53 | ], 54 | dt.GENOMIC_PROFILES: [ 55 | ("genomic-profiles.zarr", ft.GENOMIC_PROFILES_ZARR), 56 | ], 57 | } 58 | 59 | DEFAULT_OPTIONS = { 60 | ft.ANNDATA_ZARR: { 61 | "mappings": { 62 | "obsm/X_umap": [0, 1], 63 | }, 64 | "factors": [ 65 | "obs/sample", 66 | ], 67 | "spatial": { 68 | "xy": "obsm/spatial", 69 | }, 70 | "sets": ["obs/sample"], 71 | "matrix": "X", 72 | } 73 | } 74 | 75 | 76 | def hconcat(*cms): 77 | return 
"({})".format(("|").join(cms)) 78 | 79 | 80 | def vconcat(*cms): 81 | return "({})".format(("/").join(cms)) 82 | 83 | 84 | DEFAULT_LAYOUTS = { 85 | "minimal": hconcat(cm.SPATIAL.value, cm.LAYER_CONTROLLER.value), 86 | "simple": hconcat( 87 | cm.SPATIAL.value, 88 | hconcat( 89 | cm.LAYER_CONTROLLER.value, vconcat(cm.FEATURE_LIST.value, cm.OBS_SETS.value) 90 | ), 91 | ), 92 | "advanced": hconcat( 93 | cm.LAYER_CONTROLLER.value, 94 | cm.SPATIAL.value, 95 | hconcat( 96 | vconcat(cm.SCATTERPLOT.value, cm.OBS_SETS.value), 97 | cm.FEATURE_LIST.value, 98 | ), 99 | cm.GENOMIC_PROFILES.value, 100 | ), 101 | } 102 | 103 | # Coordination Types required by Components/Views 104 | COMPONENTS_COORDINATION_TYPES = {cm.SCATTERPLOT: [ct.EMBEDDING_TYPE]} 105 | 106 | # Data Types required by Components/Views 107 | COMPONENTS_DATA_TYPES = { 108 | cm.SCATTERPLOT: set([dt.OBS_EMBEDDING]), 109 | cm.HEATMAP: set([dt.OBS_FEATURE_MATRIX]), 110 | cm.SPATIAL: set([dt.RASTER, dt.OBS_LOCATIONS, dt.MOLECULES]), 111 | cm.LAYER_CONTROLLER: set([dt.RASTER, dt.OBS_LOCATIONS, dt.MOLECULES]), 112 | cm.GENOMIC_PROFILES: set([dt.GENOMIC_PROFILES]), 113 | cm.FEATURE_LIST: set([dt.OBS_FEATURE_MATRIX]), 114 | cm.OBS_SETS: set([dt.OBS_SETS]), 115 | cm.OBS_SET_SIZES: set([dt.OBS_SETS]), 116 | cm.OBS_SET_FEATURE_VALUE_DISTRIBUTION: set([dt.OBS_FEATURE_MATRIX]), 117 | } 118 | -------------------------------------------------------------------------------- /bin/constants/suffixes.py: -------------------------------------------------------------------------------- 1 | ANNDATA_ZARR_SUFFIX = "anndata.zarr" 2 | MOLECULES_JSON_SUFFIX = "molecules.json" 3 | -------------------------------------------------------------------------------- /bin/generate_image.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | generate_image.py 4 | ==================================== 5 | Generates raw/label images from spatial data 6 | """ 7 | 8 | from __future__ import annotations 9 | import fire 10 | import typing as T 11 | import tifffile as tf 12 | from process_spaceranger import visium_label 13 | from process_xenium import xenium_label 14 | from process_merscope import merscope_label, merscope_raw 15 | 16 | 17 | def create_img( 18 | stem: str, 19 | img_type: str, 20 | file_type: str, 21 | file_path: str, 22 | ref_img: str = None, 23 | args: dict[str, T.Any] = {}, 24 | ) -> None: 25 | """This function calls the corresponding function 26 | to write a label image given the metadata provided. 27 | It also obtains the image shape of a reference image if specified. 28 | 29 | Args: 30 | stem (str): Prefix for the output image filename. 31 | file_type (str): Type of file containing the metadata from which to 32 | generate the label image. 33 | file_path (str): Path to the metadata file. 34 | ref_img (str, optional): Path to reference image from which to get the 35 | shape for the label image. Defaults to None. 36 | args (dict[str,T.Any], optional): Args to be passed to the appropriate processing function. 37 | Defaults to {}. 
38 | """ 39 | 40 | if ref_img: 41 | tif_img = tf.TiffFile(ref_img) 42 | args["shape"] = tif_img.pages[0].shape[:2] 43 | 44 | if img_type == "label": 45 | if file_type == "visium": 46 | visium_label(stem, file_path, **args) 47 | elif file_type == "merscope": 48 | merscope_label(stem, file_path, **args) 49 | elif file_type == "xenium": 50 | xenium_label(stem, file_path, **args) 51 | elif img_type == "raw": 52 | if file_type == "merscope": 53 | merscope_raw(stem, file_path, **args) 54 | 55 | 56 | if __name__ == "__main__": 57 | fire.Fire(create_img) 58 | -------------------------------------------------------------------------------- /bin/integrate_image.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import fire 4 | import os 5 | import tifffile as tf 6 | import numpy as np 7 | import logging 8 | from ome_zarr.reader import Reader 9 | from ome_zarr.io import parse_url 10 | import shutil 11 | from ome_zarr.writer import write_multiscale 12 | import zarr 13 | 14 | 15 | def add_offset(label, offset: int): 16 | max_id = np.max(label) 17 | mask = label != 0 18 | reindexed_label = (label + offset) * mask 19 | reindexed_max_id = np.max(reindexed_label) 20 | logging.info(max_id, reindexed_max_id) 21 | return reindexed_label 22 | 23 | 24 | def reindex_label(label_image: str, offset: int, out_filename: str) -> None: 25 | label = tf.imread(label_image).astype(np.int32) 26 | reindexed_label = add_offset(label, offset) 27 | tf.imwrite(out_filename, reindexed_label) 28 | 29 | 30 | def reindex_label_zarr(label_image_path: str, offset: int, out_filename: str) -> None: 31 | binary_path = ( 32 | label_image_path 33 | if label_image_path.endswith("/0") 34 | else os.path.join(label_image_path, "0") 35 | ) 36 | reader = Reader(parse_url(binary_path)) 37 | nodes = list(reader()) 38 | labels = nodes[0].data 39 | reindexed_labels = [add_offset(x, offset) for x in labels] 40 | os.makedirs(f"{out_filename}/OME", exist_ok=True) 41 | store = parse_url(out_filename, mode="w").store 42 | tmp_group = zarr.group(store=store) 43 | write_multiscale(reindexed_labels, tmp_group, compute=True) 44 | zarr.consolidate_metadata(out_filename) 45 | shutil.copy( 46 | label_image_path + "/OME/METADATA.ome.xml", 47 | f"{out_filename}/OME/METADATA.ome.xml", 48 | ) 49 | 50 | 51 | def process_image(label_image_path: str, **kwargs) -> None: 52 | ext = os.path.splitext(label_image_path)[-1] 53 | if ext.lower() in [".tif", ".tiff"]: 54 | reindex_label(label_image_path, **kwargs) 55 | elif ext.lower() in [".zarr"]: 56 | reindex_label_zarr(label_image_path, **kwargs) 57 | return 58 | 59 | 60 | if __name__ == "__main__": 61 | fire.Fire(process_image) 62 | -------------------------------------------------------------------------------- /bin/ome_zarr_metadata.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | ome_zarr_metadata.py 4 | ==================================== 5 | Gets OME XML basic metadata 6 | """ 7 | 8 | import json 9 | import fire 10 | from xml.etree import ElementTree as ET 11 | 12 | 13 | def get_metadata(xml_path: str) -> str: 14 | """Function that parses an OME XML file 15 | and dumps basic metadata as a JSON formatted str 16 | 17 | Args: 18 | xml_path (str): Path to OME XML file 19 | 20 | Returns: 21 | str: JSON formatted metadata 22 | """ 23 | NS = {"ome": "http://www.openmicroscopy.org/Schemas/OME/2016-06"} 24 | 25 | ome_metadata = ET.parse(xml_path) 26 | dimOrder = 
ome_metadata.find("./*/ome:Pixels", NS).attrib["DimensionOrder"] 27 | X = ome_metadata.find("./*/ome:Pixels", NS).attrib["SizeX"] 28 | Y = ome_metadata.find("./*/ome:Pixels", NS).attrib["SizeY"] 29 | Z = ome_metadata.find("./*/ome:Pixels", NS).attrib["SizeZ"] 30 | C = ome_metadata.find("./*/ome:Pixels", NS).attrib["SizeC"] 31 | T = ome_metadata.find("./*/ome:Pixels", NS).attrib["SizeT"] 32 | 33 | channel_names = [ 34 | channel.attrib["Name"] 35 | for channel in ome_metadata.findall("./**/ome:Channel", NS) 36 | if "Name" in channel.attrib 37 | ] 38 | 39 | md = { 40 | "dimOrder": dimOrder, 41 | "channel_names": channel_names, 42 | "X": X, 43 | "Y": Y, 44 | "Z": Z, 45 | "C": C, 46 | "T": T, 47 | } 48 | return json.dumps(md) 49 | 50 | 51 | if __name__ == "__main__": 52 | fire.Fire(get_metadata) 53 | -------------------------------------------------------------------------------- /bin/process_molecules.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | process_molecules.py 4 | ==================================== 5 | Processes molecules files 6 | """ 7 | 8 | import os 9 | import csv 10 | import json 11 | import fire 12 | from constants.suffixes import MOLECULES_JSON_SUFFIX 13 | 14 | 15 | def tsv_to_json( 16 | path: str, 17 | stem: str, 18 | has_header: bool = True, 19 | gene_col_name: str = "Name", 20 | x_col_name: str = "x_int", 21 | y_col_name: str = "y_int", 22 | delimiter: str = "\t", 23 | x_scale: float = 1.0, 24 | y_scale: float = 1.0, 25 | x_offset: float = 0.0, 26 | y_offset: float = 0.0, 27 | gene_col_idx: int = None, 28 | x_col_idx: int = None, 29 | y_col_idx: int = None, 30 | filter_col_name: str = None, 31 | filter_col_idx: int = None, 32 | filter_col_value: str = None, 33 | ) -> str: 34 | """This function loads a TSV/CSV file containing gene names, X and Y coordinates 35 | and writes them to a JSON file supported by Vitessce 36 | 37 | Args: 38 | path (str): Path to tsv/csv file 39 | stem (str): Prefix for output JSON file 40 | has_header (bool, optional): If input file contains a header row. Defaults to True. 41 | gene_col_name (str, optional): Column header name where gene names are stored. Defaults to "Name". 42 | x_col_name (str, optional): Column header name where `X` coordinates are stored. Defaults to "x_int". 43 | y_col_name (str, optional): Column header name where `Y` coordinates are stored. Defaults to "y_int". 44 | delimiter (str, optional): Input file delimiter. Defaults to "\t". 45 | x_scale (float, optional): Scale to multiply `X` coordinates by. Defaults to 1.0. 46 | y_scale (float, optional): Scale to multiply `Y` coordinates by. Defaults to 1.0. 47 | x_offset (float, optional): Offset to add to `X` coordinates. Defaults to 0.0. 48 | y_offset (float, optional): Offset to add to `Y` coordinates. Defaults to 0.0. 49 | gene_col_idx (int, optional): Column index where gene names are stored if header is not present. Defaults to None. 50 | x_col_idx (int, optional): Column index where `X` coordinates are stored if header is not present. Defaults to None. 51 | y_col_idx (int, optional): Column index where `Y` coordinates are stored if header is not present. Defaults to None. 52 | filter_col_name (str, optional): Column header name storing values to filter data. Defaults to None. 53 | filter_col_idx (int, optional): Column index storing values to filter data if header is not present. Defaults to None. 54 | filter_col_value (str, optional): Value expected in filter column. 
55 |             If a row has a different value it will not be written to the output file. Defaults to None.
56 | 
57 |     Raises:
58 |         SystemExit: If any column header name is not in the header row.
59 |         ValueError: If coordinate values cannot be parsed to float.
60 | 
61 |     Returns:
62 |         str: Output JSON filename
63 |     """
64 | 
65 |     with open(path) as f:
66 |         reader = csv.reader(f, delimiter=delimiter)
67 | 
68 |         if has_header:
69 |             header = next(reader, None)
70 |             try:
71 |                 if gene_col_idx is None:
72 |                     gene_col_idx = header.index(gene_col_name)
73 |                 if x_col_idx is None or y_col_idx is None:
74 |                     x_col_idx = header.index(x_col_name)
75 |                     y_col_idx = header.index(y_col_name)
76 |                 if filter_col_name is not None:
77 |                     filter_col_idx = header.index(filter_col_name)
78 |             except ValueError as e:
79 |                 raise SystemExit(
80 |                     f"Column name(s) ({gene_col_name}, {x_col_name}, {y_col_name}) not in header"
81 |                 ) from e
82 | 
83 |         molecules_json = {}
84 |         for row in reader:
85 |             try:
86 |                 if filter_col_idx is not None:
87 |                     if row[filter_col_idx] != filter_col_value:
88 |                         continue
89 |                 gene = row[gene_col_idx]
90 |                 molecules_json.setdefault(gene, []).append(
91 |                     [
92 |                         (float(row[x_col_idx]) * x_scale) + x_offset,
93 |                         (float(row[y_col_idx]) * y_scale) + y_offset,
94 |                     ]
95 |                 )
96 |             except ValueError as e:
97 |                 # TODO: add message
98 |                 raise e
99 | 
100 |         json_file = (
101 |             f"{stem}-{MOLECULES_JSON_SUFFIX}"
102 |             if not stem.endswith("-" + os.path.splitext(MOLECULES_JSON_SUFFIX)[0])
103 |             else f"{stem}{os.path.splitext(MOLECULES_JSON_SUFFIX)[1]}"
104 |         )
105 | 
106 |         with open(json_file, "w") as out_file:
107 |             json.dump(molecules_json, out_file)
108 | 
109 |     return json_file
110 | 
111 | 
112 | if __name__ == "__main__":
113 |     fire.Fire(tsv_to_json)
-------------------------------------------------------------------------------- /bin/process_xenium.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | process_xenium.py
4 | ====================================
5 | Processes Xenium output
6 | """
7 | 
8 | from __future__ import annotations
9 | import os
10 | import fire
11 | import json
12 | import zarr
13 | import numpy as np
14 | import scanpy as sc
15 | import pandas as pd
16 | import tifffile as tf
17 | from pathlib import Path
18 | from skimage.draw import polygon
19 | from process_h5ad import h5ad_to_zarr
20 | 
21 | 
22 | def xenium_to_anndata(
23 |     path: str,
24 |     spatial_as_pixel: bool = True,
25 |     resolution: float = 0.2125,
26 |     load_clusters: bool = True,
27 |     load_embeddings: bool = True,
28 | ) -> sc.AnnData:
29 |     """Function to create an AnnData object from Xenium output.
30 | 
31 |     Args:
32 |         path (str): Path to a xenium output directory
33 |         spatial_as_pixel (bool, optional): Boolean indicating whether spatial coordinates should be
34 |             converted to pixels. Defaults to True.
35 |         resolution (float, optional): Pixel resolution. Defaults to 0.2125.
36 |         load_clusters (bool, optional): If cluster files should be included in the
37 |             AnnData object. Defaults to True.
38 |         load_embeddings (bool, optional): If embedding coordinates files should be included
39 |             in the AnnData object. Defaults to True.
40 | 41 | Returns: 42 | AnnData: AnnData object created from the xenium output data 43 | """ 44 | 45 | path = Path(path) 46 | 47 | matrix_file = os.path.join(path, "cell_feature_matrix.h5") 48 | cells_file = os.path.join(path, "cells.csv.gz") 49 | 50 | adata = sc.read_10x_h5(matrix_file) 51 | 52 | with open(os.path.join(path, "experiment.xenium"), "rt") as f: 53 | adata.uns["xenium"] = json.load(f) 54 | 55 | adata.obs = pd.read_csv(cells_file, compression="gzip", index_col="cell_id") 56 | 57 | adata.obsm["X_spatial"] = adata.obs[["x_centroid", "y_centroid"]].to_numpy() 58 | 59 | if spatial_as_pixel: 60 | adata.obsm["X_spatial"] = adata.obsm["X_spatial"] / resolution 61 | 62 | if load_clusters: 63 | for cluster in [ 64 | d for d in (path / "analysis" / "clustering").iterdir() if d.is_dir() 65 | ]: 66 | cluster_name = cluster.name.replace("gene_expression_", "") 67 | cluster_df = pd.read_csv( 68 | cluster / "clusters.csv", 69 | index_col="Barcode", 70 | ) 71 | 72 | clusters = cluster_df.reindex(adata.obs.index, fill_value="Undefined") 73 | adata.obs[cluster_name] = pd.Categorical(clusters["Cluster"].astype(str)) 74 | 75 | if load_embeddings: 76 | embeddings = [ 77 | ("umap", "2_components"), 78 | ("pca", "10_components"), 79 | ] 80 | for embedding, components in embeddings: 81 | components_name = ( 82 | components 83 | if (path / "analysis" / embedding / components).exists() 84 | else f"gene_expression_{components}" 85 | ) 86 | embedding_df = pd.read_csv( 87 | os.path.join( 88 | path / "analysis" / embedding / components_name / "projection.csv" 89 | ), 90 | index_col="Barcode", 91 | ) 92 | 93 | emb = embedding_df.reindex(adata.obs.index, fill_value=0) 94 | adata.obsm[f"X_{embedding}"] = emb.values 95 | 96 | # starting on v1.3 cell_id looks like "aaabinlp-1" 97 | # pd.Categorical.codes converts them to int this is done manually at this step 98 | # instead of reindex_anndata so we control what matches the label image 99 | adata.obs = adata.obs.reset_index() 100 | adata.obs.index = (pd.Categorical(adata.obs["cell_id"]).codes + 1).astype(str) 101 | 102 | return adata 103 | 104 | 105 | def xenium_to_zarr( 106 | path: str, 107 | stem: str, 108 | spatial_as_pixel: bool = True, 109 | resolution: float = 0.2125, 110 | save_h5ad: bool = False, 111 | **kwargs, 112 | ) -> str: 113 | """Function to write to Zarr an AnnData object created from xenium output data 114 | 115 | Args: 116 | path (str): Path to a xenium output directory 117 | stem (str): Prefix for the output Zarr filename 118 | spatial_as_pixel (bool, optional): Boolean indicating whether spatial coordinates should be 119 | converted to pixels. Defaults to True. 120 | resolution (float, optional): Pixel resolution. Defaults to 0.2125. 121 | save_h5ad (bool, optional): If the AnnData object should also be written to an h5ad file. Defaults to False. 122 | 123 | Returns: 124 | str: Output Zarr filename 125 | """ 126 | 127 | adata = xenium_to_anndata(path, spatial_as_pixel, resolution) 128 | if save_h5ad: 129 | adata.write_h5ad(f"tmp-{stem}.h5ad") 130 | zarr_file = h5ad_to_zarr(adata=adata, stem=stem, **kwargs) 131 | 132 | return zarr_file 133 | 134 | 135 | def xenium_label( 136 | stem: str, path: str, shape: tuple[int, int], resolution: float = 0.2125 137 | ) -> None: 138 | """This function writes a label image tif file with drawn labels according to 139 | cell segmentation polygons from Xenium output cells.zarr.zip file 140 | 141 | Args: 142 | stem (str): Prefix for the output image filename. 
143 |         path (str): Path to the Xenium output directory or cells.zarr.zip file
144 |         shape (tuple[int, int]): Output image shape.
145 |         resolution (float, optional): Pixel resolution. Defaults to 0.2125.
146 |     """
147 |     if os.path.isdir(path):
148 |         cells_file = os.path.join(path, "cells.zarr.zip")
149 |         experiment_file = os.path.join(path, "experiment.xenium")
150 |     else:
151 |         cells_file = path
152 |         # assume experiment.xenium sits alongside the given cells.zarr.zip file
153 |         experiment_file = os.path.join(os.path.dirname(path), "experiment.xenium")
154 | 
155 |     z = zarr.open(cells_file, "r")
156 | 
157 |     with open(experiment_file) as f:
158 |         experiment = json.load(f)
159 |     sw_version = float(experiment["analysis_sw_version"][7:10])
160 | 
161 |     if sw_version < 1.3:
162 |         ids = z["cell_id"]
163 |     else:
164 |         ids = z["cell_id"][:, 0]
165 | 
166 |     # starting on v1.3 cell_id looks like "aaabinlp-1"
167 |     # pd.Categorical.codes converts them to int
168 |     # this is required so the label image matches the h5ad ids
169 |     ids = pd.Categorical(ids).codes + 1
170 | 
171 |     # starting on v2.0 vertices change location
172 |     if sw_version < 2.0:
173 |         pols = z["polygon_vertices"][1]
174 |     else:
175 |         pols = z["polygon_sets"][1]["vertices"]
176 | 
177 |     label_img = np.zeros((shape[0], shape[1]), dtype=np.min_scalar_type(max(ids)))
178 | 
179 |     for id, pol in zip(ids, pols):
180 |         pol = pol / resolution
181 |         pol = np.array(list(map(list, pol.reshape(pol.shape[0] // 2, 2))))
182 |         rr, cc = polygon(pol[:, 1], pol[:, 0])
183 |         label_img[rr - 1, cc - 1] = int(id)
184 | 
185 |     tf.imwrite(f"{stem}-label.tif", label_img)
186 | 
187 |     return
188 | 
189 | 
190 | if __name__ == "__main__":
191 |     fire.Fire(xenium_to_zarr)
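192 | 
193 | # Usage note (illustrative, via python-fire): e.g.
194 | #   python process_xenium.py --path ./xenium_outs --stem my-sample
195 | # writes an AnnData Zarr named after the stem (see h5ad_to_zarr for the exact suffix).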
-------------------------------------------------------------------------------- /bin/router.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | router.py
4 | ====================================
5 | Calls file processing functions
6 | """
7 | 
8 | from __future__ import annotations
9 | import fire
10 | import typing as T
11 | 
12 | from process_h5ad import h5ad_to_zarr
13 | from process_molecules import tsv_to_json
14 | from process_spaceranger import spaceranger_to_zarr
15 | from process_merscope import merscope_to_zarr
16 | from process_xenium import xenium_to_zarr
17 | 
18 | 
19 | def process(file_type: str, path: str, stem: str, args: dict[str, T.Any] = {}) -> str:
20 |     """Function that calls the appropriate processing function
21 |     for the input file according to its type
22 | 
23 |     Args:
24 |         file_type (str): Type of file to process
25 |         path (str): Path to file to process
26 |         stem (str): Prefix for output files
27 |         args (dict[str,T.Any], optional): Args to be passed to the appropriate processing function.
28 |             Defaults to {}.
29 | 
30 |     Returns:
31 |         str: Output filename
32 |     """
33 |     func_dict = {
34 |         "spaceranger": spaceranger_to_zarr,
35 |         "xenium": xenium_to_zarr,
36 |         "merscope": merscope_to_zarr,
37 |         "h5ad": h5ad_to_zarr,
38 |         "molecules": tsv_to_json,
39 |     }
40 | 
41 |     out_file = func_dict[file_type](path=path, stem=stem, **args)
42 | 
43 |     return out_file
44 | 
45 | 
46 | if __name__ == "__main__":
47 |     fire.Fire(process)
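48 | 
49 | # Usage note (illustrative, via python-fire): e.g.
50 | #   python router.py --file_type h5ad --path data.h5ad --stem sample
51 | # dispatches to h5ad_to_zarr and returns its output filename (e.g. "sample-anndata.zarr").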
-------------------------------------------------------------------------------- /bin/write_spatialdata.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | write_spatialdata.py
4 | ====================================
5 | Processes H5AD and images into SpatialData
6 | """
7 | 
8 | from __future__ import annotations
9 | from typing import Union
10 | import logging
11 | import warnings
12 | import fire
13 | import tifffile as tf
14 | import anndata as ad
15 | import xarray as xr
16 | import spatialdata as sd
17 | from dask_image.imread import imread
18 | 
19 | warnings.filterwarnings("ignore")
20 | logging.getLogger().setLevel(logging.INFO)
21 | 
22 | 
23 | def read_image(path: str, is_label: bool = False):
24 |     tif = tf.TiffFile(path)
25 |     dims = list(tif.series[0].axes.lower()
26 |         .replace("s", "c")
27 |         .replace("i", "c")
28 |     )
29 |     image = imread(path).squeeze()
30 |     imarray = xr.DataArray(image, dims=dims).chunk(chunks="auto")
31 |     if is_label:
32 |         return sd.models.Labels2DModel.parse(imarray)
33 |     else:
34 |         return sd.models.Image2DModel.parse(imarray)
35 | 
36 | 
37 | def write_spatialdata(
38 |     anndata_path: str,
39 |     stem: str = "",
40 |     raw_img_path: Union[str, list[str]] = [],
41 |     label_img_path: Union[str, list[str]] = [],
42 | ) -> str:
43 |     """This function takes an AnnData object and image files to
44 |     write to a SpatialData object
45 | 
46 |     Args:
47 |         anndata_path (str): Path to the h5ad or AnnData Zarr file.
48 |         stem (str, optional): Prefix for the output file. Defaults to "".
49 |         raw_img_path (str or list[str], optional): Raw image(s) to process. Defaults to [].
50 |         label_img_path (str or list[str], optional): Label image(s) to process. Defaults to [].
51 | 
52 |     Returns:
53 |         str: Output SpatialData filename
54 |     """
55 |     if anndata_path.endswith(".h5ad"):
56 |         adata = ad.read(anndata_path, backed=True)
57 |     elif anndata_path.endswith(".zarr"):
58 |         adata = ad.read_zarr(anndata_path)
59 |     else:
60 |         raise SystemExit("Path to AnnData not .h5ad nor .zarr")
61 | 
62 |     OBS_IDX_NAME = "webatlas_index"
63 |     adata.obs = adata.obs.reset_index(names=OBS_IDX_NAME).set_index(
64 |         OBS_IDX_NAME, drop=False
65 |     )  # have index as both index and column
66 |     adata.obs[OBS_IDX_NAME] = adata.obs[OBS_IDX_NAME].astype(int)
67 | 
68 |     # ensure library_id in obs
69 |     if "library_id" not in adata.obs:
70 |         adata.obs["library_id"] = 0
71 |     adata.obs["library_id"] = adata.obs["library_id"].astype("category")
72 | 
73 |     region_key = "library_id"
74 |     region = adata.obs["library_id"].cat.categories.to_list()
75 | 
76 |     # `region_key`, `region` and `instance_key` are supposed to be optional
77 |     # but enforced by code
78 |     sd.models.TableModel.parse(
79 |         adata, region_key=region_key, region=region, instance_key=OBS_IDX_NAME
80 |     )
81 | 
82 |     sdata = sd.SpatialData(table=adata)
83 | 
84 |     if isinstance(raw_img_path, str):
85 |         raw_img_path = [raw_img_path]
86 |     for raw_img in raw_img_path:
87 |         sdata.add_image("raw", read_image(raw_img))
88 | 
89 |     if isinstance(label_img_path, str):
90 |         label_img_path = [label_img_path]
91 |     for label_img in label_img_path:
92 |         sdata.add_labels("label", read_image(label_img, is_label=True))
93 | 
94 |     zarr_file = f"{stem}-spatialdata.zarr"
95 |     sdata.write(zarr_file)
96 | 
97 |     return zarr_file
98 | 
99 | 
100 | if __name__ == "__main__":
101 |     fire.Fire(write_spatialdata)
-------------------------------------------------------------------------------- /docs/.buildinfo: --------------------------------------------------------------------------------
1 | # Sphinx build info version 1
2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
3 | config: b0bb638516d5929555228a8219f6cc18
4 | tags: 645f666f9bcd5a90fca523b33c5a78b7
-------------------------------------------------------------------------------- /docs/.nojekyll: https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/.nojekyll --------------------------------------------------------------------------------
/docs/_sources/citing.rst.txt: --------------------------------------------------------------------------------
1 | .. _citing:
2 | 
3 | 
4 | Citation
5 | ========
6 | 
7 | |DOI|
8 | 
9 | .. |DOI| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.7405818.svg
10 |     :target: https://doi.org/10.5281/zenodo.7405818
11 | 
12 | If you use this software in a scientific publication, please cite using the following Zenodo reference.
13 | 
14 | **Li, Tong, Horsfall, David, Basurto-Lozada, Daniela, Prete, Martin, Cox, Jessica, & Squires, Iolo. (2023). WebAtlas Pipeline (v0.4.0). Zenodo. https://doi.org/10.5281/zenodo.7863308**
-------------------------------------------------------------------------------- /docs/_sources/examples/visium.rst.txt: --------------------------------------------------------------------------------
1 | .. _example_visium:
2 | 
3 | Visium
4 | ======
5 | 
6 | Sample details
7 | **************
8 | 
9 | .. 
list-table:: 10 | :widths: 25 75 11 | :header-rows: 0 12 | 13 | * - Study Name 14 | - `High resolution mapping of the breast cancer tumor microenvironment using integrated single cell, spatial and in situ analysis of FFPE tissue `__ 15 | * - WebAtlas 16 | - `Demo `__ 17 | * - Tissue 18 | - Human breast cancer 19 | * - Data Source Link 20 | - `CytAssist_FFPE_Human_Breast_Cancer `__ 21 | 22 | Steps to reproduce 23 | ****************** 24 | 25 | Follow the steps below to reproduce this sample in the pipeline, and visualise the data yourself 26 | in your web browser. It can be followed on any POSIX compatible system (Linux, OS X, etc). This 27 | example requires you to have already :ref:`setup your environment first `. 28 | 29 | **#1. Download the sample data** 30 | 31 | .. code-block:: shell 32 | :caption: Input 33 | 34 | mkdir -p input/CytAssist_FFPE_Human_Breast_Cancer 35 | wget https://cf.10xgenomics.com/samples/spatial-exp/2.0.0/CytAssist_FFPE_Human_Breast_Cancer/CytAssist_FFPE_Human_Breast_Cancer_tissue_image.tif -O input/CytAssist_FFPE_Human_Breast_Cancer/tissue_image.tif 36 | wget https://cf.10xgenomics.com/samples/spatial-exp/2.0.0/CytAssist_FFPE_Human_Breast_Cancer/CytAssist_FFPE_Human_Breast_Cancer_analysis.tar.gz -O input/CytAssist_FFPE_Human_Breast_Cancer/analysis.tar.gz 37 | wget https://cf.10xgenomics.com/samples/spatial-exp/2.0.0/CytAssist_FFPE_Human_Breast_Cancer/CytAssist_FFPE_Human_Breast_Cancer_filtered_feature_bc_matrix.h5 -O input/CytAssist_FFPE_Human_Breast_Cancer/filtered_feature_bc_matrix.h5 38 | wget https://cf.10xgenomics.com/samples/spatial-exp/2.0.0/CytAssist_FFPE_Human_Breast_Cancer/CytAssist_FFPE_Human_Breast_Cancer_spatial.tar.gz -O input/CytAssist_FFPE_Human_Breast_Cancer/spatial.tar.gz 39 | 40 | .. code-block:: shell 41 | :caption: Output 42 | 43 | --2023-05-17 21:37:57-- https://cf.10xgenomics.com/samples/spatial-exp/2.0.0/CytAssist_FFPE_Human_Breast_Cancer/CytAssist_FFPE_Human_Breast_Cancer_spatial.tar.gz -O ./input/CytAssist_FFPE_Human_Breast_Cancer/spatial.tar.gz 44 | Resolving cf.10xgenomics.com (cf.10xgenomics.com)... 104.18.0.173, 104.18.1.173, 2606:4700::6812:ad, ... 45 | Connecting to cf.10xgenomics.com (cf.10xgenomics.com)|104.18.0.173|:443... connected. 46 | HTTP request sent, awaiting response... 200 OK 47 | Length: 34479952 (33M) [application/x-tar] 48 | Saving to: ‘input/CytAssist_FFPE_Human_Breast_Cancer/spatial.tar.gz 49 | 50 | ./input/CytAssist_FFPE_Human_Breas 100%[================================================================>] 32.88M --.-KB/s in 0s 51 | 52 | 2023-05-17 21:37:58 (7.16 MB/s) - ‘input/CytAssist_FFPE_Human_Breast_Cancer/spatial.tar.gz’ saved [34479952/34479952] 53 | 54 | **#2. Extract and process sample data** 55 | 56 | .. code-block:: shell 57 | :caption: Input 58 | 59 | tar -xzvf input/CytAssist_FFPE_Human_Breast_Cancer/analysis.tar.gz -C input/CytAssist_FFPE_Human_Breast_Cancer 60 | tar -xzvf input/CytAssist_FFPE_Human_Breast_Cancer/spatial.tar.gz -C input/CytAssist_FFPE_Human_Breast_Cancer 61 | 62 | .. code-block:: shell 63 | :caption: Output 64 | 65 | analysis/ 66 | analysis/umap/ 67 | analysis/umap/gene_expression_2_components/ 68 | analysis/umap/gene_expression_2_components/projection.csv 69 | ... 70 | spatial/scalefactors_json.json 71 | spatial/aligned_fiducials.jpg 72 | spatial/tissue_hires_image.png 73 | 74 | **#3. Run the pipeline** 75 | 76 | .. 
code-block:: shell 77 | :caption: Input 78 | 79 | nextflow run main.nf \ 80 | -params-file templates/examples/CytAssist_FFPE_Human_Breast_Cancer.yaml \ 81 | -entry Full_pipeline 82 | 83 | .. code-block:: shell 84 | :caption: Output 85 | 86 | N E X T F L O W ~ version 22.04.5 87 | Launching `main.nf` [insane_dijkstra] DSL2 - revision: 1b6a73f4d6 88 | [05/d2276b] process > Full_pipeline:Process_files:route_file (spaceranger, CytAssist_FFPE_Human_Breast_Cancer) [100%] 1 of 1 ✔ 89 | [0c/3ffdac] process > Full_pipeline:Process_images:Generate_image ([visium, breast-cancer], label, CytAssist_... [100%] 1 of 1 ✔ 90 | [f1/efaaae] process > Full_pipeline:Process_images:image_to_zarr (tissue_image.tif) [100%] 2 of 2 ✔ 91 | [44/2bcaeb] process > Full_pipeline:Process_images:ome_zarr_metadata (METADATA.ome.xml) [100%] 2 of 2 ✔ 92 | [43/04893d] process > Full_pipeline:Output_to_config:Build_config ([visium, breast-cancer]) [100%] 1 of 1 ✔ 93 | 94 | **#4. Check execution was successful** 95 | 96 | The output from the pipeline will indicate if the execution was successful. You can also 97 | verify the expected directories are created. 98 | 99 | .. code-block:: shell 100 | :caption: Input 101 | 102 | ls -l output/CytAssist_FFPE_Human_Breast_Cancer/0.4.0 103 | 104 | .. code-block:: shell 105 | :caption: Output 106 | 107 | total 1103476 108 | -rw-r--r-- 1 dh74 dh74 288446018 May 17 21:42 tmp-visium-breast-cancer.h5ad 109 | drwxrwxr-x 11 dh74 dh74 4096 May 17 21:42 visium-breast-cancer-anndata.zarr 110 | -rw-r--r-- 1 dh74 dh74 4667 May 17 21:43 visium-breast-cancer-config.json 111 | -rw-r--r-- 1 dh74 dh74 841484966 May 17 21:42 visium-breast-cancer-label.tif 112 | drwxrwxr-x 4 dh74 dh74 4096 May 17 21:43 visium-breast-cancer-label.zarr 113 | drwxrwxr-x 4 dh74 dh74 4096 May 17 21:43 visium-breast-cancer-raw.zarr 114 | 115 | **#5. Serve the data output through a local web server** 116 | 117 | To browse and explore the data, you need to serve the output data through a web server. 118 | You can use your preferred web server, but you must ensure the data is served over port 3000, 119 | at http://localhost:3000, and that CORS is enabled via the Access-Control-Allow-Origin header. 120 | 121 | .. code-block:: shell 122 | :caption: Input 123 | 124 | npx http-server output/CytAssist_FFPE_Human_Breast_Cancer/0.4.0 --port 3000 --cors 125 | 126 | .. code-block:: shell 127 | :caption: Output 128 | 129 | Starting up http-server, serving ./ 130 | 131 | http-server version: 14.1.1 132 | 133 | http-server settings: 134 | CORS: true 135 | Cache: 3600 seconds 136 | Connection Timeout: 120 seconds 137 | Directory Listings: visible 138 | AutoIndex: visible 139 | Serve GZIP Files: false 140 | Serve Brotli Files: false 141 | Default File Extension: none 142 | 143 | Available on: 144 | http://127.0.0.1:3000 145 | http://192.168.0.23:3000 146 | Hit CTRL-C to stop the server 147 | 148 | **#6. Explore data in your browser** 149 | 150 | Start your web browser and open: 151 | 152 | https://webatlas.cog.sanger.ac.uk/latest/index.html?theme=dark&config=http://127.0.0.1:3000/visium-breast-cancer-config.json -------------------------------------------------------------------------------- /docs/_sources/examples/xenium.rst.txt: -------------------------------------------------------------------------------- 1 | .. _example_xenium: 2 | 3 | Xenium 4 | ====== 5 | 6 | Sample details 7 | ************** 8 | 9 | .. 
list-table:: 10 | :widths: 25 75 11 | :header-rows: 0 12 | 13 | * - Study Name 14 | - `High resolution mapping of the breast cancer tumor microenvironment using integrated single cell, spatial and in situ analysis of FFPE tissue `__ 15 | * - WebAtlas 16 | - `Demo `__ 17 | * - Tissue 18 | - Human breast cancer 19 | * - Data Source Link 20 | - `Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.zip `__ 21 | 22 | Steps to reproduce 23 | ****************** 24 | 25 | Follow the steps below to reproduce this sample in the pipeline, and visualise the data yourself 26 | in your web browser. It can be followed on any POSIX compatible system (Linux, OS X, etc). This 27 | example requires you to have already :ref:`setup your environment first `. 28 | 29 | **#1. Download the sample data** 30 | 31 | .. code-block:: shell 32 | :caption: Input 33 | 34 | mkdir -p input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs 35 | wget https://cf.10xgenomics.com/samples/xenium/1.0.1/Xenium_FFPE_Human_Breast_Cancer_Rep1/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.zip -P input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs 36 | 37 | .. code-block:: shell 38 | :caption: Output 39 | 40 | --2023-05-17 15:05:24-- https://cf.10xgenomics.com/samples/xenium/1.0.1/Xenium_FFPE_Human_Breast_Cancer_Rep1/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.zip 41 | Resolving cf.10xgenomics.com (cf.10xgenomics.com)... 104.18.0.173, 104.18.1.173, 2606:4700::6812:ad, ... 42 | Connecting to cf.10xgenomics.com (cf.10xgenomics.com)|104.18.0.173|:443... connected. 43 | HTTP request sent, awaiting response... 200 OK 44 | Length: 9861155708 (9.2G) [application/zip] 45 | Saving to: ‘input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.zip’ 46 | 47 | Xenium_FFPE_Human_Breast_Cancer_Rep1 100%[===================================================================>] 9.18G 14.3MB/s in 10m 6s 48 | 49 | 2023-05-17 15:15:31 (15.5 MB/s) - ‘input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.zip’ saved [9861155708/9861155708] 50 | 51 | **#2. Extract the sample data** 52 | 53 | .. code-block:: shell 54 | :caption: Input 55 | 56 | unzip input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.zip -d input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs 57 | 58 | .. code-block:: shell 59 | :caption: Output 60 | 61 | Archive: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.zip 62 | creating: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/ 63 | inflating: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/experiment.xenium 64 | creating: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/cell_feature_matrix/ 65 | inflating: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/cell_feature_matrix/barcodes.tsv.gz 66 | ... 67 | ... 68 | inflating: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/metrics_summary.csv 69 | inflating: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/gene_panel.json 70 | inflating: input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/analysis_summary.html 71 | 72 | **#3. Run the pipeline** 73 | 74 | .. code-block:: shell 75 | :caption: Input 76 | 77 | nextflow run main.nf \ 78 | -params-file templates/examples/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.yaml \ 79 | -entry Full_pipeline 80 | 81 | .. 
code-block:: shell
82 |     :caption: Output
83 | 
84 |     N E X T F L O W ~ version 22.10.6
85 |     Launching `main.nf` [gigantic_murdock] DSL2 - revision: 1b6a73f4d6
86 |     [fc/782a3f] process > Full_pipeline:Process_files:route_file (xenium, outs) [100%] 1 of 1 ✔
87 |     [b0/f5ff27] process > Full_pipeline:Process_images:Generate_image ([xenium, breast-cancer], label, outs) [100%] 1 of 1 ✔
88 |     [2b/054048] process > Full_pipeline:Process_images:image_to_zarr (morphology.ome.tif) [100%] 2 of 2 ✔
89 |     [07/5e37c4] process > Full_pipeline:Process_images:ome_zarr_metadata (METADATA.ome.xml) [100%] 2 of 2 ✔
90 |     [c8/f2378c] process > Full_pipeline:Output_to_config:Build_config ([xenium, breast-cancer]) [100%] 1 of 1 ✔
91 | 
92 |     Completed at: 17-May-2023 16:40:58
93 |     Duration    : 32m 47s
94 |     CPU hours   : 0.6
95 |     Succeeded   : 7
96 | 
97 | **#4. Check execution was successful**
98 | 
99 | The output from the pipeline will indicate if the execution was successful. You can also
100 | verify the expected directories are created.
101 | 
102 | .. code-block:: shell
103 |     :caption: Input
104 | 
105 |     ls -l output/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/0.4.0
106 | 
107 | .. code-block:: shell
108 |     :caption: Output
109 | 
110 |     total 3566252
111 |     drwxrwxr-x 11 dh74 dh74       4096 May 17 16:08 xenium-breast-cancer-anndata.zarr
112 |     -rw-r--r--  1 dh74 dh74       4984 May 17 16:40 xenium-breast-cancer-config.json
113 |     -rw-r--r--  1 dh74 dh74 3651814848 May 17 16:12 xenium-breast-cancer-label.tif
114 |     drwxrwxr-x  4 dh74 dh74       4096 May 17 16:13 xenium-breast-cancer-label.zarr
115 |     drwxrwxr-x  4 dh74 dh74       4096 May 17 16:40 xenium-breast-cancer-raw.zarr
116 | 
117 | **#5. Serve the data output through a local web server**
118 | 
119 | To browse and explore the data, you need to serve the output data through a web server.
120 | You can use your preferred web server, but you must ensure the data is served over port 3000,
121 | at http://localhost:3000, and that CORS is enabled via the Access-Control-Allow-Origin header.
122 | 
123 | .. code-block:: shell
124 |     :caption: Input
125 | 
126 |     npx http-server output/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/0.4.0 --port 3000 --cors
127 | 
128 | .. code-block:: shell
129 |     :caption: Output
130 | 
131 |     Starting up http-server, serving ./
132 | 
133 |     http-server version: 14.1.1
134 | 
135 |     http-server settings:
136 |     CORS: true
137 |     Cache: 3600 seconds
138 |     Connection Timeout: 120 seconds
139 |     Directory Listings: visible
140 |     AutoIndex: visible
141 |     Serve GZIP Files: false
142 |     Serve Brotli Files: false
143 |     Default File Extension: none
144 | 
145 |     Available on:
146 |       http://127.0.0.1:3000
147 |       http://192.168.0.23:3000
148 |     Hit CTRL-C to stop the server
149 | 
150 | **#6. Explore data in your browser**
151 | 
152 | Start your web browser and open:
153 | 
154 | https://webatlas.cog.sanger.ac.uk/latest/index.html?theme=dark&config=http://127.0.0.1:3000/xenium-breast-cancer-config.json
-------------------------------------------------------------------------------- /docs/_sources/index.rst.txt: --------------------------------------------------------------------------------
1 | |Tests| |Sphinx| |Coverage| |DOI|
2 | 
3 | .. |Tests| image:: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/tests-python.yml/badge.svg
4 |     :target: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/tests-python.yml
|Sphinx| image:: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/build-sphinx.yml/badge.svg 6 | :target: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/build-sphinx.yml 7 | .. |Coverage| image:: https://codecov.io/gh/haniffalab/webatlas-pipeline/branch/main/graph/badge.svg?token=7HQVFH08WJ 8 | :target: https://app.codecov.io/gh/haniffalab/webatlas-pipeline 9 | .. |DOI| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.7405818.svg 10 | :target: https://doi.org/10.5281/zenodo.7405818 11 | 12 | WebAtlas pipeline 13 | ================= 14 | 15 | This Nextflow pipeline processes spatial and single-cell experiment data for visualisation in `WebAtlas App`_. 16 | The pipeline generates data files for `supported data types`_, and builds a `view config`_. 17 | 18 | .. _WebAtlas App: https://github.com/haniffalab/webatlas-app 19 | .. _supported data types: https://vitessce.io/docs/data-types-file-types/ 20 | .. _view config: https://vitessce.io/docs/view-config-json/ 21 | 22 | .. toctree:: 23 | :maxdepth: 1 24 | :caption: Documentation 25 | :glob: 26 | 27 | installation 28 | configuration 29 | run 30 | visualise 31 | Demos 32 | testing 33 | modules 34 | 35 | Indices and tables 36 | ================== 37 | * :ref:`genindex` 38 | * :ref:`modindex` 39 | * :ref:`search` 40 | 41 | .. toctree:: 42 | :maxdepth: 2 43 | :caption: Multimodal 44 | 45 | multimodal/overview 46 | multimodal/configuration 47 | multimodal/run 48 | multimodal/visualise 49 | 50 | .. toctree:: 51 | :maxdepth: 2 52 | :hidden: 53 | :caption: Example Workflows 54 | 55 | examples/visium 56 | examples/xenium 57 | 58 | .. toctree:: 59 | :maxdepth: 2 60 | :hidden: 61 | :caption: Project Links 62 | 63 | citing 64 | Source Code 65 | Issue Tracker 66 | WebAtlas App 67 | 68 | .. toctree:: 69 | :maxdepth: 2 70 | :hidden: 71 | :caption: Project Teams 72 | 73 | Bayraktar Lab 74 | Haniffa Lab 75 | Open Microscopy Environment 76 | -------------------------------------------------------------------------------- /docs/_sources/installation.rst.txt: -------------------------------------------------------------------------------- 1 | .. _official nextflow documentation: https://www.nextflow.io/index.html#GetStarted 2 | .. _official Docker Install guide: https://docs.docker.com/engine/install/ 3 | .. _releases on GitHub: https://github.com/haniffalab/webatlas-pipeline/releases 4 | .. _conda: https://docs.conda.io/projects/miniconda/en/latest/ 5 | .. _mamba: https://mamba.readthedocs.io/en/latest/mamba-installation.html 6 | 7 | .. _installation: 8 | 9 | Installation 10 | ============ 11 | 12 | Download the WebAtlas Pipeline release. You can look for previous `releases on GitHub`_. 13 | 14 | .. code-block:: shell 15 | :caption: Input 16 | 17 | wget https://github.com/haniffalab/webatlas-pipeline/archive/refs/tags/v0.4.0.tar.gz 18 | 19 | .. code-block:: shell 20 | :caption: Expected Output 21 | 22 | Resolving github.com (github.com)... 140.82.121.3 23 | Connecting to github.com (github.com)|140.82.121.3|:443... connected. 24 | HTTP request sent, awaiting response... 302 Found 25 | Location: https://codeload.github.com/haniffalab/webatlas-pipeline/tar.gz/refs/tags/v0.4.0 [following] 26 | --2023-05-18 09:30:15-- https://codeload.github.com/haniffalab/webatlas-pipeline/tar.gz/refs/tags/v0.4.0 27 | Resolving codeload.github.com (codeload.github.com)... 140.82.121.9 28 | Connecting to codeload.github.com (codeload.github.com)|140.82.121.9|:443... connected. 29 | HTTP request sent, awaiting response... 
200 OK 30 | Length: unspecified [application/x-gzip] 31 | Saving to: ‘v0.4.0.tar.gz’ 32 | 33 | v0.4.0.tar.gz [ <=> ] 2.70M 9.12MB/s in 0.3s 34 | 35 | 2023-05-18 09:30:16 (9.12 MB/s) - ‘v0.4.0.tar.gz’ saved [2835534] 36 | 37 | Extract the compressed WebAtlas release and change directory into the new repository. 38 | 39 | .. code-block:: shell 40 | :caption: Input 41 | 42 | tar -xzvf ./v0.4.0.tar.gz 43 | cd webatlas-pipeline-0.4.0 44 | 45 | .. code-block:: shell 46 | :caption: Expected Output 47 | 48 | webatlas-pipeline-0.4.0/ 49 | webatlas-pipeline-0.4.0/.github/ 50 | ... 51 | ... 52 | webatlas-pipeline-0.4.0/tests/input/simple_config.json 53 | webatlas-pipeline-0.4.0/tests/test_class.py 54 | 55 | .. _environment: 56 | 57 | Environment setup 58 | ================= 59 | 60 | .. _environment_conda: 61 | 62 | Follow these environment setup instructions to run WebAtlas, either using conda or by manually installing the required components. 63 | 64 | Using conda 65 | ----------- 66 | 67 | If you have `conda`_ or `mamba`_ already installed then you can use the ``environment.yaml`` file included in the WebAtlas release to create the environment. 68 | 69 | .. code-block:: shell 70 | :caption: Input 71 | 72 | conda env create -f environment.yaml 73 | 74 | Then make sure you activate the ``webatlas`` environment before you use the pipeline. 75 | 76 | .. code-block:: shell 77 | :caption: Input 78 | 79 | conda activate webatlas 80 | 81 | 82 | .. _environment_manual: 83 | 84 | Manual setup 85 | ------------ 86 | 87 | **#1. Check git is installed** 88 | 89 | Make sure git 2.17 or later is installed on your computer by using the command: 90 | 91 | .. code-block:: shell 92 | :caption: Input 93 | 94 | git --version 95 | 96 | .. code-block:: shell 97 | :caption: Output 98 | 99 | git version 2.25.1 100 | 101 | If Git is missing you will have to follow the `Getting Started Installing Git guide `__. 102 | 103 | **#2. Check java is installed** 104 | 105 | Make sure Java 11 or later is installed on your computer by using the command: 106 | 107 | .. code-block:: shell 108 | :caption: Input 109 | 110 | java -version 111 | 112 | .. code-block:: shell 113 | :caption: Output 114 | 115 | openjdk version "11.0.18" 2023-01-17 116 | OpenJDK Runtime Environment (build 11.0.18+10-post-Ubuntu-0ubuntu120.04.1) 117 | OpenJDK 64-Bit Server VM (build 11.0.18+10-post-Ubuntu-0ubuntu120.04.1, mixed mode, sharing) 118 | 119 | If not installed, `download and install Java `__. 120 | 121 | **#3. Install Nextflow** 122 | 123 | Enter the following command in your terminal to install nextflow in the current directory: 124 | 125 | .. code-block:: shell 126 | :caption: Input 127 | 128 | curl -s https://get.nextflow.io | bash 129 | 130 | .. code-block:: shell 131 | :caption: Output 132 | 133 | CAPSULE: Downloading dependency org.apache.ivy:ivy:jar:2.5.1 134 | ... 135 | CAPSULE: Downloading dependency io.nextflow:nf-commons:jar:23.04.1 136 | 137 | N E X T F L O W 138 | version 23.04.1 build 5866 139 | created 15-04-2023 06:51 UTC (07:51 BST) 140 | cite doi:10.1038/nbt.3820 141 | http://nextflow.io 142 | 143 | 144 | Nextflow installation completed. Please note: 145 | - the executable file `nextflow` has been created in the folder: ./webatlas-pipeline 146 | - you may complete the installation by moving it to a directory in your $PATH 147 | 148 | You can read more about how to install nextflow in the `official nextflow documentation`_. 149 | 150 | **#4. 
Check Docker is installed** 151 | 152 | Make sure Docker Engine 20.10 or later is installed on your computer by using the command: 153 | 154 | .. code-block:: shell 155 | :caption: Input 156 | 157 | docker version 158 | 159 | .. code-block:: shell 160 | :caption: Output 161 | 162 | Client: Docker Engine - Community 163 | Version: 23.0.4 164 | API version: 1.42 165 | Go version: go1.19.8 166 | Git commit: f480fb1 167 | Built: Fri Apr 14 10:32:23 2023 168 | OS/Arch: linux/amd64 169 | Context: default 170 | 171 | Follow the `official Docker Install guide`_ if it is not installed already. 172 | 173 | **#5. Build local docker images (optional)** 174 | 175 | When using Docker the pipeline can use local images or pull them from Docker Hub. If you want to build the images yourself you can do it like this: 176 | 177 | :: 178 | 179 | cd envs 180 | ./build-docker-imgs.sh 181 | 182 | -------------------------------------------------------------------------------- /docs/_sources/modules.rst.txt: -------------------------------------------------------------------------------- 1 | 2 | Modules 3 | ======= 4 | 5 | .. automodule:: build_config 6 | :members: 7 | 8 | .. automodule:: constants 9 | :members: 10 | 11 | .. automodule:: consolidate_md 12 | :members: 13 | 14 | .. automodule:: generate_image 15 | :members: 16 | 17 | .. automodule:: ome_zarr_metadata 18 | :members: 19 | 20 | .. automodule:: process_h5ad 21 | :members: 22 | 23 | .. automodule:: process_molecules 24 | :members: 25 | 26 | .. automodule:: process_spaceranger 27 | :members: 28 | 29 | .. automodule:: process_xenium 30 | :members: 31 | 32 | .. automodule:: process_merscope 33 | :members: 34 | 35 | .. automodule:: integrate_anndata 36 | :members: 37 | 38 | .. automodule:: integrate_image 39 | :members: 40 | 41 | .. automodule:: build_config_multimodal 42 | :members: -------------------------------------------------------------------------------- /docs/_sources/multimodal/configuration.rst.txt: -------------------------------------------------------------------------------- 1 | .. _multimodal_configuration: 2 | 3 | ######################## 4 | Multimodal configuration 5 | ######################## 6 | 7 | After running the main conversion pipeline you can populate the required YAML parameters file to run the multimodal integration pipeline. 8 | 9 | .. _multimodal_parameters_file: 10 | 11 | *************** 12 | Parameters file 13 | *************** 14 | 15 | The parameters file looks like this: 16 | 17 | .. code-block:: yaml 18 | 19 | outdir: "/path/to/output/" 20 | 21 | url: http://localhost:3000/ 22 | project: my_project 23 | title: "My Project" 24 | 25 | data: 26 | - 27 | dataset: scrnaseq 28 | obs_type: cell 29 | anndata: /path/to/main/output/scrnaseq-anndata.zarr 30 | offset: 0 31 | is_spatial: false 32 | vitessce_options: 33 | spatial: 34 | xy: obsm/spatial 35 | mappings: 36 | obsm/X_umap: [0,1] 37 | matrix: X 38 | - 39 | dataset: visium 40 | obs_type: spot 41 | anndata: /path/to/main/output/visium-anndata.zarr 42 | offset: 1000000 43 | is_spatial: true 44 | raw_image: /path/to/main/output/visium-raw.zarr 45 | label_image: /path/to/main/output/visium-label.zarr 46 | vitessce_options: 47 | spatial: 48 | xy: obsm/spatial 49 | matrix: X 50 | 51 | In contrast to the main conversion pipeline's parameters file, this file includes a single `project` to which multiple `datasets` belong. 52 | 53 | Each ``dataset`` block defines the name of the dataset and paths to the converted data and image files (if any). 
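Before launching ``multimodal.nf`` it can be worth confirming that every path referenced in the parameters file actually exists. The following is a minimal sketch (not part of the pipeline; the ``multimodal-params.yaml`` file name is an assumption) that walks the ``data`` list and reports any missing inputs:

.. code-block:: python

    # check_inputs.py - sanity-check the paths in a multimodal parameters file
    from pathlib import Path

    import yaml  # provided by the PyYAML package

    params = yaml.safe_load(Path("multimodal-params.yaml").read_text())

    for entry in params["data"]:
        # raw_image/label_image are only present for spatial datasets
        for key in ("anndata", "raw_image", "label_image"):
            if key in entry:
                path = Path(entry[key])
                status = "OK" if path.exists() else "MISSING"
                print(f"[{status}] {entry['dataset']}: {key} -> {path}")

Any ``MISSING`` line means the corresponding path in the parameters file needs correcting before the run.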
54 | 55 | Each ``dataset`` also requires a set of ``vitessce_options`` that specify the location of certain data (spatial coordinates, embeddings, expression matrix, etc.) within the AnnData object that is processed/generated. 56 | This follows the same structure as in the :ref:`main conversion's vitessce_options `. 57 | 58 | Additionally, each ``dataset`` requires: 59 | 60 | * ``obs_type``, the type of observation of the dataset. For example, "cell" or "spot". 61 | * ``offset``, an integer offset to add to the dataset's IDs so they don't clash with those of the other datasets. 62 | * ``is_spatial``, whether the dataset contains spatial information and has associated image files (raw and/or label images). 63 | 64 | Given that raw images are only read, not modified, the pipeline does not generate new output from them. 65 | So that the output directory (defined by ``outdir``) contains all the files that need to be served for the web application to consume, 66 | the pipeline by default copies the raw images to the output directory. 67 | This process can take a long time depending on the size of the image. 68 | You may want to manually copy or move the image, or serve it from its own directory separate from the rest of the output. 69 | The default copying can be disabled by setting ``copy_raw: false`` as a project-wide parameter (at the same level as ``outdir``, ``project``, etc.). 70 | For example, 71 | 72 | .. code-block:: yaml 73 | 74 | outdir: "/path/to/output/" 75 | url: http://localhost:3000/ 76 | project: my_project 77 | title: "My Project" 78 | copy_raw: false 79 | 80 | 81 | With additional features 82 | ======================== 83 | 84 | Using the above example parameters file to run the multimodal integration pipeline will run the reindexing and intersection steps. 85 | To perform the concatenation of additional features (like celltypes) to visualise them as continuous values, some extra parameters need to be added. 86 | 87 | As a project-wide parameter (at the same level as ``outdir``, ``project``, etc.): 88 | 89 | * ``extend_feature_name``, the name of the additional feature. For example, "celltype". 90 | 91 | And at a ``dataset`` level: 92 | 93 | * ``extend_feature``, the location of the additional feature information. 94 | This can be either the path to a *cell2location* output file, or the location within the AnnData object where the feature is stored as a categorical within ``obs``. 95 | For example, ``/path/to/c2l.h5ad`` containing predicted continuous values, or ``obs/celltype`` containing categoricals (see the sketch below). 96 | 
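If you point ``extend_feature`` at a location inside the AnnData object, the column is expected to be categorical. The following is a minimal sketch (not part of the pipeline; the ``.h5ad`` path and the ``celltype`` column name are placeholders) to check and, if needed, convert the column:

.. code-block:: python

    import anndata as ad

    adata = ad.read_h5ad("/path/to/annotated.h5ad")

    # ``extend_feature: obs/celltype`` expects a categorical column in ``obs``
    print(adata.obs["celltype"].dtype)  # should report "category"

    # if the column is stored as plain strings, convert it before running the pipeline
    adata.obs["celltype"] = adata.obs["celltype"].astype("category")
    adata.write_h5ad("/path/to/annotated.h5ad")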
97 | The full parameters file will then look like this: 98 | 99 | .. code-block:: yaml 100 | 101 | outdir: "/path/to/output/" 102 | 103 | url: http://localhost:3000/ 104 | project: my_project 105 | title: "My Project" 106 | 107 | extend_feature_name: celltype 108 | 109 | data: 110 | - 111 | dataset: scrnaseq 112 | obs_type: cell 113 | anndata: /path/to/main/output/scrnaseq-anndata.zarr 114 | extend_feature: obs/celltype 115 | offset: 0 116 | is_spatial: false 117 | vitessce_options: 118 | spatial: 119 | xy: obsm/spatial 120 | mappings: 121 | obsm/X_umap: [0,1] 122 | matrix: X 123 | - 124 | dataset: visium 125 | obs_type: spot 126 | anndata: /path/to/main/output/visium-anndata.zarr 127 | extend_feature: /path/to/c2l.h5ad 128 | offset: 1000000 129 | is_spatial: true 130 | raw_image: /path/to/main/output/visium-raw.zarr 131 | label_image: /path/to/main/output/visium-label.zarr 132 | vitessce_options: 133 | spatial: 134 | xy: obsm/spatial 135 | matrix: X 136 | 137 | With these parameters the multimodal integration pipeline will concatenate the expression matrix with the additional feature values so both can be queried and visualised across datasets within the same portal. -------------------------------------------------------------------------------- /docs/_sources/multimodal/overview.rst.txt: -------------------------------------------------------------------------------- 1 | .. _multimodal_overview: 2 | 3 | ################### 4 | Multimodal overview 5 | ################### 6 | 7 | After the ``main.nf`` pipeline has been successfully run, WebAtlas can optionally process a group of multimodal datasets that 8 | share common features. This step will prepare the unified multimodal visualisation for the web app. 9 | 10 | The data outputs generated by running the ``main.nf`` conversion pipeline serve as inputs for this multimodal integration pipeline. 11 | 12 | ******************************* 13 | Running the multimodal pipeline 14 | ******************************* 15 | 16 | Follow the instructions below to run the multimodal pipeline. 17 | 18 | 1. :ref:`Configure ` the parameters file for the ``multimodal.nf`` pipeline 19 | 2. :ref:`Run ` the ``multimodal.nf`` pipeline 20 | 3. :ref:`Visualise ` the multimodal data in a web browser 21 | 22 | ******************************* 23 | Tasks completed by the pipeline 24 | ******************************* 25 | 26 | The multimodal integration pipeline performs several tasks: 27 | 28 | 1. Reindex each dataset by a user-provided offset so IDs do not clash between modalities. 29 | 2. *Optionally*, concatenate other observation-by-feature matrices or categorical values to the expression matrix to enable their visualisation as continuous values. For example, a celltype prediction matrix and/or celltype categories. 30 | 3. Find the intersection of features between all datasets and subset them to visualise only the intersection (as including features not present in all datasets can produce misleading visualisations). 31 | **Note** the features are intersected using their index in the AnnData objects (``var`` table). All datasets must use the same type of data as index for the intersection to be correctly computed. For example, all datasets use names as index, or all datasets use IDs as index. -------------------------------------------------------------------------------- /docs/_sources/multimodal/run.rst.txt: -------------------------------------------------------------------------------- 1 | .. _multimodal_run: 2 | 3 | Multimodal run 4 | ============== 5 | 6 | In addition to the main conversion pipeline, we offer a subsequent pipeline to process multiple datasets with matching features. 
This allows users to 7 | visualise and query all common features such as genes and cell types across all modalities from a single web portal. 8 | 9 | Configurations and data are input through a :ref:`parameters yaml file ` (slightly different from the parameters file required by the main pipeline). 10 | 11 | To run this pipeline use 12 | 13 | .. code-block:: shell 14 | 15 | nextflow run multimodal.nf -params-file /path/to/multimodal-params.yaml 16 | 17 | Running using Docker 18 | -------------------- 19 | 20 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Docker containers use the ``-profile docker`` option: 21 | 22 | .. code-block:: shell 23 | 24 | nextflow run multimodal.nf \ 25 | -params-file /path/to/multimodal-params.yaml \ 26 | -profile docker 27 | 28 | Pulling the containers when the pipeline is launched may take a few minutes. 29 | 30 | Running using Singularity 31 | ------------------------- 32 | 33 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Singularity containers use the ``-profile singularity`` option: 34 | 35 | .. code-block:: shell 36 | 37 | nextflow run multimodal.nf \ 38 | -params-file /path/to/multimodal-params.yaml \ 39 | -profile singularity 40 | 41 | Pulling the containers when the pipeline is launched may take a few minutes. 42 | 43 | Running using Conda 44 | ------------------- 45 | 46 | The default pipeline will run on the local executor without any type of environment creation. If you've already set up your conda environment you don't have to do anything else. 47 | 48 | However, if you are working on a compute cluster you will need to make sure the conda environment is available and active on your worker nodes. To run the pipeline using a new conda environment use the ``-profile conda`` option: 49 | 50 | .. code-block:: shell 51 | 52 | nextflow run multimodal.nf \ 53 | -params-file /path/to/multimodal-params.yaml \ 54 | -profile conda 55 | 56 | Creating the environment when the pipeline is launched may take a few minutes. 57 | 58 | Further reading 59 | --------------- 60 | 61 | For more information about Docker image pulling/local conda env creation in Nextflow please refer to Nextflow's official docs for `containers `__ and `conda `__. -------------------------------------------------------------------------------- /docs/_sources/multimodal/visualise.rst.txt: -------------------------------------------------------------------------------- 1 | .. _multimodal_visualise: 2 | 3 | Multimodal visualisation 4 | ======================== 5 | 6 | The pipeline generates a Vitessce view config file for each processed dataset. 7 | This file can then be used to load the views and data as configured in the parameters files. 8 | 9 | You can locally serve and visualise the data in a few steps. 10 | 11 | By default, the base ``url`` used within the view config is ``http://localhost:3000/`` 12 | (this can be changed in the :ref:`parameters file `). 13 | This ``url`` tells Vitessce where to look for the data. 14 | 15 | You can set up an ``http`` server locally to serve the processed files so a Vitessce instance can load them. 16 | There are several tools that can set up an ``http`` server. 17 | We recommend using `serve `__ (requires `Node.js `__), 18 | but you can use any tool that can enable CORS. 
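If Node.js is not available, the same thing can be done with Python's standard library. The following is a minimal sketch of a CORS-enabled static file server (the port and the output directory path are assumptions matching the defaults used in this guide):

.. code-block:: python

    # serve_cors.py - serve the pipeline output with CORS enabled
    from functools import partial
    from http.server import HTTPServer, SimpleHTTPRequestHandler

    class CORSRequestHandler(SimpleHTTPRequestHandler):
        def end_headers(self):
            # add the CORS header to every response so Vitessce can load the data
            self.send_header("Access-Control-Allow-Origin", "*")
            super().end_headers()

    handler = partial(CORSRequestHandler, directory="/path/to/outdir")
    HTTPServer(("127.0.0.1", 3000), handler).serve_forever()

Run it with ``python serve_cors.py`` and stop it with ``CTRL-C``, just as you would with ``serve``.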
19 | 20 | You can serve the view config file and data by specifying the output directory 21 | (note that the pipeline adds its version to the ``outdir`` defined in the :ref:`parameters file `). 22 | 23 | .. parsed-literal:: 24 | 25 | serve -C -p 3000 /path/to/outdir/|release|/ 26 | 27 | Make sure to enable CORS and set the appropriate port number. 28 | In this case, using `serve `__, this is done through the ``-C`` and ``-p`` flags respectively. 29 | 30 | Your view configs should then be accessible at ``http://localhost:3000/{project}-{dataset}-config.json``. 31 | 32 | You can then load them in a Vitessce instance like the `WebAtlas app `__ 33 | deployed at ``__. 34 | 35 | Specify your locally served view config through the ``config`` parameter like 36 | ``https://webatlas.cog.sanger.ac.uk/latest/index.html?config=http://localhost:3000/{project}-{dataset}-config.json`` 37 | and load this URL in your browser to visualise your data in a Vitessce viewer. -------------------------------------------------------------------------------- /docs/_sources/run.rst.txt: -------------------------------------------------------------------------------- 1 | .. _run: 2 | 3 | Running 4 | ======= 5 | 6 | The pipeline offers a workflow that processes files, images, and 7 | builds a Vitessce config file from the generated files. 8 | Alternatively, the workflow to process files only, or the workflow to process images only, 9 | can be called independently. 10 | 11 | Each of these workflows works as an entry point that can be specified when running the 12 | pipeline through the command line. 13 | 14 | - The ``Full_pipeline`` workflow runs the other workflows as needed and 15 | builds a Vitessce config file per dataset. 16 | - The ``Process_files`` workflow handles data files and their conversions. 17 | - The ``Process_images`` workflow handles image files and/or label image data and their conversions. 18 | 19 | Configurations and data are input through a :ref:`parameters yaml file `. 20 | 21 | To run the ``Full_pipeline`` use 22 | 23 | .. code-block:: shell 24 | 25 | nextflow run main.nf -params-file /path/to/params.yaml -entry Full_pipeline 26 | 27 | 28 | This will handle all input files, whether they are data files or images, for all datasets 29 | defined. 30 | 31 | You can modify the entry point if you're interested in only getting the converted outputs. 32 | Use ``-entry Process_files`` or ``-entry Process_images`` as you need. 33 | 34 | Running using Docker 35 | -------------------- 36 | 37 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Docker containers use the ``-profile docker`` option: 38 | 39 | .. code-block:: shell 40 | 41 | nextflow run main.nf \ 42 | -params-file /path/to/params.yaml \ 43 | -entry Full_pipeline \ 44 | -profile docker 45 | 46 | Pulling the containers when the pipeline is launched may take a few minutes. 47 | 48 | Running using Singularity 49 | ------------------------- 50 | 51 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Singularity containers use the ``-profile singularity`` option: 52 | 53 | .. code-block:: shell 54 | 55 | nextflow run main.nf \ 56 | -params-file /path/to/params.yaml \ 57 | -entry Full_pipeline \ 58 | -profile singularity 59 | 60 | Pulling the containers when the pipeline is launched may take a few minutes. 
61 | 62 | Running using Conda 63 | ------------------- 64 | 65 | The default pipeline will run on the local executor without any type of environment creation. If you've already set up your conda environment you don't have to do anything else. 66 | 67 | However, if you are working on a compute cluster you will need to make sure the conda environment is available and active on your worker nodes. To run the pipeline using a new conda environment use the ``-profile conda`` option: 68 | 69 | .. code-block:: shell 70 | 71 | nextflow run main.nf \ 72 | -params-file /path/to/params.yaml \ 73 | -entry Full_pipeline \ 74 | -profile conda 75 | 76 | Creating the environment when the pipeline is launched may take a few minutes. 77 | 78 | Further reading 79 | --------------- 80 | 81 | For more information about Docker image pulling/local conda env creation in Nextflow please refer to Nextflow's official docs for `containers `__ and `conda `__. -------------------------------------------------------------------------------- /docs/_sources/testing.rst.txt: -------------------------------------------------------------------------------- 1 | .. _testing: 2 | 3 | Testing 4 | ======= 5 | 6 | Python testing 7 | -------------- 8 | 9 | Testing of python scripts uses `pytest`_. 10 | 11 | Set the :code:`PYTHONPATH` environment variable to the :code:`bin` directory where the scripts are stored, and then run the following command: 12 | 13 | :: 14 | 15 | python -m pytest -q tests/test_class.py 16 | 17 | .. _pytest: https://docs.pytest.org/en/7.1.x/ -------------------------------------------------------------------------------- /docs/_sources/visualise.rst.txt: -------------------------------------------------------------------------------- 1 | .. _visualise: 2 | 3 | Visualising 4 | =========== 5 | 6 | The pipeline generates a Vitessce view config file for each processed dataset. 7 | This file can then be used to load the views and data as configured in the parameters files. 8 | 9 | You can locally serve and visualise the data in a few steps. 10 | 11 | By default, the base ``url`` used within the view config is ``http://localhost:3000/`` 12 | (this can be changed in the :ref:`parameters file `). 13 | This ``url`` tells Vitessce where to look for the data. 14 | 15 | You can set up an ``http`` server locally to serve the processed files so a Vitessce instance can load them. 16 | There are several tools that can set up an ``http`` server. 17 | We recommend using `serve `__ (requires `Node.js `__), 18 | but you can use any tool that can enable CORS. 19 | 20 | You can serve the view config file and data by specifying the output directory 21 | (note that the pipeline adds its version to the ``outdir`` defined in the :ref:`parameters file `). 22 | 23 | .. parsed-literal:: 24 | 25 | serve -C -p 3000 /path/to/outdir/|release|/ 26 | 27 | Make sure to enable CORS and set the appropriate port number. 28 | In this case, using `serve `__, this is done through the ``-C`` and ``-p`` flags respectively. 29 | 30 | Your view configs should then be accessible at ``http://localhost:3000/{project}-{dataset}-config.json``. 31 | 32 | You can then load them in a Vitessce instance like the `WebAtlas app `__ 33 | deployed at ``__. 34 | 35 | Specify your locally served view config through the ``config`` parameter like 36 | ``https://webatlas.cog.sanger.ac.uk/latest/index.html?config=http://localhost:3000/{project}-{dataset}-config.json`` 37 | and load this URL in your browser to visualise your data in a Vitessce viewer. 
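Before loading the URL, you can quickly confirm that the view config is being served with CORS enabled. The following is a small sketch (assuming the ``requests`` package is installed; the project and dataset names in the URL are placeholders):

.. code-block:: python

    import requests

    url = "http://localhost:3000/my_project-my_dataset-config.json"
    response = requests.get(url, headers={"Origin": "https://webatlas.cog.sanger.ac.uk"})
    response.raise_for_status()

    # the server must return a permissive CORS header for Vitessce to load the data
    print("CORS header:", response.headers.get("Access-Control-Allow-Origin"))
    print("Top-level config keys:", sorted(response.json()))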
-------------------------------------------------------------------------------- /docs/_static/_sphinx_javascript_frameworks_compat.js: -------------------------------------------------------------------------------- 1 | /* Compatability shim for jQuery and underscores.js. 2 | * 3 | * Copyright Sphinx contributors 4 | * Released under the two clause BSD licence 5 | */ 6 | 7 | /** 8 | * small helper function to urldecode strings 9 | * 10 | * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL 11 | */ 12 | jQuery.urldecode = function(x) { 13 | if (!x) { 14 | return x 15 | } 16 | return decodeURIComponent(x.replace(/\+/g, ' ')); 17 | }; 18 | 19 | /** 20 | * small helper function to urlencode strings 21 | */ 22 | jQuery.urlencode = encodeURIComponent; 23 | 24 | /** 25 | * This function returns the parsed url parameters of the 26 | * current request. Multiple values per key are supported, 27 | * it will always return arrays of strings for the value parts. 28 | */ 29 | jQuery.getQueryParameters = function(s) { 30 | if (typeof s === 'undefined') 31 | s = document.location.search; 32 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 33 | var result = {}; 34 | for (var i = 0; i < parts.length; i++) { 35 | var tmp = parts[i].split('=', 2); 36 | var key = jQuery.urldecode(tmp[0]); 37 | var value = jQuery.urldecode(tmp[1]); 38 | if (key in result) 39 | result[key].push(value); 40 | else 41 | result[key] = [value]; 42 | } 43 | return result; 44 | }; 45 | 46 | /** 47 | * highlight a given string on a jquery object by wrapping it in 48 | * span elements with the given class name. 49 | */ 50 | jQuery.fn.highlightText = function(text, className) { 51 | function highlight(node, addItems) { 52 | if (node.nodeType === 3) { 53 | var val = node.nodeValue; 54 | var pos = val.toLowerCase().indexOf(text); 55 | if (pos >= 0 && 56 | !jQuery(node.parentNode).hasClass(className) && 57 | !jQuery(node.parentNode).hasClass("nohighlight")) { 58 | var span; 59 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 60 | if (isInSVG) { 61 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 62 | } else { 63 | span = document.createElement("span"); 64 | span.className = className; 65 | } 66 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 67 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 68 | document.createTextNode(val.substr(pos + text.length)), 69 | node.nextSibling)); 70 | node.nodeValue = val.substr(0, pos); 71 | if (isInSVG) { 72 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 73 | var bbox = node.parentElement.getBBox(); 74 | rect.x.baseVal.value = bbox.x; 75 | rect.y.baseVal.value = bbox.y; 76 | rect.width.baseVal.value = bbox.width; 77 | rect.height.baseVal.value = bbox.height; 78 | rect.setAttribute('class', className); 79 | addItems.push({ 80 | "parent": node.parentNode, 81 | "target": rect}); 82 | } 83 | } 84 | } 85 | else if (!jQuery(node).is("button, select, textarea")) { 86 | jQuery.each(node.childNodes, function() { 87 | highlight(this, addItems); 88 | }); 89 | } 90 | } 91 | var addItems = []; 92 | var result = this.each(function() { 93 | highlight(this, addItems); 94 | }); 95 | for (var i = 0; i < addItems.length; ++i) { 96 | jQuery(addItems[i].parent).before(addItems[i].target); 97 | } 98 | return result; 99 | }; 100 | 101 | /* 102 | * backward compatibility for jQuery.browser 103 | * This will be 
supported until firefox bug is fixed. 104 | */ 105 | if (!jQuery.browser) { 106 | jQuery.uaMatch = function(ua) { 107 | ua = ua.toLowerCase(); 108 | 109 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 110 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 111 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 112 | /(msie) ([\w.]+)/.exec(ua) || 113 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 114 | []; 115 | 116 | return { 117 | browser: match[ 1 ] || "", 118 | version: match[ 2 ] || "0" 119 | }; 120 | }; 121 | jQuery.browser = {}; 122 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 123 | } 124 | -------------------------------------------------------------------------------- /docs/_static/css/badge_only.css: -------------------------------------------------------------------------------- 1 | .clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd 
a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} -------------------------------------------------------------------------------- /docs/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | .rst-content .code-block-caption { 2 | text-align: left; 3 | font-weight: 700; 4 | display: block; 5 | color: #fff; 6 | background: #6ab0de; 7 | padding: 6px 12px; 8 | } 9 | .rst-content div[class^="highlight"] { 10 | margin-top: 0; 11 | } 12 | .admonition.shell-output { 13 | font-size: 85%; 14 | line-height: 1; 15 | } 16 | .admonition.shell-output .admonition-title:before { 17 | content: None; 18 | } 19 | .admonition.shell-output .admonition-title { 20 | font-style: italic; 21 | } 22 | .citation { 23 | font-weight: bold; 24 | font-style: italic; 25 | } 26 | code.docutils.literal.notranslate { 27 | white-space: nowrap; 28 | } 29 | -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/Roboto-Slab-Bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/Roboto-Slab-Regular.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/Roboto-Slab-Regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.ttf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-bold-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-bold-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-bold.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-normal-italic.woff -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-normal-italic.woff2 -------------------------------------------------------------------------------- /docs/_static/css/fonts/lato-normal.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-normal.woff -------------------------------------------------------------------------------- 
/docs/_static/css/fonts/lato-normal.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/css/fonts/lato-normal.woff2 -------------------------------------------------------------------------------- /docs/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Base JavaScript utilities for all Sphinx HTML documentation. 6 | * 7 | * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | "use strict"; 12 | 13 | const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ 14 | "TEXTAREA", 15 | "INPUT", 16 | "SELECT", 17 | "BUTTON", 18 | ]); 19 | 20 | const _ready = (callback) => { 21 | if (document.readyState !== "loading") { 22 | callback(); 23 | } else { 24 | document.addEventListener("DOMContentLoaded", callback); 25 | } 26 | }; 27 | 28 | /** 29 | * Small JavaScript module for the documentation. 30 | */ 31 | const Documentation = { 32 | init: () => { 33 | Documentation.initDomainIndexTable(); 34 | Documentation.initOnKeyListeners(); 35 | }, 36 | 37 | /** 38 | * i18n support 39 | */ 40 | TRANSLATIONS: {}, 41 | PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), 42 | LOCALE: "unknown", 43 | 44 | // gettext and ngettext don't access this so that the functions 45 | // can safely bound to a different name (_ = Documentation.gettext) 46 | gettext: (string) => { 47 | const translated = Documentation.TRANSLATIONS[string]; 48 | switch (typeof translated) { 49 | case "undefined": 50 | return string; // no translation 51 | case "string": 52 | return translated; // translation exists 53 | default: 54 | return translated[0]; // (singular, plural) translation tuple exists 55 | } 56 | }, 57 | 58 | ngettext: (singular, plural, n) => { 59 | const translated = Documentation.TRANSLATIONS[singular]; 60 | if (typeof translated !== "undefined") 61 | return translated[Documentation.PLURAL_EXPR(n)]; 62 | return n === 1 ? 
singular : plural; 63 | }, 64 | 65 | addTranslations: (catalog) => { 66 | Object.assign(Documentation.TRANSLATIONS, catalog.messages); 67 | Documentation.PLURAL_EXPR = new Function( 68 | "n", 69 | `return (${catalog.plural_expr})` 70 | ); 71 | Documentation.LOCALE = catalog.locale; 72 | }, 73 | 74 | /** 75 | * helper function to focus on search bar 76 | */ 77 | focusSearchBar: () => { 78 | document.querySelectorAll("input[name=q]")[0]?.focus(); 79 | }, 80 | 81 | /** 82 | * Initialise the domain index toggle buttons 83 | */ 84 | initDomainIndexTable: () => { 85 | const toggler = (el) => { 86 | const idNumber = el.id.substr(7); 87 | const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); 88 | if (el.src.substr(-9) === "minus.png") { 89 | el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; 90 | toggledRows.forEach((el) => (el.style.display = "none")); 91 | } else { 92 | el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; 93 | toggledRows.forEach((el) => (el.style.display = "")); 94 | } 95 | }; 96 | 97 | const togglerElements = document.querySelectorAll("img.toggler"); 98 | togglerElements.forEach((el) => 99 | el.addEventListener("click", (event) => toggler(event.currentTarget)) 100 | ); 101 | togglerElements.forEach((el) => (el.style.display = "")); 102 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); 103 | }, 104 | 105 | initOnKeyListeners: () => { 106 | // only install a listener if it is really needed 107 | if ( 108 | !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && 109 | !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS 110 | ) 111 | return; 112 | 113 | document.addEventListener("keydown", (event) => { 114 | // bail for input elements 115 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 116 | // bail with special keys 117 | if (event.altKey || event.ctrlKey || event.metaKey) return; 118 | 119 | if (!event.shiftKey) { 120 | switch (event.key) { 121 | case "ArrowLeft": 122 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; 123 | 124 | const prevLink = document.querySelector('link[rel="prev"]'); 125 | if (prevLink && prevLink.href) { 126 | window.location.href = prevLink.href; 127 | event.preventDefault(); 128 | } 129 | break; 130 | case "ArrowRight": 131 | if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; 132 | 133 | const nextLink = document.querySelector('link[rel="next"]'); 134 | if (nextLink && nextLink.href) { 135 | window.location.href = nextLink.href; 136 | event.preventDefault(); 137 | } 138 | break; 139 | } 140 | } 141 | 142 | // some keyboard layouts may need Shift to get / 143 | switch (event.key) { 144 | case "/": 145 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; 146 | Documentation.focusSearchBar(); 147 | event.preventDefault(); 148 | } 149 | }); 150 | }, 151 | }; 152 | 153 | // quick alias for translations 154 | const _ = Documentation.gettext; 155 | 156 | _ready(Documentation.init); 157 | -------------------------------------------------------------------------------- /docs/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | const DOCUMENTATION_OPTIONS = { 2 | VERSION: '0.4.0', 3 | LANGUAGE: 'en', 4 | COLLAPSE_INDEX: false, 5 | BUILDER: 'html', 6 | FILE_SUFFIX: '.html', 7 | LINK_SUFFIX: '.html', 8 | HAS_SOURCE: true, 9 | SOURCELINK_SUFFIX: '.txt', 10 | NAVIGATION_WITH_KEYS: false, 11 | SHOW_SEARCH_SUMMARY: true, 12 | ENABLE_SEARCH_SHORTCUTS: true, 13 | }; 
-------------------------------------------------------------------------------- /docs/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/file.png -------------------------------------------------------------------------------- /docs/_static/js/badge_only.js: -------------------------------------------------------------------------------- 1 | !function(e){var t={};function r(n){if(t[n])return t[n].exports;var o=t[n]={i:n,l:!1,exports:{}};return e[n].call(o.exports,o,o.exports,r),o.l=!0,o.exports}r.m=e,r.c=t,r.d=function(e,t,n){r.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},r.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},r.t=function(e,t){if(1&t&&(e=r(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(r.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var o in e)r.d(n,o,function(t){return e[t]}.bind(null,o));return n},r.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return r.d(t,"a",t),t},r.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},r.p="",r(r.s=4)}({4:function(e,t,r){}}); -------------------------------------------------------------------------------- /docs/_static/js/html5shiv-printshiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @preserve HTML5 Shiv 3.7.3-pre | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var 
b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /docs/_static/js/html5shiv.min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed 3 | */ 4 | !function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return 
a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); -------------------------------------------------------------------------------- /docs/_static/js/theme.js: -------------------------------------------------------------------------------- 1 | !function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 
0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t0 63 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 64 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 65 | var s_v = "^(" + C + ")?" 
+ v; // vowel in stem 66 | 67 | this.stemWord = function (w) { 68 | var stem; 69 | var suffix; 70 | var firstch; 71 | var origword = w; 72 | 73 | if (w.length < 3) 74 | return w; 75 | 76 | var re; 77 | var re2; 78 | var re3; 79 | var re4; 80 | 81 | firstch = w.substr(0,1); 82 | if (firstch == "y") 83 | w = firstch.toUpperCase() + w.substr(1); 84 | 85 | // Step 1a 86 | re = /^(.+?)(ss|i)es$/; 87 | re2 = /^(.+?)([^s])s$/; 88 | 89 | if (re.test(w)) 90 | w = w.replace(re,"$1$2"); 91 | else if (re2.test(w)) 92 | w = w.replace(re2,"$1$2"); 93 | 94 | // Step 1b 95 | re = /^(.+?)eed$/; 96 | re2 = /^(.+?)(ed|ing)$/; 97 | if (re.test(w)) { 98 | var fp = re.exec(w); 99 | re = new RegExp(mgr0); 100 | if (re.test(fp[1])) { 101 | re = /.$/; 102 | w = w.replace(re,""); 103 | } 104 | } 105 | else if (re2.test(w)) { 106 | var fp = re2.exec(w); 107 | stem = fp[1]; 108 | re2 = new RegExp(s_v); 109 | if (re2.test(stem)) { 110 | w = stem; 111 | re2 = /(at|bl|iz)$/; 112 | re3 = new RegExp("([^aeiouylsz])\\1$"); 113 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 114 | if (re2.test(w)) 115 | w = w + "e"; 116 | else if (re3.test(w)) { 117 | re = /.$/; 118 | w = w.replace(re,""); 119 | } 120 | else if (re4.test(w)) 121 | w = w + "e"; 122 | } 123 | } 124 | 125 | // Step 1c 126 | re = /^(.+?)y$/; 127 | if (re.test(w)) { 128 | var fp = re.exec(w); 129 | stem = fp[1]; 130 | re = new RegExp(s_v); 131 | if (re.test(stem)) 132 | w = stem + "i"; 133 | } 134 | 135 | // Step 2 136 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; 137 | if (re.test(w)) { 138 | var fp = re.exec(w); 139 | stem = fp[1]; 140 | suffix = fp[2]; 141 | re = new RegExp(mgr0); 142 | if (re.test(stem)) 143 | w = stem + step2list[suffix]; 144 | } 145 | 146 | // Step 3 147 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; 148 | if (re.test(w)) { 149 | var fp = re.exec(w); 150 | stem = fp[1]; 151 | suffix = fp[2]; 152 | re = new RegExp(mgr0); 153 | if (re.test(stem)) 154 | w = stem + step3list[suffix]; 155 | } 156 | 157 | // Step 4 158 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; 159 | re2 = /^(.+?)(s|t)(ion)$/; 160 | if (re.test(w)) { 161 | var fp = re.exec(w); 162 | stem = fp[1]; 163 | re = new RegExp(mgr1); 164 | if (re.test(stem)) 165 | w = stem; 166 | } 167 | else if (re2.test(w)) { 168 | var fp = re2.exec(w); 169 | stem = fp[1] + fp[2]; 170 | re2 = new RegExp(mgr1); 171 | if (re2.test(stem)) 172 | w = stem; 173 | } 174 | 175 | // Step 5 176 | re = /^(.+?)e$/; 177 | if (re.test(w)) { 178 | var fp = re.exec(w); 179 | stem = fp[1]; 180 | re = new RegExp(mgr1); 181 | re2 = new RegExp(meq1); 182 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 183 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) 184 | w = stem; 185 | } 186 | re = /ll$/; 187 | re2 = new RegExp(mgr1); 188 | if (re.test(w) && re2.test(w)) { 189 | re = /.$/; 190 | w = w.replace(re,""); 191 | } 192 | 193 | // and turn initial Y back to y 194 | if (firstch == "y") 195 | w = firstch.toLowerCase() + w.substr(1); 196 | return w; 197 | } 198 | } 199 | 200 | -------------------------------------------------------------------------------- /docs/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/minus.png 
-------------------------------------------------------------------------------- /docs/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/_static/plus.png -------------------------------------------------------------------------------- /docs/_static/pygments.css: -------------------------------------------------------------------------------- 1 | pre { line-height: 125%; } 2 | td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 3 | span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } 4 | td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 5 | span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } 6 | .highlight .hll { background-color: #ffffcc } 7 | .highlight { background: #f8f8f8; } 8 | .highlight .c { color: #3D7B7B; font-style: italic } /* Comment */ 9 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 10 | .highlight .k { color: #008000; font-weight: bold } /* Keyword */ 11 | .highlight .o { color: #666666 } /* Operator */ 12 | .highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */ 13 | .highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ 14 | .highlight .cp { color: #9C6500 } /* Comment.Preproc */ 15 | .highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ 16 | .highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ 17 | .highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ 18 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 19 | .highlight .ge { font-style: italic } /* Generic.Emph */ 20 | .highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ 21 | .highlight .gr { color: #E40000 } /* Generic.Error */ 22 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 23 | .highlight .gi { color: #008400 } /* Generic.Inserted */ 24 | .highlight .go { color: #717171 } /* Generic.Output */ 25 | .highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ 26 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 27 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 28 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 29 | .highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ 30 | .highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ 31 | .highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ 32 | .highlight .kp { color: #008000 } /* Keyword.Pseudo */ 33 | .highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ 34 | .highlight .kt { color: #B00040 } /* Keyword.Type */ 35 | .highlight .m { color: #666666 } /* Literal.Number */ 36 | .highlight .s { color: #BA2121 } /* Literal.String */ 37 | .highlight .na { color: #687822 } /* Name.Attribute */ 38 | .highlight .nb { color: #008000 } /* Name.Builtin */ 39 | .highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ 40 | .highlight .no { color: #880000 } /* Name.Constant */ 41 | .highlight .nd { color: #AA22FF } /* Name.Decorator */ 42 | .highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */ 43 | .highlight .ne { color: #CB3F38; 
font-weight: bold } /* Name.Exception */ 44 | .highlight .nf { color: #0000FF } /* Name.Function */ 45 | .highlight .nl { color: #767600 } /* Name.Label */ 46 | .highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ 47 | .highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ 48 | .highlight .nv { color: #19177C } /* Name.Variable */ 49 | .highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ 50 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 51 | .highlight .mb { color: #666666 } /* Literal.Number.Bin */ 52 | .highlight .mf { color: #666666 } /* Literal.Number.Float */ 53 | .highlight .mh { color: #666666 } /* Literal.Number.Hex */ 54 | .highlight .mi { color: #666666 } /* Literal.Number.Integer */ 55 | .highlight .mo { color: #666666 } /* Literal.Number.Oct */ 56 | .highlight .sa { color: #BA2121 } /* Literal.String.Affix */ 57 | .highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ 58 | .highlight .sc { color: #BA2121 } /* Literal.String.Char */ 59 | .highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ 60 | .highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ 61 | .highlight .s2 { color: #BA2121 } /* Literal.String.Double */ 62 | .highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */ 63 | .highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ 64 | .highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */ 65 | .highlight .sx { color: #008000 } /* Literal.String.Other */ 66 | .highlight .sr { color: #A45A77 } /* Literal.String.Regex */ 67 | .highlight .s1 { color: #BA2121 } /* Literal.String.Single */ 68 | .highlight .ss { color: #19177C } /* Literal.String.Symbol */ 69 | .highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ 70 | .highlight .fm { color: #0000FF } /* Name.Function.Magic */ 71 | .highlight .vc { color: #19177C } /* Name.Variable.Class */ 72 | .highlight .vg { color: #19177C } /* Name.Variable.Global */ 73 | .highlight .vi { color: #19177C } /* Name.Variable.Instance */ 74 | .highlight .vm { color: #19177C } /* Name.Variable.Magic */ 75 | .highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/_static/sphinx_highlight.js: -------------------------------------------------------------------------------- 1 | /* Highlighting utilities for Sphinx HTML documentation. */ 2 | "use strict"; 3 | 4 | const SPHINX_HIGHLIGHT_ENABLED = true 5 | 6 | /** 7 | * highlight a given string on a node by wrapping it in 8 | * span elements with the given class name. 
9 | */ 10 | const _highlight = (node, addItems, text, className) => { 11 | if (node.nodeType === Node.TEXT_NODE) { 12 | const val = node.nodeValue; 13 | const parent = node.parentNode; 14 | const pos = val.toLowerCase().indexOf(text); 15 | if ( 16 | pos >= 0 && 17 | !parent.classList.contains(className) && 18 | !parent.classList.contains("nohighlight") 19 | ) { 20 | let span; 21 | 22 | const closestNode = parent.closest("body, svg, foreignObject"); 23 | const isInSVG = closestNode && closestNode.matches("svg"); 24 | if (isInSVG) { 25 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 26 | } else { 27 | span = document.createElement("span"); 28 | span.classList.add(className); 29 | } 30 | 31 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 32 | const rest = document.createTextNode(val.substr(pos + text.length)); 33 | parent.insertBefore( 34 | span, 35 | parent.insertBefore( 36 | rest, 37 | node.nextSibling 38 | ) 39 | ); 40 | node.nodeValue = val.substr(0, pos); 41 | /* There may be more occurrences of search term in this node. So call this 42 | * function recursively on the remaining fragment. 43 | */ 44 | _highlight(rest, addItems, text, className); 45 | 46 | if (isInSVG) { 47 | const rect = document.createElementNS( 48 | "http://www.w3.org/2000/svg", 49 | "rect" 50 | ); 51 | const bbox = parent.getBBox(); 52 | rect.x.baseVal.value = bbox.x; 53 | rect.y.baseVal.value = bbox.y; 54 | rect.width.baseVal.value = bbox.width; 55 | rect.height.baseVal.value = bbox.height; 56 | rect.setAttribute("class", className); 57 | addItems.push({ parent: parent, target: rect }); 58 | } 59 | } 60 | } else if (node.matches && !node.matches("button, select, textarea")) { 61 | node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); 62 | } 63 | }; 64 | const _highlightText = (thisNode, text, className) => { 65 | let addItems = []; 66 | _highlight(thisNode, addItems, text, className); 67 | addItems.forEach((obj) => 68 | obj.parent.insertAdjacentElement("beforebegin", obj.target) 69 | ); 70 | }; 71 | 72 | /** 73 | * Small JavaScript module for the documentation. 74 | */ 75 | const SphinxHighlight = { 76 | 77 | /** 78 | * highlight the search words provided in localstorage in the text 79 | */ 80 | highlightSearchWords: () => { 81 | if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight 82 | 83 | // get and clear terms from localstorage 84 | const url = new URL(window.location); 85 | const highlight = 86 | localStorage.getItem("sphinx_highlight_terms") 87 | || url.searchParams.get("highlight") 88 | || ""; 89 | localStorage.removeItem("sphinx_highlight_terms") 90 | url.searchParams.delete("highlight"); 91 | window.history.replaceState({}, "", url); 92 | 93 | // get individual terms from highlight string 94 | const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); 95 | if (terms.length === 0) return; // nothing to do 96 | 97 | // There should never be more than one element matching "div.body" 98 | const divBody = document.querySelectorAll("div.body"); 99 | const body = divBody.length ? 
divBody[0] : document.querySelector("body"); 100 | window.setTimeout(() => { 101 | terms.forEach((term) => _highlightText(body, term, "highlighted")); 102 | }, 10); 103 | 104 | const searchBox = document.getElementById("searchbox"); 105 | if (searchBox === null) return; 106 | searchBox.appendChild( 107 | document 108 | .createRange() 109 | .createContextualFragment( 110 | '" 114 | ) 115 | ); 116 | }, 117 | 118 | /** 119 | * helper function to hide the search marks again 120 | */ 121 | hideSearchWords: () => { 122 | document 123 | .querySelectorAll("#searchbox .highlight-link") 124 | .forEach((el) => el.remove()); 125 | document 126 | .querySelectorAll("span.highlighted") 127 | .forEach((el) => el.classList.remove("highlighted")); 128 | localStorage.removeItem("sphinx_highlight_terms") 129 | }, 130 | 131 | initEscapeListener: () => { 132 | // only install a listener if it is really needed 133 | if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; 134 | 135 | document.addEventListener("keydown", (event) => { 136 | // bail for input elements 137 | if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; 138 | // bail with special keys 139 | if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; 140 | if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { 141 | SphinxHighlight.hideSearchWords(); 142 | event.preventDefault(); 143 | } 144 | }); 145 | }, 146 | }; 147 | 148 | _ready(() => { 149 | /* Do not call highlightSearchWords() when we are on the search page. 150 | * It will highlight words from the *previous* search query. 151 | */ 152 | if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); 153 | SphinxHighlight.initEscapeListener(); 154 | }); 155 | -------------------------------------------------------------------------------- /docs/citing.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Citation — WebAtlas 0.4.1 documentation 8 | 9 | 10 | 11 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 |
[docs/citing.html — rendered Sphinx page for sphinx/citing.rst; HTML markup stripped in this dump. Recoverable page content: heading "Citation"; DOI badge; "If you use this software in a scientific publication, please cite using the following Zenodo reference."; "Li, Tong, Horsfall, David, Basurto-Lozada, Daniela, Prete, Martin, Jessica, Cox, & Iolo, Squires. (2023). WebAtlas Pipeline (v0.4.1). Zenodo. https://doi.org/10.5281/zenodo.7863308"; footer "© Copyright 2023, Haniffa Lab. Built with Sphinx using a theme provided by Read the Docs."] -------------------------------------------------------------------------------- /docs/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/haniffalab/webatlas-pipeline/65b43936446ec44229949489af2ad5d0146bd8bf/docs/objects.inv -------------------------------------------------------------------------------- /docs/search.html: -------------------------------------------------------------------------------- [docs/search.html — rendered Sphinx search page ("Search — WebAtlas 0.4.0 documentation"); HTML markup stripped in this dump; it contains only the search form, a results placeholder, and the standard theme footer.]
-------------------------------------------------------------------------------- /envs/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10 2 | ARG BIOFORMATS2RAW="0.7.0" 3 | 4 | COPY ./requirements.txt /requirements.txt 5 | RUN apt-get update && \ 6 | apt-get install -y --no-install-recommends \ 7 | wget unzip cmake g++ ant procps libblosc1 libvips libtiff-tools openjdk-17-jre-headless && \ 8 | apt-get clean && \ 9 | rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* && \ 10 | wget -q -O bioformats2raw.zip https://github.com/glencoesoftware/bioformats2raw/releases/download/v${BIOFORMATS2RAW}/bioformats2raw-${BIOFORMATS2RAW}.zip && \ 11 | unzip bioformats2raw.zip -d /usr/local/share/ && \ 12 | ln -sf /usr/local/share/bioformats2raw-${BIOFORMATS2RAW}/bin/bioformats2raw /usr/local/bin/bioformats2raw && \ 13 | rm -rf bioformats2raw.zip 14 | 15 | RUN pip install --upgrade pip setuptools distlib --no-cache-dir && \ 16 | pip install --no-cache-dir -r /requirements.txt 17 | 18 | ENTRYPOINT [] 19 | -------------------------------------------------------------------------------- /envs/build-docker-imgs.sh: -------------------------------------------------------------------------------- 1 | #! /bin/sh 2 | VERSION=0.5.3 3 | 4 | # 5 | # Build local docker images 6 | # 7 | # When using docker the pipeline can use local images or pull them from DockerHub. 8 | # If you want to build the images yourself you can do it like this: 9 | # 10 | # cd envs 11 | # ./build-docker-imgs.sh 12 | # 13 | 14 | docker build --platform=linux/amd64 -t haniffalab/webatlas-pipeline:${VERSION} -f ./Dockerfile . 15 | cd build_config/ 16 | docker build --platform=linux/amd64 -t haniffalab/webatlas-pipeline-build-config:${VERSION} -f ./Dockerfile .
17 | -------------------------------------------------------------------------------- /envs/build_config/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10 2 | 3 | COPY ./requirements.txt /requirements.txt 4 | RUN apt-get update && \ 5 | apt-get install -y --no-install-recommends && \ 6 | apt-get clean && \ 7 | rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 8 | 9 | RUN pip install --upgrade pip --no-cache-dir && \ 10 | pip install --no-cache-dir -r /requirements.txt 11 | -------------------------------------------------------------------------------- /envs/build_config/environment.yaml: -------------------------------------------------------------------------------- 1 | name: webatlas-build_config 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - fire==0.5.0 6 | - python==3.10 7 | - regex==2023.10.3 8 | - pip: 9 | - vitessce==3.1.0 10 | -------------------------------------------------------------------------------- /envs/build_config/requirements.txt: -------------------------------------------------------------------------------- 1 | fire==0.5.0 2 | regex==2023.10.3 3 | vitessce==3.1.0 -------------------------------------------------------------------------------- /envs/dev/requirements.txt: -------------------------------------------------------------------------------- 1 | pytest==7.4.3 2 | pytest-cov==4.1.0 3 | pre-commit==3.5.0 4 | Sphinx==7.2.6 5 | sphinx-rtd-theme==1.3.0 6 | vitessce==3.1.0 -------------------------------------------------------------------------------- /envs/environment.yaml: -------------------------------------------------------------------------------- 1 | name: webatlas 2 | channels: 3 | - conda-forge 4 | - bioconda 5 | dependencies: 6 | - ome::bioformats2raw==0.7.0 7 | - pillow==10.1.0 8 | - xmlschema==2.5.0 9 | - pip==23.2.1 10 | - scipy==1.11.3 11 | - fire==0.5.0 12 | - regex==2023.8.8 13 | - scikit-image==0.22.0 14 | - ome::bioformats2raw-libs==0.7.0 15 | - ome::raw2ometiff-libs==0.5.0=0 16 | - tifffile==2023.9.26 17 | - numpy==1.24.4 18 | - ome::raw2ometiff==0.5.0=0 19 | - pandas==2.1.0 20 | - pyvips==2.2.1 21 | - scanpy==1.9.4 22 | - ome-zarr==0.8 23 | - python==3.10 24 | - anndata==0.9.1 25 | - zarr==2.16.1 26 | - h5py==3.10.0 27 | - rioxarray==0.15.0 28 | - pip: 29 | - spatialdata==0.0.15 30 | -------------------------------------------------------------------------------- /envs/requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.8.6 2 | aiosignal==1.3.1 3 | anndata==0.9.1 4 | apeer-ometiff-library==1.9.0 5 | asciitree==0.3.3 6 | async-timeout==4.0.3 7 | attrs==23.1.0 8 | Brotli==1.1.0 9 | cached-property==1.5.2 10 | certifi==2023.7.22 11 | cffi==1.16.0 12 | charset-normalizer==3.3.2 13 | click==8.1.7 14 | cloudpickle==3.0.0 15 | colorama==0.4.6 16 | contourpy==1.1.1 17 | cycler==0.12.1 18 | cytoolz==0.12.2 19 | dask==2023.10.1 20 | distributed==2023.10.1 21 | elementpath==4.1.5 22 | fasteners==0.17.3 23 | fire==0.5.0 24 | fonttools==4.43.1 25 | frozenlist==1.4.0 26 | fsspec==2023.6 27 | h5py==3.10.0 28 | idna==3.4 29 | imagecodecs==2023.9.18 30 | imageio==2.31.5 31 | importlib-metadata==6.8.0 32 | Jinja2==3.1.2 33 | joblib==1.3.2 34 | kiwisolver==1.4.5 35 | lazy_loader==0.3 36 | llvmlite==0.40.1 37 | locket==1.0.0 38 | MarkupSafe==2.1.3 39 | matplotlib==3.8.1 40 | msgpack==1.0.6 41 | multidict==6.0.4 42 | munkres==1.1.4 43 | natsort==8.4.0 44 | networkx==3.2.1 45 | numba==0.57.1 46 | numcodecs==0.12.1 47 | numpy==1.24.4 48 
| ome-zarr==0.8.0 49 | packaging==23.2 50 | pandas==2.1.0 51 | partd==1.4.1 52 | patsy==0.5.3 53 | Pillow==10.1.0 54 | pip==23.2.1 55 | pkgconfig==1.5.5 56 | psutil==5.9.5 57 | pycparser==2.21 58 | pynndescent==0.5.10 59 | pyparsing==3.1.1 60 | PySocks==1.7.1 61 | python-dateutil==2.8.2 62 | pytz==2023.3.post1 63 | pyvips==2.2.1 64 | PyWavelets==1.4.1 65 | PyYAML==6.0.1 66 | regex==2023.8.8 67 | requests==2.31.0 68 | rioxarray==0.15.0 69 | scanpy==1.9.4 70 | scikit-image==0.22.0 71 | scikit-learn==1.3.2 72 | scipy==1.11.3 73 | seaborn==0.13.0 74 | session-info==1.0.0 75 | setuptools==68.2.2 76 | six==1.16.0 77 | sortedcontainers==2.4.0 78 | spatialdata==0.0.15 79 | Sphinx==7.2.6 80 | sphinx-rtd-theme==1.3.0 81 | statsmodels==0.14.0 82 | stdlib-list==0.8.0 83 | tblib==2.0.0 84 | termcolor==2.3.0 85 | threadpoolctl==3.2.0 86 | tifffile==2023.9.26 87 | toolz==0.12.0 88 | tornado==6.3.3 89 | tqdm==4.66.1 90 | typing_extensions==4.8.0 91 | tzdata==2023.3 92 | umap-learn==0.5.4 93 | unicodedata2==15.1.0 94 | urllib3==1.25.4 95 | wheel==0.41.3 96 | xmlschema==2.5.0 97 | yarl==1.9.2 98 | zarr==2.16.1 99 | zict==3.0.0 100 | zipp==3.17.0 -------------------------------------------------------------------------------- /nextflow.config: -------------------------------------------------------------------------------- 1 | process { 2 | withName: image_to_zarr { 3 | memory = 40.GB 4 | cpus = 4 5 | } 6 | 7 | withName: ome_zarr_metadata { 8 | memory = 2.GB 9 | cpus = 1 10 | } 11 | 12 | withName: route_file { 13 | // max between 4GB or the size of the sparse matrix 14 | memory = 64.GB 15 | cpus = 1 16 | } 17 | 18 | withName: Build_config { 19 | memory = 2.GB 20 | cpus = 1 21 | } 22 | 23 | withName: Generate_image { 24 | memory = 40.GB 25 | cpus = 4 26 | } 27 | } 28 | 29 | profiles { 30 | 31 | conda { 32 | conda.enabled = true 33 | process { 34 | conda = "$projectDir/envs/environment.yaml" 35 | withLabel: build_config { 36 | conda = "$projectDir/envs/build_config/environment.yaml" 37 | } 38 | } 39 | } 40 | 41 | docker { 42 | docker.enabled = true 43 | process { 44 | container = 'haniffalab/webatlas-pipeline:0.5.3' 45 | withLabel: build_config { 46 | container = 'haniffalab/webatlas-pipeline-build-config:0.5.3' 47 | } 48 | } 49 | } 50 | 51 | singularity { 52 | singularity.enabled = true 53 | singularity.autoMounts = true 54 | process { 55 | container = 'haniffalab/webatlas-pipeline:0.5.3' 56 | withLabel: build_config { 57 | container = 'haniffalab/webatlas-pipeline-build-config:0.5.3' 58 | } 59 | } 60 | } 61 | 62 | sanger { 63 | executor { 64 | name = 'lsf' 65 | perJobMemLimit = true 66 | } 67 | 68 | process.executor = 'lsf' 69 | process.queue = 'normal' 70 | singularity.runOptions = '--bind $(mktemp -d /tmp/webatlas.XXXXX):/tmp --env NUMBA_CACHE_DIR=/tmp' 71 | } 72 | } -------------------------------------------------------------------------------- /notebooks/generate_xenium_label.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import os\n", 10 | "import tifffile\n", 11 | "import zarr\n", 12 | "import numpy as np\n", 13 | "from skimage.draw import polygon" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": {}, 20 | "outputs": [], 21 | "source": [ 22 | "zarr_dir = \"/path/to/xenium_output/cells.zarr.zip\"\n", 23 | "out_dir = \"./\"\n", 24 | "\n", 25 | "res = 0.2125 # xenium pixel resolution" 26 | ]
27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "width = 10000 # raw image width\n", 35 | "height = 10000 # raw image height\n", 36 | "\n", 37 | "# or\n", 38 | "\n", 39 | "# raw_img = \"/path/to/xenium_output/raw.ome.tif\"\n", 40 | "# tif = tifffile.TiffFile(raw_img)\n", 41 | "# height, width = tif.pages[0].shape" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": null, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "z = zarr.open(zarr_dir, \"r\")\n", 51 | "ids = z[\"cell_id\"]\n", 52 | "pols = z[\"polygon_vertices\"][1]\n", 53 | "\n", 54 | "label = np.zeros((height, width), dtype=np.min_scalar_type(max(ids)))\n", 55 | "\n", 56 | "for id, pol in zip(ids,pols):\n", 57 | " pol = pol/res\n", 58 | " pol = np.array(list(map(list, pol.reshape(pol.shape[0]//2,2))))\n", 59 | " rr, cc = polygon(pol[:,1], pol[:,0])\n", 60 | " label[rr-1, cc-1] = int(id)" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "metadata": {}, 67 | "outputs": [], 68 | "source": [ 69 | "tifffile.imwrite(os.path.join(out_dir, \"xenium_label.tif\"), label)" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": null, 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [] 78 | } 79 | ], 80 | "metadata": { 81 | "kernelspec": { 82 | "display_name": "Python 3.10.8 64-bit", 83 | "language": "python", 84 | "name": "python3" 85 | }, 86 | "language_info": { 87 | "codemirror_mode": { 88 | "name": "ipython", 89 | "version": 3 90 | }, 91 | "file_extension": ".py", 92 | "mimetype": "text/x-python", 93 | "name": "python", 94 | "nbconvert_exporter": "python", 95 | "pygments_lexer": "ipython3", 96 | "version": "3.10.8" 97 | }, 98 | "orig_nbformat": 4, 99 | "vscode": { 100 | "interpreter": { 101 | "hash": "b0fa6594d8f4cbf19f97940f81e996739fb7646882a419484c72d19e05852a7e" 102 | } 103 | } 104 | }, 105 | "nbformat": 4, 106 | "nbformat_minor": 2 107 | } 108 | -------------------------------------------------------------------------------- /sphinx/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /sphinx/README.md: -------------------------------------------------------------------------------- 1 | Tested with Python 3.10 2 | 3 | From the project root directory: 4 | 5 | ``` 6 | python -m venv venv 7 | . 
venv/bin/activate 8 | pip install --upgrade pip 9 | pip install -r ./envs/requirements.txt 10 | pip install -r ./envs/dev/requirements.txt 11 | cd sphinx/ 12 | make html 13 | ``` 14 | -------------------------------------------------------------------------------- /sphinx/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | .rst-content .code-block-caption { 2 | text-align: left; 3 | font-weight: 700; 4 | display: block; 5 | color: #fff; 6 | background: #6ab0de; 7 | padding: 6px 12px; 8 | } 9 | .rst-content div[class^="highlight"] { 10 | margin-top: 0; 11 | } 12 | .admonition.shell-output { 13 | font-size: 85%; 14 | line-height: 1; 15 | } 16 | .admonition.shell-output .admonition-title:before { 17 | content: None; 18 | } 19 | .admonition.shell-output .admonition-title { 20 | font-style: italic; 21 | } 22 | .citation { 23 | font-weight: bold; 24 | font-style: italic; 25 | } 26 | code.docutils.literal.notranslate { 27 | white-space: nowrap; 28 | } 29 | -------------------------------------------------------------------------------- /sphinx/citing.rst: -------------------------------------------------------------------------------- 1 | .. _citing: 2 | 3 | 4 | Citation 5 | ======== 6 | 7 | |DOI| 8 | 9 | .. |DOI| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.7405818.svg 10 | :target: https://doi.org/10.5281/zenodo.7405818 11 | 12 | If you use this software in a scientific publication, please cite using the following Zenodo reference. 13 | 14 | **Li, Tong, Horsfall, David, Basurto-Lozada, Daniela, Prete, Martin, Jessica, Cox, & Iolo, Squires. (2023). WebAtlas Pipeline (v0.5.2). Zenodo. https://doi.org/10.5281/zenodo.7863308** 15 | -------------------------------------------------------------------------------- /sphinx/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | 16 | sys.path.insert(0, os.path.abspath("../bin")) 17 | 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = "WebAtlas" 22 | copyright = "2023, Haniffa Lab" 23 | author = "Haniffa Lab" 24 | 25 | # The full version, including alpha/beta/rc tags 26 | release = "0.5.3" 27 | 28 | # -- General configuration --------------------------------------------------- 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"] 34 | 35 | # Add any paths that contain templates here, relative to this directory. 36 | templates_path = ["_templates"] 37 | 38 | # List of patterns, relative to source directory, that match files and 39 | # directories to ignore when looking for source files. 40 | # This pattern also affects html_static_path and html_extra_path. 
41 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 42 | 43 | 44 | # -- Options for HTML output ------------------------------------------------- 45 | 46 | # The theme to use for HTML and HTML Help pages. See the documentation for 47 | # a list of builtin themes. 48 | # 49 | html_theme = "sphinx_rtd_theme" 50 | 51 | # These folders are copied to the documentation's HTML output 52 | html_static_path = ["_static"] 53 | 54 | # These paths are either relative to html_static_path 55 | # or fully qualified paths (eg. https://...) 56 | html_css_files = [ 57 | "css/custom.css", 58 | ] 59 | -------------------------------------------------------------------------------- /sphinx/index.rst: -------------------------------------------------------------------------------- 1 | |Tests| |Sphinx| |Coverage| |DOI| 2 | 3 | .. |Tests| image:: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/tests-python.yml/badge.svg 4 | :target: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/tests-python.yml 5 | .. |Sphinx| image:: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/deploy-sphinx.yml/badge.svg 6 | :target: https://github.com/haniffalab/webatlas-pipeline/actions/workflows/deploy-sphinx.yml 7 | .. |Coverage| image:: https://codecov.io/gh/haniffalab/webatlas-pipeline/branch/main/graph/badge.svg?token=7HQVFH08WJ 8 | :target: https://app.codecov.io/gh/haniffalab/webatlas-pipeline 9 | .. |DOI| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.7405818.svg 10 | :target: https://doi.org/10.5281/zenodo.7405818 11 | 12 | WebAtlas pipeline 13 | ================= 14 | 15 | This Nextflow pipeline processes spatial and single-cell experiment data for visualisation in `WebAtlas App`_. 16 | The pipeline generates data files for `supported data types`_, and builds a `view config`_. 17 | 18 | .. _WebAtlas App: https://github.com/haniffalab/webatlas-app 19 | .. _supported data types: https://vitessce.io/docs/data-types-file-types/ 20 | .. _view config: https://vitessce.io/docs/view-config-json/ 21 | 22 | .. toctree:: 23 | :maxdepth: 1 24 | :caption: Documentation 25 | :glob: 26 | 27 | installation 28 | configuration 29 | run 30 | visualise 31 | Demos 32 | testing 33 | modules 34 | 35 | Indices and tables 36 | ================== 37 | * :ref:`genindex` 38 | * :ref:`modindex` 39 | * :ref:`search` 40 | 41 | .. toctree:: 42 | :maxdepth: 2 43 | :caption: Multimodal 44 | 45 | multimodal/overview 46 | multimodal/configuration 47 | multimodal/run 48 | multimodal/visualise 49 | 50 | .. toctree:: 51 | :maxdepth: 2 52 | :hidden: 53 | :caption: Example Workflows 54 | 55 | examples/visium 56 | examples/xenium 57 | 58 | .. toctree:: 59 | :maxdepth: 2 60 | :hidden: 61 | :caption: Project Links 62 | 63 | citing 64 | Source Code 65 | Issue Tracker 66 | WebAtlas App 67 | 68 | .. toctree:: 69 | :maxdepth: 2 70 | :hidden: 71 | :caption: Project Teams 72 | 73 | Bayraktar Lab 74 | Haniffa Lab 75 | Open Microscopy Environment 76 | -------------------------------------------------------------------------------- /sphinx/installation.rst: -------------------------------------------------------------------------------- 1 | .. _official nextflow documentation: https://www.nextflow.io/index.html#GetStarted 2 | .. _official Docker Install guide: https://docs.docker.com/engine/install/ 3 | .. _releases on GitHub: https://github.com/haniffalab/webatlas-pipeline/releases 4 | .. _conda: https://docs.conda.io/projects/miniconda/en/latest/ 5 | .. 
_mamba: https://mamba.readthedocs.io/en/latest/mamba-installation.html 6 | 7 | .. _installation: 8 | 9 | Installation 10 | ============ 11 | 12 | Download the WebAtlas Pipeline release. You can find previous `releases on GitHub`_. 13 | 14 | Using `wget` 15 | """""""""""" 16 | 17 | .. code-block:: shell 18 | :caption: Input 19 | 20 | wget https://github.com/haniffalab/webatlas-pipeline/archive/refs/tags/v0.5.3.tar.gz 21 | 22 | .. code-block:: shell 23 | :caption: Expected Output 24 | 25 | Resolving github.com (github.com)... 140.82.121.3 26 | Connecting to github.com (github.com)|140.82.121.3|:443... connected. 27 | HTTP request sent, awaiting response... 302 Found 28 | Location: https://codeload.github.com/haniffalab/webatlas-pipeline/tar.gz/refs/tags/v0.5.3 [following] 29 | --2023-05-18 09:30:15-- https://codeload.github.com/haniffalab/webatlas-pipeline/tar.gz/refs/tags/v0.5.3 30 | Resolving codeload.github.com (codeload.github.com)... 140.82.121.9 31 | Connecting to codeload.github.com (codeload.github.com)|140.82.121.9|:443... connected. 32 | HTTP request sent, awaiting response... 200 OK 33 | Length: unspecified [application/x-gzip] 34 | Saving to: ‘v0.5.3.tar.gz’ 35 | 36 | v0.5.3.tar.gz [ <=> ] 2.70M 9.12MB/s in 0.3s 37 | 38 | 2023-05-18 09:30:16 (9.12 MB/s) - ‘v0.5.3.tar.gz’ saved [2835534] 39 | 40 | 41 | Using `curl` 42 | """""""""""" 43 | 44 | .. code-block:: shell 45 | :caption: Input 46 | 47 | curl -L -o v0.5.3.tar.gz https://github.com/haniffalab/webatlas-pipeline/archive/refs/tags/v0.5.3.tar.gz 48 | 49 | .. code-block:: shell 50 | :caption: Expected Output 51 | 52 | % Total % Received % Xferd Average Speed Time Time Time Current 53 | Dload Upload Total Spent Left Speed 54 | 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 55 | 100 2844k 0 2844k 0 0 1970k 0 --:--:-- 0:00:01 --:--:-- 2539k 56 | 57 | 58 | Extract the compressed WebAtlas release archive and change directory into the new repo. 59 | 60 | .. code-block:: shell 61 | :caption: Input 62 | 63 | tar -xzvf ./v0.5.3.tar.gz 64 | cd webatlas-pipeline-0.5.3 65 | 66 | .. code-block:: shell 67 | :caption: Expected Output 68 | 69 | webatlas-pipeline-0.5.3/ 70 | webatlas-pipeline-0.5.3/.github/ 71 | ... 72 | ... 73 | webatlas-pipeline-0.5.3/tests/input/simple_config.json 74 | webatlas-pipeline-0.5.3/tests/test_class.py 75 | 76 | .. _environment: 77 | 78 | Environment setup 79 | ================= 80 | 81 | .. _environment_conda: 82 | 83 | Follow these environment setup instructions, either using conda or by manually installing the required components, to run WebAtlas. 84 | 85 | Using conda 86 | ----------- 87 | 88 | If you have `conda`_ or `mamba`_ already installed then you can use the ``environment.yaml`` file included in the WebAtlas release to create the environment. 89 | 90 | .. code-block:: shell 91 | :caption: Input 92 | 93 | conda env create -f envs/environment.yaml 94 | 95 | Then make sure you activate the ``webatlas`` environment before you use the pipeline. 96 | 97 | .. code-block:: shell 98 | :caption: Input 99 | 100 | conda activate webatlas 101 | 102 | .. warning:: 103 | Users working on newer Silicon-based Macs may encounter problems installing this environment. 104 | Some packages have not yet been compiled for Apple silicon processors; therefore, 105 | we recommend installing the packages originally compiled for Intel-based Mac computers. Set 106 | an environment variable that specifies the architecture before installing and activating the Conda 107 | environment, like this: 108 | 109 | ..
code-block:: shell 110 | :caption: Input 111 | 112 | export CONDA_SUBDIR=osx-64 113 | conda env create -f envs/environment.yaml 114 | conda activate webatlas 115 | 116 | .. _environment_manual: 117 | 118 | Manual setup 119 | ------------ 120 | 121 | **#1. Check git is installed** 122 | 123 | Make sure git 2.17 or later is installed on your computer by using the command: 124 | 125 | .. code-block:: shell 126 | :caption: Input 127 | 128 | git --version 129 | 130 | .. code-block:: shell 131 | :caption: Output 132 | 133 | git version 2.25.1 134 | 135 | If Git is missing you will have to follow the `Getting Started Installing Git guide `__. 136 | 137 | **#2. Install Nextflow** 138 | 139 | Java is required by Nextflow. Refer to `Nextflow's guidelines `__ to install it. 140 | 141 | Enter the following command in your terminal to install Nextflow in the current directory: 142 | 143 | Using `wget` 144 | """""""""""" 145 | 146 | .. code-block:: shell 147 | :caption: Input 148 | 149 | wget -qO- https://get.nextflow.io | bash 150 | 151 | 152 | Using `curl` 153 | """""""""""" 154 | 155 | .. code-block:: shell 156 | :caption: Input 157 | 158 | curl -s https://get.nextflow.io | bash 159 | 160 | 161 | .. code-block:: shell 162 | :caption: Output 163 | 164 | CAPSULE: Downloading dependency org.apache.ivy:ivy:jar:2.5.1 165 | ... 166 | CAPSULE: Downloading dependency io.nextflow:nf-commons:jar:23.04.1 167 | 168 | N E X T F L O W 169 | version 23.04.1 build 5866 170 | created 15-04-2023 06:51 UTC (07:51 BST) 171 | cite doi:10.1038/nbt.3820 172 | http://nextflow.io 173 | 174 | 175 | Nextflow installation completed. Please note: 176 | - the executable file `nextflow` has been created in the folder: ./webatlas-pipeline 177 | - you may complete the installation by moving it to a directory in your $PATH 178 | 179 | You can read more about how to install nextflow in the `official nextflow documentation`_. 180 | 181 | **#3. Check Docker is installed (optional)** 182 | 183 | If you want to use Docker, make sure Docker Engine 20.10 or later is installed on your computer by using the command: 184 | 185 | .. code-block:: shell 186 | :caption: Input 187 | 188 | docker version 189 | 190 | .. code-block:: shell 191 | :caption: Output 192 | 193 | Client: Docker Engine - Community 194 | Version: 23.0.4 195 | API version: 1.42 196 | Go version: go1.19.8 197 | Git commit: f480fb1 198 | Built: Fri Apr 14 10:32:23 2023 199 | OS/Arch: linux/amd64 200 | Context: default 201 | 202 | Follow the `official Docker Install guide`_ if it is not installed already. 203 | -------------------------------------------------------------------------------- /sphinx/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.https://www.sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /sphinx/modules.rst: -------------------------------------------------------------------------------- 1 | 2 | Modules 3 | ======= 4 | 5 | .. automodule:: build_config 6 | :members: 7 | 8 | .. automodule:: constants 9 | :members: 10 | 11 | .. automodule:: consolidate_md 12 | :members: 13 | 14 | .. automodule:: generate_image 15 | :members: 16 | 17 | .. automodule:: ome_zarr_metadata 18 | :members: 19 | 20 | .. automodule:: process_h5ad 21 | :members: 22 | 23 | .. automodule:: process_molecules 24 | :members: 25 | 26 | .. automodule:: process_spaceranger 27 | :members: 28 | 29 | .. automodule:: process_xenium 30 | :members: 31 | 32 | .. automodule:: process_merscope 33 | :members: 34 | 35 | .. automodule:: integrate_anndata 36 | :members: 37 | 38 | .. automodule:: integrate_image 39 | :members: 40 | 41 | .. automodule:: build_config_multimodal 42 | :members: 43 | 44 | .. automodule:: write_spatialdata 45 | :members: 46 | -------------------------------------------------------------------------------- /sphinx/multimodal/overview.rst: -------------------------------------------------------------------------------- 1 | .. _multimodal_overview: 2 | 3 | ################### 4 | Multimodal overview 5 | ################### 6 | 7 | After the ``main.nf`` pipeline has been successfully run, WebAtlas can optionally process a group of multimodal datasets that 8 | share common features. This step prepares the unified multimodal visualisation for the web app. 9 | 10 | The data outputs generated by running the ``main.nf`` conversion pipeline serve as inputs for this multimodal integration pipeline. 11 | 12 | ******************************* 13 | Tasks completed by the pipeline 14 | ******************************* 15 | 16 | The multimodal integration pipeline performs several tasks: 17 | 18 | 1. Reindex each dataset by a user-specified offset so IDs do not clash between modalities. 19 | 2. *Optionally*, concatenate other observation-by-feature matrices or categorical values to the expression matrix to enable their visualisation as continuous values. For example, a celltype prediction matrix and/or celltype categories. 20 | 3. Find the intersection of features between all datasets and subset them to visualise only the intersection (including features not present in all datasets can produce misleading visualisations). 21 | **Note**: the features are intersected using their index in the AnnData objects (``var`` table). All datasets must use the same type of data as index for the intersection to be correctly computed. For example, all datasets use names as index, or all datasets use IDs as index. 22 | 23 | ******************************* 24 | Running the multimodal pipeline 25 | ******************************* 26 | 27 | Follow the instructions below to run the multimodal pipeline. 28 | 29 | 1. :ref:`Configure the parameters file <multimodal_configuration>` for the ``multimodal.nf`` pipeline 30 | 2. :ref:`Run <multimodal_run>` the ``multimodal.nf`` pipeline 31 | 3.
:ref:`Visualise <multimodal_visualise>` the multimodal data in a web browser 32 | -------------------------------------------------------------------------------- /sphinx/multimodal/run.rst: -------------------------------------------------------------------------------- 1 | .. _multimodal_run: 2 | 3 | Multimodal run 4 | ============== 5 | 6 | In addition to the main conversion pipeline, we offer a subsequent pipeline to process multiple datasets with matching features. This allows users to 7 | visualise and query all common features, such as genes and cell types, across all modalities from a single web portal. 8 | 9 | Configurations and data are specified through a :ref:`parameters yaml file <multimodal_configuration>` (slightly different from the parameters file required by the main pipeline). 10 | 11 | To run this pipeline use 12 | 13 | .. code-block:: shell 14 | 15 | nextflow run multimodal.nf -params-file /path/to/multimodal-params.yaml 16 | 17 | Running using Docker 18 | -------------------- 19 | 20 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Docker containers use the ``-profile docker`` option: 21 | 22 | .. code-block:: shell 23 | 24 | nextflow run multimodal.nf \ 25 | -params-file /path/to/multimodal-params.yaml \ 26 | -profile docker 27 | 28 | Pulling the containers when the pipeline is launched may take a few minutes. 29 | 30 | Running using Singularity 31 | ------------------------- 32 | 33 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Singularity containers use the ``-profile singularity`` option: 34 | 35 | .. code-block:: shell 36 | 37 | nextflow run multimodal.nf \ 38 | -params-file /path/to/multimodal-params.yaml \ 39 | -profile singularity 40 | 41 | Pulling the containers when the pipeline is launched may take a few minutes. 42 | 43 | Running using Conda 44 | ------------------- 45 | 46 | The default pipeline will run on the local executor without any type of environment creation. If you've already set up your conda environment you don't have to do anything else. 47 | 48 | However, if you are working on a compute cluster you will need to make sure the conda environment is available and active in your worker nodes. To run the pipeline using a new conda environment use the ``-profile conda`` option: 49 | 50 | .. code-block:: shell 51 | 52 | nextflow run multimodal.nf \ 53 | -params-file /path/to/multimodal-params.yaml \ 54 | -profile conda 55 | 56 | Creating the environment when the pipeline is launched may take a few minutes. 57 | 58 | Further reading 59 | --------------- 60 | 61 | For more information about Docker image pulling/local conda env creation in Nextflow please refer to Nextflow's official docs for `containers <https://www.nextflow.io/docs/latest/container.html>`__ and `conda <https://www.nextflow.io/docs/latest/conda.html>`__. -------------------------------------------------------------------------------- /sphinx/multimodal/visualise.rst: -------------------------------------------------------------------------------- 1 | .. _multimodal_visualise: 2 | 3 | Multimodal visualisation 4 | ======================== 5 | 6 | The pipeline generates a Vitessce view config file for each ``project`` to visualise its multiple ``datasets`` together. 7 | This file can then be used to load the views and data as configured in the parameters files. 8 | Unlike the main conversion pipeline, the layout of the view config file generated for multimodal datasets cannot be configured.
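If a different arrangement is needed, the generated config can be edited by hand after the run — a minimal sketch, assuming a file following the ``{project}-{dataset}-config.json`` naming pattern and a standard Vitessce ``layout`` list (the file name and component coordinates used here are illustrative, not pipeline output):

.. code-block:: python

    import json

    # Load a generated view config (hypothetical file name)
    with open("myproject-mydataset-config.json") as f:
        config = json.load(f)

    # Each entry in "layout" is a view with grid coordinates x, y, w, h
    for component in config.get("layout", []):
        print(component["component"], component["x"], component["y"],
              component["w"], component["h"])

    # e.g. stretch the first component across the full 12-column grid
    config["layout"][0].update({"x": 0, "y": 0, "w": 12, "h": 6})

    with open("myproject-mydataset-config.json", "w") as f:
        json.dump(config, f, indent=2)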
9 | 10 | For each spatial ``dataset`` (has ``is_spatial: true`` and contains either a ``raw_image`` or ``label_image``) the pipeline adds a spatial image component and a layer controller to the view config. 11 | For each non-spatial ``dataset`` (has ``is_spatial: false`` or contains no images) the pipeline adds a scatterplot component to the view config to visualise an embedding. 12 | As datasets have been subset to the shared features only, the pipeline adds one feature selection component to the view config from which the user can query across datasets. 13 | If an additional feature has been specified, the pipeline adds another feature selection component to the view config for that particular feature type. 14 | 15 | To set the layout of the view config, the pipeline simply concatenates components recursively. 16 | Thus, components might end up with less-than-ideal sizes. 17 | During visualisation the user can resize and reorganise the components as needed. 18 | Alternatively, the generated view config file can be manually modified to set a more appropriate layout. 19 | 20 | You can locally serve and visualize the data in a few steps. 21 | 22 | By default, the base ``url`` used within the view config is ``http://localhost:3000/`` 23 | (this can be changed in the :ref:`parameters file <multimodal_configuration>`). 24 | This ``url`` tells Vitessce to look for data at that location. 25 | 26 | You can set up an ``http`` server locally to serve the processed files so a Vitessce instance can load them. 27 | Several tools can set up an ``http`` server. 28 | We recommend using `serve <https://www.npmjs.com/package/serve>`__ (requires `Node.js <https://nodejs.org/>`__), 29 | but you can use any tool that can enable CORS. 30 | 31 | You can serve the view config file and data by specifying the output directory 32 | (note that the pipeline adds its version to the ``outdir`` defined in the :ref:`parameters file <multimodal_configuration>`). 33 | 34 | .. parsed-literal:: 35 | 36 | serve -C -p 3000 /path/to/outdir/|release|/ 37 | 38 | Make sure to enable CORS and set the appropriate port number. 39 | In this case, using `serve <https://www.npmjs.com/package/serve>`__, this is done through the ``-C`` and ``-p`` flags respectively. 40 | 41 | Your view configs should then be accessible at ``http://localhost:3000/{project}-{dataset}-config.json``. 42 | 43 | You can then load them in a Vitessce instance like the `WebAtlas app <https://github.com/haniffalab/webatlas-app>`__ 44 | deployed at `<https://webatlas.cog.sanger.ac.uk/latest/index.html>`__. 45 | 46 | Specify your locally served view config through the ``config`` parameter like 47 | ``https://webatlas.cog.sanger.ac.uk/latest/index.html?config=http://localhost:3000/{project}-{dataset}-config.json`` 48 | and load this URL in your browser to visualize your data in a Vitessce viewer. -------------------------------------------------------------------------------- /sphinx/run.rst: -------------------------------------------------------------------------------- 1 | .. _run: 2 | 3 | Running 4 | ======= 5 | 6 | The pipeline offers a workflow that processes data files and images and 7 | builds a Vitessce config file from the generated outputs. 8 | Alternatively, the workflow to process files only, or the workflow to process images only, 9 | can be called independently. 10 | 11 | Each of these workflows works as an entry point that can be specified when running the 12 | pipeline through the command line. 13 | 14 | - The ``Full_pipeline`` workflow runs the other workflows as needed and 15 | builds a Vitessce config file per dataset. 16 | - The ``Process_files`` workflow handles data files and their conversions.
17 | - The ``Process_images`` workflow handles image files and/or label image data and their conversions. 18 | 19 | The pipeline configuration and the location of input data are specified through the 20 | :ref:`parameters yaml file <configuration>`. This file is passed to Nextflow when 21 | running the pipeline. 22 | 23 | To run the ``Full_pipeline`` use 24 | 25 | .. code-block:: shell 26 | 27 | nextflow run main.nf -params-file /path/to/params.yaml -entry Full_pipeline 28 | 29 | 30 | This will handle all input files, whether they are data files or images, for all datasets 31 | defined. 32 | 33 | You can modify the entry point if you're interested in only getting the converted outputs. 34 | Use ``-entry Process_files`` or ``-entry Process_images`` as you need. 35 | 36 | Running using Conda 37 | ------------------- 38 | 39 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using conda environments use the ``-profile conda`` option: 40 | 41 | .. code-block:: shell 42 | 43 | nextflow run main.nf \ 44 | -params-file /path/to/params.yaml \ 45 | -entry Full_pipeline \ 46 | -profile conda 47 | 48 | Creating the environment when the pipeline is launched may take a few minutes. 49 | 50 | Running using Docker 51 | -------------------- 52 | 53 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Docker containers use the ``-profile docker`` option: 54 | 55 | .. code-block:: shell 56 | 57 | nextflow run main.nf \ 58 | -params-file /path/to/params.yaml \ 59 | -entry Full_pipeline \ 60 | -profile docker 61 | 62 | Pulling the containers when the pipeline is launched may take a few minutes. 63 | 64 | Running using Singularity 65 | ------------------------- 66 | 67 | The default pipeline will run on the local executor without any type of environment creation. To run the pipeline using Singularity containers use the ``-profile singularity`` option: 68 | 69 | .. code-block:: shell 70 | 71 | nextflow run main.nf \ 72 | -params-file /path/to/params.yaml \ 73 | -entry Full_pipeline \ 74 | -profile singularity 75 | 76 | Pulling the containers when the pipeline is launched may take a few minutes. 77 | 78 | Further reading 79 | --------------- 80 | 81 | For more information about Docker image pulling/local conda env creation in Nextflow please refer to Nextflow's official docs for `containers <https://www.nextflow.io/docs/latest/container.html>`__ and `conda <https://www.nextflow.io/docs/latest/conda.html>`__. -------------------------------------------------------------------------------- /sphinx/testing.rst: -------------------------------------------------------------------------------- 1 | .. _testing: 2 | 3 | Testing 4 | ======= 5 | 6 | Python testing 7 | -------------- 8 | 9 | Testing of python scripts uses `pytest`_. 10 | 11 | Set the :code:`PYTHONPATH` environment variable to the :code:`bin` directory where the scripts are stored, and then run the following command: 12 | 13 | :: 14 | 15 | python -m pytest -q tests/test_class.py 16 | 17 | .. _pytest: https://docs.pytest.org/en/7.1.x/ -------------------------------------------------------------------------------- /sphinx/visualise.rst: -------------------------------------------------------------------------------- 1 | .. _visualise: 2 | 3 | Visualising 4 | =========== 5 | 6 | The pipeline generates a Vitessce view config file for each processed dataset. 7 | This file can then be used to load the views and data as configured in the parameters files. 8 | 9 | You can locally serve and visualize the data in a few steps.
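As an alternative to the Node.js-based ``serve`` tool recommended below, a CORS-enabled static server can also be run with Python's standard library — a minimal sketch (the port and output directory are assumptions; match them to your parameters file):

.. code-block:: python

    # Minimal CORS-enabled static file server (Python >= 3.7)
    from functools import partial
    from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer

    class CORSRequestHandler(SimpleHTTPRequestHandler):
        def end_headers(self):
            # Allow a Vitessce viewer served from another origin to fetch the data
            self.send_header("Access-Control-Allow-Origin", "*")
            super().end_headers()

    handler = partial(CORSRequestHandler, directory="/path/to/outdir/0.5.3/")
    ThreadingHTTPServer(("0.0.0.0", 3000), handler).serve_forever()

The ``serve``-based steps follow.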
17 | .. _pytest: https://docs.pytest.org/en/7.1.x/
--------------------------------------------------------------------------------
/sphinx/visualise.rst:
--------------------------------------------------------------------------------
 1 | .. _visualise:
 2 | 
 3 | Visualising
 4 | ===========
 5 | 
 6 | The pipeline generates a Vitessce view config file for each processed dataset.
 7 | This file can then be used to load the views and data as configured in the parameters file.
 8 | 
 9 | You can locally serve and visualise the data in a few steps.
10 | 
11 | By default, the base ``url`` used within the view config is ``http://localhost:3000/``
12 | (this can be changed in the :ref:`parameters file `).
13 | This ``url`` tells Vitessce where to look for the data.
14 | 
15 | You can set up an ``http`` server locally to serve the processed files so a Vitessce instance can load them.
16 | Several tools can set up an ``http`` server.
17 | We recommend using `serve <https://www.npmjs.com/package/serve>`__ (requires `Node.js <https://nodejs.org/>`__),
18 | but you can use any tool that can enable CORS.
19 | 
20 | You can serve the view config file and data by specifying the output directory
21 | (note that the pipeline adds its version to the ``outdir`` defined in the :ref:`parameters file `).
22 | 
23 | .. parsed-literal::
24 | 
25 |     serve -C -p 3000 /path/to/outdir/|release|/
26 | 
27 | Make sure to enable CORS and set the appropriate port number.
28 | In this case, using ``serve``, this is done through the ``-C`` and ``-p`` flags respectively.
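If Node.js is not available, Python's standard library can serve the same directory.
Below is a minimal sketch (a hypothetical ``cors_serve.py`` helper, not part of the
pipeline) that adds the CORS header which ``serve -C`` would otherwise set:

.. code-block:: python

    # cors_serve.py -- run as: python cors_serve.py /path/to/outdir/<release>/
    import sys
    from functools import partial
    from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer

    class CORSRequestHandler(SimpleHTTPRequestHandler):
        def end_headers(self):
            # Allow a Vitessce instance on another origin to fetch the files
            self.send_header("Access-Control-Allow-Origin", "*")
            super().end_headers()

    if __name__ == "__main__":
        directory = sys.argv[1] if len(sys.argv) > 1 else "."
        handler = partial(CORSRequestHandler, directory=directory)
        ThreadingHTTPServer(("localhost", 3000), handler).serve_forever()

As with ``serve``, point it at the versioned output directory and keep the port in sync
with the ``url`` set in the parameters file.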
29 | 
30 | Your view configs should then be accessible at ``http://localhost:3000/{project}-{dataset}-config.json``.
31 | 
32 | You can then load them in a Vitessce instance like the WebAtlas app
33 | deployed at ``https://webatlas.cog.sanger.ac.uk/latest/index.html``.
34 | 
35 | Specify your locally served view config through the ``config`` parameter, like
36 | ``https://webatlas.cog.sanger.ac.uk/latest/index.html?config=http://localhost:3000/{project}-{dataset}-config.json``,
37 | and load this URL in your browser to visualise your data in a Vitessce viewer.
--------------------------------------------------------------------------------
/templates/examples/CytAssist_FFPE_Human_Breast_Cancer.yaml:
--------------------------------------------------------------------------------
 1 | outdir: ./output/CytAssist_FFPE_Human_Breast_Cancer/
 2 | 
 3 | args:
 4 |   spaceranger:
 5 |     save_h5ad: True
 6 | 
 7 | projects:
 8 |   - project: visium
 9 |     datasets:
10 |       - dataset: breast-cancer
11 |         title: "Visium CytAssist - High resolution mapping of the breast cancer tumor microenvironment"
12 |         data:
13 |           - data_type: spaceranger
14 |             data_path: ./input/CytAssist_FFPE_Human_Breast_Cancer/
15 |           - data_type: raw_image
16 |             data_path: ./input/CytAssist_FFPE_Human_Breast_Cancer/tissue_image.tif
17 |           - data_type: label_image_data
18 |             data_path: ./input/CytAssist_FFPE_Human_Breast_Cancer/
19 |             file_type: visium
20 |             ref_img: ./input/CytAssist_FFPE_Human_Breast_Cancer/tissue_image.tif
21 | 
22 | vitessce_options:
23 |   spatial:
24 |     xy: "obsm/spatial"
25 |   matrix: "X"
26 |   layout: "advanced"
27 | 
--------------------------------------------------------------------------------
/templates/examples/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs.yaml:
--------------------------------------------------------------------------------
 1 | outdir: ./output/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/
 2 | 
 3 | args:
 4 |   h5ad:
 5 |     var_index: "gene_name"
 6 | 
 7 | projects:
 8 |   - project: xenium
 9 |     datasets:
10 |       - dataset: breast-cancer
11 |         title: "Xenium - High resolution mapping of the breast cancer tumor microenvironment"
12 |         data:
13 |           - data_type: xenium
14 |             data_path: ./input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/
15 |           - data_type: raw_image
16 |             data_path: ./input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/morphology.ome.tif
17 |           - data_type: label_image_data
18 |             data_path: ./input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/
19 |             file_type: xenium
20 |             ref_img: ./input/Xenium_FFPE_Human_Breast_Cancer_Rep1_outs/outs/morphology.ome.tif
21 | 
22 | vitessce_options:
23 |   spatial:
24 |     xy: "obsm/X_spatial"
25 |   mappings:
26 |     obsm/X_umap: [0, 1]
27 |   matrix: "X"
28 |   layout: advanced
29 | 
--------------------------------------------------------------------------------
/templates/iss_template.yaml:
--------------------------------------------------------------------------------
 1 | outdir: ./output/
 2 | 
 3 | args:
 4 |   h5ad:
 5 |     compute_embeddings: False
 6 | 
 7 | projects:
 8 |   - project: test
 9 |     datasets:
10 |       - dataset: iss
11 |         title: "Test ISS Dataset"
12 |         data:
13 |           - data_type: h5ad
14 |             data_path: /path/to/test/iss/anndata.h5ad
15 |           - data_type: raw_image
16 |             data_path: /path/to/test/iss/raw_image.tif
17 |           - data_type: label_image
18 |             data_path: /path/to/iss/label_image.tif
19 | 
20 | vitessce_options:
21 |   spatial:
22 |     xy: "obsm/spatial"
23 |   mappings:
24 |     obsm/X_umap: [0, 1]
25 |     obsm/X_pca: [0, 1]
26 |   factors:
27 |     - "obs/sample"
28 |   sets:
29 |     - "obs/cluster"
30 |     - name: "obs/celltype"
31 |       score: "obs/celltype_prob"
32 |   matrix: "X"
33 |   layout: advanced # advanced, minimal
34 |   # custom_layout: "spatial|((scatterplot/scatterplot)|(genes/cellSets))"
35 | 
--------------------------------------------------------------------------------
/templates/merscope_template.yaml:
--------------------------------------------------------------------------------
 1 | outdir: ./output/
 2 | 
 3 | args:
 4 |   h5ad:
 5 |     compute_embeddings: True
 6 | 
 7 | projects:
 8 |   - project: test
 9 |     datasets:
10 |       - dataset: merscope
11 |         title: "Test MERSCOPE Dataset"
12 |         data:
13 |           - data_type: merscope
14 |             data_path: /path/to/test/merscope/
15 |           - data_type: raw_image_data
16 |             data_path: /path/to/merscope/
17 |             file_type: merscope
18 |           - data_type: label_image_data
19 |             data_path: /path/to/merscope/
20 |             file_type: merscope
21 |             shape: [10000, 10000]
22 | 
23 | vitessce_options:
24 |   spatial:
25 |     xy: "obsm/spatial"
26 |   mappings:
27 |     obsm/X_umap: [0, 1]
28 |     obsm/X_pca: [0, 1]
29 |   factors:
30 |     - "obs/sample"
31 |   sets:
32 |     - "obs/clustering"
33 |   matrix: "X"
34 |   layout: advanced
35 |   # custom_layout: "spatial|((scatterplot/scatterplot)|(genes/cellSets))"
36 | 
--------------------------------------------------------------------------------
/templates/multimodal-template.yaml:
--------------------------------------------------------------------------------
 1 | url: http://localhost/
 2 | project: HZ_HLB
 3 | title: "lowerlimb test"
 4 | description: ""
 5 | outdir: ./output/
 6 | extend_feature_name: "celltype"
 7 | 
 8 | data:
 9 |   - dataset: iss_demo
10 |     obs_type: "cell"
11 |     anndata: test-iss-anndata.zarr
12 |     offset: 0
13 |     is_spatial: true
14 |     raw_image: test-iss-raw.zarr
15 |     label_image: test-iss-label.zarr
16 |     extend_feature: obs/celltype
17 |     vitessce_options:
18 |       spatial:
19 |         xy: "obsm/spatial"
20 |       mappings:
21 |         obsm/X_umap: [0, 1]
22 |       factors:
23 |         - "obs/celltype"
24 |       sets:
25 |         - "obs/celltype"
26 |       matrix: "X"
27 |   - dataset: visium_demo
28 |     obs_type: "spot"
29 |     anndata: test-visium-anndata.zarr
30 |     offset: 1000000
31 |     is_spatial: true
32 |     raw_image: test-visium-raw.zarr
33 |     label_image: test-visium-label.zarr
34 |     extend_feature:
35 |       path: /path/to/c2l_output.h5ad
36 |       args:
37 |         sample: ["library_id", "sample_1"]
38 |     vitessce_options:
39 |       spatial:
40 |         xy: "obsm/spatial"
41 |       matrix: "X"
42 |   - dataset: scrnaseq_demo
43 |     obs_type: "cell"
44 |     anndata: test-scrnaseq-anndata.zarr
45 |     offset: 2000000
46 |     is_spatial: false
47 |     extend_feature: obs/celltype
48 |     vitessce_options:
49 |       spatial:
50 |         xy: "obsm/spatial"
51 |       mappings:
52 |         obsm/X_umap: [0, 1]
53 |       factors:
54 |         - 
"obs/celltype" 55 | sets: 56 | - "obs/celltype" 57 | matrix: "X" 58 | -------------------------------------------------------------------------------- /templates/scrnaseq_s3_template.yaml: -------------------------------------------------------------------------------- 1 | outdir: ./output/ 2 | 3 | args: 4 | h5ad: 5 | compute_embeddings: True 6 | 7 | projects: 8 | - project: test 9 | datasets: 10 | - dataset: scRNAseq 11 | title: "Test scRNAseq Dataset" 12 | data: 13 | - data_type: h5ad 14 | data_path: s3://bucket/path/to/anndata.h5ad 15 | 16 | vitessce_options: 17 | mappings: 18 | obsm/X_umap: [0, 1] 19 | obsm/X_pca: [0, 1] 20 | sets: 21 | - "obs/tissue" 22 | - name: "obs/celltype" 23 | - name: "obs/celltype" 24 | score: "obs/celltype_prob" 25 | matrix: "X" 26 | # layout: 'advanced' # advanced, minimal 27 | custom_layout: "((scatterplot/scatterplot)|(genes/cellSets))" 28 | -------------------------------------------------------------------------------- /templates/scrnaseq_template.yaml: -------------------------------------------------------------------------------- 1 | outdir: ./output/ 2 | 3 | args: 4 | h5ad: 5 | compute_embeddings: True 6 | 7 | projects: 8 | - project: test 9 | datasets: 10 | - dataset: scRNAseq 11 | title: "Test scRNAseq Dataset" 12 | data: 13 | - data_type: h5ad 14 | data_path: /path/to/test/scrnaseq/anndata.h5ad 15 | 16 | vitessce_options: 17 | mappings: 18 | obsm/X_umap: [0, 1] 19 | obsm/X_pca: [0, 1] 20 | sets: 21 | - "obs/tissue" 22 | - name: "obs/celltype" 23 | - name: "obs/celltype" 24 | score: "obs/celltype_prob" 25 | matrix: "X" 26 | # layout: 'advanced' # advanced, minimal 27 | custom_layout: ((scatterplot/scatterplot)|(genes/cellSets)) 28 | -------------------------------------------------------------------------------- /templates/template.yaml: -------------------------------------------------------------------------------- 1 | outdir: ./output/ 2 | 3 | args: 4 | h5ad: 5 | batch_processing: True 6 | 7 | projects: 8 | - project: project_1 9 | datasets: 10 | - dataset: scRNAseq 11 | title: "Project 1 scRNAseq Dataset" 12 | data: 13 | - data_type: h5ad 14 | data_path: /path/to/project_1/scrnaseq/anndata.h5ad 15 | - project: project_2 16 | datasets: 17 | - dataset: visium 18 | title: "Project 2 Visium Dataset" 19 | data: 20 | - data_type: h5ad 21 | data_path: /path/to/project_2/visium/anndata.h5ad 22 | - data_type: raw_image 23 | data_path: /path/to/project_2/visium/raw_image.tif 24 | - data_type: label_image_data 25 | data_path: /path/to/project_2/visium/anndata.h5ad 26 | file_type: visium 27 | ref_img: /path/to/project_2/visium/raw_image.tif 28 | - project: project_3 29 | datasets: 30 | - dataset: iss 31 | title: "Project 3 ISS Dataset" 32 | data: 33 | - data_type: h5ad 34 | data_path: /path/to/project_3/iss/anndata.h5ad 35 | - data_type: raw_image 36 | data_path: /path/to/project_3/iss/raw_image.tif 37 | - data_type: label_image 38 | data_path: /path/to/project_3/iss/label_image.tif 39 | 40 | vitessce_options: 41 | mappings: 42 | obsm/X_umap: [0, 1] 43 | factors: 44 | - "obs/sample" 45 | sets: 46 | - "obs/celltype" 47 | matrix: "X" 48 | layout: simple 49 | # custom_layout: "spatial|((scatterplot/scatterplot)|(genes/cellSets))" 50 | -------------------------------------------------------------------------------- /templates/visium_template.yaml: -------------------------------------------------------------------------------- 1 | outdir: ./output/ 2 | 3 | args: 4 | h5ad: 5 | compute_embeddings: True 6 | spaceranger: 7 | save_h5ad: True 8 | 9 | projects: 10 | - 
project: test 11 | datasets: 12 | - dataset: visium 13 | title: "Test Visium Dataset" 14 | data: 15 | - data_type: spaceranger 16 | data_path: /path/to/test/visium/slide_1/ 17 | - data_type: raw_image 18 | data_path: /path/to/visium/slide_1/raw_image.tif 19 | - data_type: label_image_data 20 | data_path: /path/to/visium/slide_1/ 21 | file_type: visium 22 | ref_img: /path/to/visium/slide_1/raw_image.tif 23 | 24 | vitessce_options: 25 | spatial: 26 | xy: "obsm/spatial" 27 | mappings: 28 | obsm/X_umap: [0, 1] 29 | obsm/X_pca: [0, 1] 30 | factors: 31 | - "obs/sample" 32 | sets: 33 | - "obs/clustering" 34 | matrix: "X" 35 | layout: advanced 36 | # custom_layout: "spatial|((scatterplot/scatterplot)|(genes/cellSets))" 37 | -------------------------------------------------------------------------------- /templates/xenium_template.yaml: -------------------------------------------------------------------------------- 1 | outdir: ./output/ 2 | 3 | args: 4 | h5ad: 5 | compute_embeddings: True 6 | 7 | projects: 8 | - project: test 9 | datasets: 10 | - dataset: xenium 11 | title: "Test Xenium Dataset" 12 | url: "" 13 | data: 14 | - data_type: xenium 15 | data_path: /path/to/test/xenium/ 16 | - data_type: raw_image 17 | data_path: /path/to/xenium/raw_image.tif 18 | - data_type: label_image_data 19 | data_path: /path/to/xenium/ 20 | file_type: xenium 21 | ref_img: /path/to/xenium/raw_image.tif 22 | 23 | vitessce_options: 24 | spatial: 25 | xy: "obsm/X_spatial" 26 | mappings: 27 | obsm/X_umap: [0, 1] 28 | obsm/X_pca: [0, 1] 29 | sets: 30 | - "obs/graphclust" 31 | matrix: "X" 32 | layout: advanced 33 | # custom_layout: "spatial|((scatterplot/scatterplot)|(genes/cellSets))" 34 | -------------------------------------------------------------------------------- /tests/expected_output/custom_test_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0.7", 3 | "name": "", 4 | "description": "", 5 | "datasets": [ 6 | { 7 | "uid": "test", 8 | "name": "custom", 9 | "files": [ 10 | { 11 | "type": "raster", 12 | "fileType": "raster.json", 13 | "options": { 14 | "renderLayers": [ 15 | "image_raw", 16 | "image_label", 17 | "image2_label" 18 | ], 19 | "schemaVersion": "0.0.2", 20 | "images": [ 21 | { 22 | "name": "image_raw", 23 | "url": "http://localhost/image_raw.zarr", 24 | "type": "zarr", 25 | "metadata": { 26 | "isBitmask": false, 27 | "dimensions": [ 28 | { 29 | "field": "t", 30 | "type": "quantitative", 31 | "values": null 32 | }, 33 | { 34 | "field": "channel", 35 | "type": "nominal", 36 | "values": [ 37 | "Channel 1", 38 | "Channel 2" 39 | ] 40 | }, 41 | { 42 | "field": "y", 43 | "type": "quantitative", 44 | "values": null 45 | }, 46 | { 47 | "field": "x", 48 | "type": "quantitative", 49 | "values": null 50 | } 51 | ], 52 | "isPyramid": true, 53 | "transform": { 54 | "translate": { 55 | "y": 0, 56 | "x": 0 57 | }, 58 | "scale": 1 59 | } 60 | } 61 | }, 62 | { 63 | "name": "image_label", 64 | "url": "http://localhost/image_label.zarr", 65 | "type": "zarr", 66 | "metadata": { 67 | "isBitmask": true, 68 | "dimensions": [ 69 | { 70 | "field": "t", 71 | "type": "quantitative", 72 | "values": null 73 | }, 74 | { 75 | "field": "channel", 76 | "type": "nominal", 77 | "values": [ 78 | "Labels" 79 | ] 80 | }, 81 | { 82 | "field": "y", 83 | "type": "quantitative", 84 | "values": null 85 | }, 86 | { 87 | "field": "x", 88 | "type": "quantitative", 89 | "values": null 90 | } 91 | ], 92 | "isPyramid": true, 93 | "transform": { 94 | "translate": { 95 | "y": 0, 96 | 
"x": 0 97 | }, 98 | "scale": 1 99 | } 100 | } 101 | }, 102 | { 103 | "name": "image2_label", 104 | "url": "http://localhost/image2_label.zarr", 105 | "type": "zarr", 106 | "metadata": { 107 | "isBitmask": true, 108 | "dimensions": [ 109 | { 110 | "field": "t", 111 | "type": "quantitative", 112 | "values": null 113 | }, 114 | { 115 | "field": "channel", 116 | "type": "nominal", 117 | "values": [ 118 | "Labels" 119 | ] 120 | }, 121 | { 122 | "field": "y", 123 | "type": "quantitative", 124 | "values": null 125 | }, 126 | { 127 | "field": "x", 128 | "type": "quantitative", 129 | "values": null 130 | } 131 | ], 132 | "isPyramid": true, 133 | "transform": { 134 | "translate": { 135 | "y": 0, 136 | "x": 0 137 | }, 138 | "scale": 1 139 | } 140 | } 141 | } 142 | ] 143 | } 144 | }, 145 | { 146 | "type": "cells", 147 | "fileType": "anndata-cells.zarr", 148 | "url": "http://localhost/anndata.zarr", 149 | "options": { 150 | "xy": "obsm/spatial", 151 | "mappings": { 152 | "X_UMAP": { 153 | "key": "obsm/X_umap", 154 | "dims": [ 155 | 0, 156 | 1 157 | ] 158 | }, 159 | "X_PCA": { 160 | "key": "obsm/X_pca", 161 | "dims": [ 162 | 3, 163 | 4 164 | ] 165 | } 166 | } 167 | } 168 | }, 169 | { 170 | "type": "cell-sets", 171 | "fileType": "anndata-cell-sets.zarr", 172 | "url": "http://localhost/anndata.zarr", 173 | "options": [ 174 | { 175 | "groupName": "Tissue", 176 | "setName": "obs/tissue" 177 | }, 178 | { 179 | "groupName": "Celltype", 180 | "setName": "obs/celltype" 181 | } 182 | ] 183 | }, 184 | { 185 | "type": "expression-matrix", 186 | "fileType": "anndata-expression-matrix.zarr", 187 | "url": "http://localhost/anndata.zarr", 188 | "options": { 189 | "matrix": "X" 190 | } 191 | } 192 | ] 193 | } 194 | ], 195 | "coordinationSpace": { 196 | "dataset": { 197 | "A": "test" 198 | }, 199 | "embeddingType": { 200 | "X_UMAP": "X_UMAP", 201 | "X_PCA": "X_PCA" 202 | } 203 | }, 204 | "layout": [ 205 | { 206 | "component": "spatial", 207 | "coordinationScopes": { 208 | "dataset": "A" 209 | }, 210 | "x": 0, 211 | "y": 0, 212 | "w": 6, 213 | "h": 6 214 | }, 215 | { 216 | "component": "heatmap", 217 | "coordinationScopes": { 218 | "dataset": "A" 219 | }, 220 | "x": 0, 221 | "y": 6, 222 | "w": 6, 223 | "h": 6 224 | }, 225 | { 226 | "component": "genes", 227 | "coordinationScopes": { 228 | "dataset": "A" 229 | }, 230 | "x": 6, 231 | "y": 0, 232 | "w": 6, 233 | "h": 6 234 | }, 235 | { 236 | "component": "scatterplot", 237 | "coordinationScopes": { 238 | "dataset": "A", 239 | "embeddingType": "X_UMAP" 240 | }, 241 | "x": 6, 242 | "y": 6, 243 | "w": 3, 244 | "h": 6 245 | }, 246 | { 247 | "component": "cellSets", 248 | "coordinationScopes": { 249 | "dataset": "A" 250 | }, 251 | "x": 9, 252 | "y": 6, 253 | "w": 3, 254 | "h": 6 255 | } 256 | ], 257 | "initStrategy": "auto" 258 | } -------------------------------------------------------------------------------- /tests/expected_output/minimal_test_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0.7", 3 | "name": "", 4 | "description": "", 5 | "datasets": [ 6 | { 7 | "uid": "test", 8 | "name": "minimal", 9 | "files": [ 10 | { 11 | "type": "cells", 12 | "fileType": "anndata-cells.zarr", 13 | "url": "http://localhost/anndata.zarr", 14 | "options": { 15 | "xy": "obsm/spatial", 16 | "mappings": { 17 | "X_UMAP": { 18 | "key": "obsm/X_umap", 19 | "dims": [ 20 | 0, 21 | 1 22 | ] 23 | } 24 | }, 25 | "factors": [ 26 | "obs/sample" 27 | ] 28 | } 29 | }, 30 | { 31 | "type": "cell-sets", 32 | "fileType": 
"anndata-cell-sets.zarr", 33 | "url": "http://localhost/anndata.zarr", 34 | "options": [ 35 | { 36 | "groupName": "Sample", 37 | "setName": "obs/sample" 38 | } 39 | ] 40 | }, 41 | { 42 | "type": "expression-matrix", 43 | "fileType": "anndata-expression-matrix.zarr", 44 | "url": "http://localhost/anndata.zarr", 45 | "options": { 46 | "matrix": "X" 47 | } 48 | } 49 | ] 50 | } 51 | ], 52 | "coordinationSpace": { 53 | "dataset": { 54 | "A": "test" 55 | }, 56 | "embeddingType": { 57 | "X_UMAP": "X_UMAP" 58 | } 59 | }, 60 | "layout": [ 61 | { 62 | "component": "spatial", 63 | "coordinationScopes": { 64 | "dataset": "A" 65 | }, 66 | "x": 0, 67 | "y": 0, 68 | "w": 6, 69 | "h": 12 70 | }, 71 | { 72 | "component": "layerController", 73 | "coordinationScopes": { 74 | "dataset": "A" 75 | }, 76 | "x": 6, 77 | "y": 0, 78 | "w": 6, 79 | "h": 12 80 | } 81 | ], 82 | "initStrategy": "auto" 83 | } -------------------------------------------------------------------------------- /tests/expected_output/simple_test_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0.7", 3 | "name": "", 4 | "description": "", 5 | "datasets": [ 6 | { 7 | "uid": "test", 8 | "name": "simple", 9 | "files": [ 10 | { 11 | "type": "raster", 12 | "fileType": "raster.json", 13 | "options": { 14 | "renderLayers": [ 15 | "image_raw", 16 | "image_label" 17 | ], 18 | "schemaVersion": "0.0.2", 19 | "images": [ 20 | { 21 | "name": "image_raw", 22 | "url": "http://localhost/image_raw.zarr", 23 | "type": "zarr", 24 | "metadata": { 25 | "isBitmask": false, 26 | "dimensions": [ 27 | { 28 | "field": "t", 29 | "type": "quantitative", 30 | "values": null 31 | }, 32 | { 33 | "field": "channel", 34 | "type": "nominal", 35 | "values": [] 36 | }, 37 | { 38 | "field": "y", 39 | "type": "quantitative", 40 | "values": null 41 | }, 42 | { 43 | "field": "x", 44 | "type": "quantitative", 45 | "values": null 46 | } 47 | ], 48 | "isPyramid": true, 49 | "transform": { 50 | "translate": { 51 | "y": 0, 52 | "x": 0 53 | }, 54 | "scale": 1 55 | } 56 | } 57 | }, 58 | { 59 | "name": "image_label", 60 | "url": "http://localhost/image_label.zarr", 61 | "type": "zarr", 62 | "metadata": { 63 | "isBitmask": true, 64 | "dimensions": [ 65 | { 66 | "field": "t", 67 | "type": "quantitative", 68 | "values": null 69 | }, 70 | { 71 | "field": "channel", 72 | "type": "nominal", 73 | "values": [ 74 | "Labels" 75 | ] 76 | }, 77 | { 78 | "field": "y", 79 | "type": "quantitative", 80 | "values": null 81 | }, 82 | { 83 | "field": "x", 84 | "type": "quantitative", 85 | "values": null 86 | } 87 | ], 88 | "isPyramid": true, 89 | "transform": { 90 | "translate": { 91 | "y": 0, 92 | "x": 0 93 | }, 94 | "scale": 1 95 | } 96 | } 97 | } 98 | ] 99 | } 100 | }, 101 | { 102 | "type": "cells", 103 | "fileType": "anndata-cells.zarr", 104 | "url": "http://localhost/anndata.zarr", 105 | "options": { 106 | "xy": "obsm/spatial", 107 | "mappings": { 108 | "X_UMAP": { 109 | "key": "obsm/X_umap", 110 | "dims": [ 111 | 0, 112 | 1 113 | ] 114 | } 115 | } 116 | } 117 | }, 118 | { 119 | "type": "cell-sets", 120 | "fileType": "anndata-cell-sets.zarr", 121 | "url": "http://localhost/anndata.zarr", 122 | "options": [ 123 | { 124 | "groupName": "Tissue", 125 | "setName": "obs/tissue" 126 | }, 127 | { 128 | "groupName": "Celltype", 129 | "setName": "obs/celltype" 130 | } 131 | ] 132 | }, 133 | { 134 | "type": "expression-matrix", 135 | "fileType": "anndata-expression-matrix.zarr", 136 | "url": "http://localhost/anndata.zarr", 137 | "options": { 138 
| "matrix": "X" 139 | } 140 | } 141 | ] 142 | } 143 | ], 144 | "coordinationSpace": { 145 | "dataset": { 146 | "A": "test" 147 | }, 148 | "embeddingType": { 149 | "X_UMAP": "X_UMAP" 150 | } 151 | }, 152 | "layout": [ 153 | { 154 | "component": "spatial", 155 | "coordinationScopes": { 156 | "dataset": "A" 157 | }, 158 | "x": 0, 159 | "y": 0, 160 | "w": 6, 161 | "h": 12 162 | }, 163 | { 164 | "component": "layerController", 165 | "coordinationScopes": { 166 | "dataset": "A" 167 | }, 168 | "x": 6, 169 | "y": 0, 170 | "w": 3, 171 | "h": 12 172 | }, 173 | { 174 | "component": "genes", 175 | "coordinationScopes": { 176 | "dataset": "A" 177 | }, 178 | "x": 9, 179 | "y": 0, 180 | "w": 3, 181 | "h": 6 182 | }, 183 | { 184 | "component": "cellSets", 185 | "coordinationScopes": { 186 | "dataset": "A" 187 | }, 188 | "x": 9, 189 | "y": 6, 190 | "w": 3, 191 | "h": 6 192 | } 193 | ], 194 | "initStrategy": "auto" 195 | } -------------------------------------------------------------------------------- /tests/input/advanced_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "advanced", 3 | "dataset": "test", 4 | "file_paths": [ 5 | "anndata.zarr" 6 | ], 7 | "image_zarr": { 8 | "image_raw.zarr": { 9 | "channel_names": ["Channel 1", "Channel 2"] 10 | }, 11 | "image_label.zarr": "", 12 | "image2_label.zarr": "" 13 | }, 14 | "url": "http://localhost/", 15 | "options": { 16 | "spatial": { 17 | "xy": "obsm/spatial" 18 | }, 19 | "mappings": { 20 | "obsm/X_umap": [0,1], 21 | "obsm/X_pca": [3,4] 22 | }, 23 | "sets": [ 24 | "obs/tissue", 25 | "obs/celltype" 26 | ], 27 | "matrix": "X" 28 | }, 29 | "layout": "advanced" 30 | } 31 | -------------------------------------------------------------------------------- /tests/input/custom_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "custom", 3 | "dataset": "test", 4 | "file_paths": [ 5 | "anndata.zarr" 6 | ], 7 | "image_zarr": { 8 | "image_raw.zarr": { 9 | "channel_names": ["Channel 1", "Channel 2"] 10 | }, 11 | "image_label.zarr": "", 12 | "image2_label.zarr": "" 13 | }, 14 | "url": "http://localhost/", 15 | "options": { 16 | "spatial": { 17 | "xy": "obsm/spatial" 18 | }, 19 | "mappings": { 20 | "obsm/X_umap": [0,1], 21 | "obsm/X_pca": [3,4] 22 | }, 23 | "sets": [ 24 | "obs/tissue", 25 | "obs/celltype" 26 | ], 27 | "matrix": "X" 28 | }, 29 | "custom_layout": "(spatial/heatmap)|(genes/(scatterplot|cellSets))" 30 | } 31 | -------------------------------------------------------------------------------- /tests/input/minimal_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "minimal", 3 | "dataset": "test", 4 | "file_paths": [ 5 | "anndata.zarr" 6 | ], 7 | "url": "http://localhost/" 8 | } -------------------------------------------------------------------------------- /tests/input/simple_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "simple", 3 | "dataset": "test", 4 | "file_paths": [ 5 | "anndata.zarr" 6 | ], 7 | "image_zarr": { 8 | "image_raw.zarr": "", 9 | "image_label.zarr": "" 10 | }, 11 | "url": "http://localhost/", 12 | "options": { 13 | "spatial": { 14 | "xy": "obsm/spatial" 15 | }, 16 | "mappings": { 17 | "obsm/X_umap": [0,1] 18 | }, 19 | "sets": [ 20 | "obs/tissue", 21 | "obs/celltype" 22 | ], 23 | "matrix": "X" 24 | }, 25 | "layout": "simple" 26 | } 
-------------------------------------------------------------------------------- /tests/test_class_multimodal.py: -------------------------------------------------------------------------------- 1 | import operator 2 | import os 3 | from functools import reduce 4 | 5 | from bin.build_config_multimodal import write_json as write_json_multimodal 6 | 7 | 8 | def iss_dataset(name="iss_dataset"): 9 | dataset = { 10 | f"{name}": { 11 | "file_paths": [f"test_project-{name}-anndata.zarr"], 12 | "images": { 13 | "raw": [ 14 | { 15 | "path": f"/path/to/iss/{name}-raw-image.zarr", 16 | "md": { 17 | "dimOrder": "XYZT", 18 | "channel_names": ["Channel_1"], 19 | "X": 10, 20 | "Y": 10, 21 | "Z": 1, 22 | "C": 1, 23 | "T": 0, 24 | }, 25 | } 26 | ], 27 | "label": [ 28 | { 29 | "path": f"/path/to/iss/{name}-label-image.zarr", 30 | "md": { 31 | "dimOrder": "XYZT", 32 | "channel_names": ["Channel_1"], 33 | "X": 10, 34 | "Y": 10, 35 | "Z": 1, 36 | "C": 1, 37 | "T": 0, 38 | }, 39 | } 40 | ], 41 | }, 42 | "options": { 43 | "matrix": "X", 44 | "factors": ["obs/sample"], 45 | "mappings": {"obsm/X_umap": [0, 1]}, 46 | "sets": ["obs/cluster", "obs/celltype"], 47 | "spatial": {"xy": "obsm/spatial"}, 48 | }, 49 | "obs_type": "cell", 50 | "is_spatial": True, 51 | } 52 | } 53 | return dataset 54 | 55 | 56 | def visium_dataset(name="visium_dataset"): 57 | dataset = { 58 | f"{name}": { 59 | "file_paths": [f"test_project-{name}-anndata.zarr"], 60 | "images": { 61 | "raw": [ 62 | { 63 | "path": f"/path/to/visium/{name}-raw-image.zarr", 64 | "md": { 65 | "dimOrder": "XYZT", 66 | "channel_names": ["Channel_1"], 67 | "X": 10, 68 | "Y": 10, 69 | "Z": 1, 70 | "C": 1, 71 | "T": 0, 72 | }, 73 | } 74 | ], 75 | "label": [ 76 | { 77 | "path": f"/path/to/visium/{name}-label-image.zarr", 78 | "md": { 79 | "dimOrder": "XYZT", 80 | "channel_names": ["Channel_1"], 81 | "X": 10, 82 | "Y": 10, 83 | "Z": 1, 84 | "C": 1, 85 | "T": 0, 86 | }, 87 | } 88 | ], 89 | }, 90 | "options": { 91 | "matrix": "X", 92 | "factors": ["obs/sample"], 93 | "mappings": {"obsm/X_umap": [0, 1]}, 94 | "spatial": {"xy": "obsm/spatial"}, 95 | }, 96 | "obs_type": "spot", 97 | "is_spatial": True, 98 | } 99 | } 100 | return dataset 101 | 102 | 103 | def scrnaseq_dataset(name="scrnaseq_dataset"): 104 | dataset = { 105 | f"{name}": { 106 | "file_paths": [f"test_project-{name}-anndata.zarr"], 107 | "options": { 108 | "matrix": "X", 109 | "factors": ["obs/sample"], 110 | "sets": ["obs/celltype"], 111 | "mappings": {"obsm/X_umap": [0, 1]}, 112 | "spatial": {"xy": "obsm/spatial"}, 113 | }, 114 | "obs_type": "cell", 115 | "is_spatial": False, 116 | } 117 | } 118 | return dataset 119 | 120 | 121 | class TestClass: 122 | def test_build_config_multimodal( 123 | self, 124 | ): 125 | tests = [ 126 | ( 127 | "test-iss_visium_sc", 128 | [iss_dataset(), visium_dataset(), scrnaseq_dataset()], 129 | ), 130 | ( 131 | "test-visium_visium", 132 | [ 133 | visium_dataset("visium_dataset_1"), 134 | visium_dataset("visium_dataset_2"), 135 | ], 136 | ), 137 | ( 138 | "test-sc_sc", 139 | [scrnaseq_dataset("sc_dataset_1"), scrnaseq_dataset("sc_dataset_2")], 140 | ), 141 | ( 142 | "test-iss_iss_visium", 143 | [ 144 | iss_dataset("iss_dataset_1"), 145 | iss_dataset("iss_dataset_2"), 146 | visium_dataset(), 147 | ], 148 | ), 149 | ("test-sc", [scrnaseq_dataset()]), 150 | ("test-iss", [iss_dataset()]), 151 | ("test-visium", [visium_dataset()]), 152 | ] 153 | 154 | for test in tests: 155 | input = { 156 | "project": test[0], 157 | "extended_features": "celltype", 158 | "url": "http://localhost/", 159 | 
"config_filename_suffix": "config.json", 160 | "datasets": reduce(operator.ior, test[1], {}), 161 | } 162 | 163 | write_json_multimodal(**input) 164 | 165 | assert os.path.exists( 166 | f"{input['project']}-multimodal-{input['config_filename_suffix']}" 167 | ) 168 | --------------------------------------------------------------------------------