├── geosynth ├── py.typed ├── cli │ ├── __init__.py │ ├── main.py │ └── download.py ├── __main__.py ├── mappings │ ├── __init__.py │ ├── utils.py │ ├── semantic.py │ └── _turbo.py ├── common.py ├── __init__.py ├── progress.py ├── _plt_to_numpy.py ├── models │ └── lighting.py ├── dataset.py ├── helpers.py ├── scene.py ├── _download.py ├── _visualize_instances.py └── data.py ├── docs ├── source │ ├── _static │ │ ├── .gitkeep │ │ └── custom.css │ ├── api.rst │ ├── index.md │ ├── conf.py │ └── data.rst ├── README.md ├── Makefile └── make.bat ├── poetry.toml ├── assets ├── banner.jpg └── scenes │ └── AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7 │ ├── rgb.jpg │ ├── depth.jpg │ ├── normals.jpg │ ├── residual.jpg │ ├── shading.jpg │ ├── reflectance.jpg │ ├── instance_segmentation.jpg │ └── semantic_segmentation.jpg ├── .codecov.yml ├── tests ├── conftest.py ├── test_zip.py ├── test_scene.py └── models │ └── test_lighting.py ├── .github ├── workflows │ ├── deploy.yaml │ └── tests.yaml ├── dependabot.yml └── contributing.md ├── .readthedocs.yaml ├── .pre-commit-config.yaml ├── pyproject.toml ├── .gitignore ├── README.md └── LICENSE /geosynth/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/source/_static/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /geosynth/cli/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /poetry.toml: -------------------------------------------------------------------------------- 1 | [virtualenvs] 2 | in-project = true 3 | -------------------------------------------------------------------------------- /assets/banner.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/banner.jpg -------------------------------------------------------------------------------- /geosynth/__main__.py: -------------------------------------------------------------------------------- 1 | from .cli.main import run_app 2 | 3 | run_app(prog_name="geosynth") 4 | -------------------------------------------------------------------------------- /docs/source/api.rst: -------------------------------------------------------------------------------- 1 | API 2 | === 3 | 4 | .. automodule:: geosynth 5 | :members: 6 | :undoc-members: 7 | -------------------------------------------------------------------------------- /geosynth/mappings/__init__.py: -------------------------------------------------------------------------------- 1 | from ._turbo import turbo 2 | from .semantic import SemanticClassesMixin 3 | from .utils import apply_palette, to_uint8 4 | -------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/rgb.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/rgb.jpg -------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/depth.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/depth.jpg -------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/normals.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/normals.jpg -------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/residual.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/residual.jpg -------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/shading.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/shading.jpg -------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/reflectance.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/reflectance.jpg -------------------------------------------------------------------------------- /geosynth/common.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Union 3 | 4 | # Typing 5 | PathLike = Union[str, Path] 6 | 7 | 8 | # Defaults 9 | DEFAULT_DATASET_PATH = Path("~/data/geosynth") 10 | -------------------------------------------------------------------------------- /docs/source/index.md: -------------------------------------------------------------------------------- 1 | ```{include} ../../README.md 2 | :relative-docs: docs/ 3 | :relative-images: 4 | ``` 5 | 6 | ```{toctree} 7 | :hidden: 8 | :maxdepth: 2 9 | 10 | self 11 | data 12 | api 13 | ``` 14 | 
-------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/instance_segmentation.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/instance_segmentation.jpg -------------------------------------------------------------------------------- /assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/semantic_segmentation.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/geomagical/GeoSynth/HEAD/assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/semantic_segmentation.jpg -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | Documentation is available online [here](https://geosynth.readthedocs.io/en/latest/). 2 | 3 | # Building Documentation 4 | To locally build the documentation, run the following command in this directory (`docs/`): 5 | 6 | .. code-block:: bash 7 | 8 | make html 9 | 10 | The generated html documentation will be available in `build/html/` 11 | -------------------------------------------------------------------------------- /docs/source/_static/custom.css: -------------------------------------------------------------------------------- 1 | /* Have contents take up entire browser window width. 
*/ 2 | .wy-nav-content { 3 | max-width: none !important 4 | } 5 | 6 | /* override table width restrictions */ 7 | .wy-table-responsive table td, .wy-table-responsive table th { 8 | white-space: normal !important; 9 | } 10 | 11 | .wy-table-responsive { 12 | overflow: visible !important; 13 | } 14 | -------------------------------------------------------------------------------- /.codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | project: 4 | default: 5 | # Commits pushed to main should not make the overall 6 | # project coverage decrease by more than 1% 7 | target: auto 8 | threshold: 1% 9 | patch: 10 | default: 11 | # Be tolerant on code coverage diff on PRs to limit 12 | # noisy red coverage status on github PRs. 13 | target: auto 14 | threshold: 20% 15 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | def pytest_addoption(parser): 5 | parser.addoption( 6 | "--network", 7 | action="store_true", 8 | help="Include tests that interact with network (marked with marker @network)", 9 | ) 10 | 11 | 12 | def pytest_runtest_setup(item): 13 | if "network" in item.keywords and not item.config.getoption("--network"): 14 | pytest.skip("need --network option to run this test") 15 | -------------------------------------------------------------------------------- /geosynth/__init__.py: -------------------------------------------------------------------------------- 1 | # Don't manually change, let poetry-dynamic-versioning-plugin handle it. 2 | __version__ = "0.0.0" 3 | 4 | __all__ = [ 5 | "GeoSynth", 6 | "Scene", 7 | "data", 8 | "download", 9 | "instance_bbox", 10 | "instance_segmentation_bboxes", 11 | ] 12 | 13 | 14 | from . 
import data 15 | from ._download import download 16 | from .dataset import GeoSynth 17 | from .helpers import instance_bbox, instance_segmentation_bboxes 18 | from .scene import Scene 19 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Build package and push to PyPi 2 | on: 3 | push: 4 | tags: 5 | - "v*.*.*" 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - name: Build and publish to pypi 13 | uses: JRubics/poetry-publish@v1.13 14 | with: 15 | pypi_token: ${{ secrets.PYPI_TOKEN }} 16 | ignore_dev_requirements: "yes" 17 | plugins: "poetry-dynamic-versioning-plugin" 18 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /tests/test_zip.py: -------------------------------------------------------------------------------- 1 | import filecmp 2 | import zipfile 3 | from pathlib import Path 4 | 5 | import pytest 6 | 7 | from geosynth.data import Gravity 8 | 9 | 10 | @pytest.mark.network 11 | def test_download_zip(tmp_path): 12 | expected_zip_file = Path("tests/zip/gravity.zip") 13 | actual_zip_file = Gravity.download_zip(tmp_path, variant="demo") 14 | assert filecmp.cmp(expected_zip_file, actual_zip_file) 15 | 16 | 17 | def test_download_zip_invalid_variant(tmp_path): 18 | with pytest.raises(ValueError) as e: 19 | Gravity.download_zip(tmp_path, variant="foobar") 20 | assert ( 21 | str(e.value) 22 | == "Variant \"foobar\" not in valid. Choose one of: ['demo', 'full']." 23 | ) 24 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= poetry run sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. 
$(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /geosynth/cli/main.py: -------------------------------------------------------------------------------- 1 | import typer 2 | from typer import Option 3 | 4 | import geosynth 5 | 6 | from .download import download 7 | 8 | app = typer.Typer( 9 | no_args_is_help=True, pretty_exceptions_enable=False, add_completion=False 10 | ) 11 | app.command()(download) 12 | 13 | 14 | def version_callback(value: bool): 15 | if not value: 16 | return 17 | print(geosynth.__version__) 18 | raise typer.Exit() 19 | 20 | 21 | @app.callback() 22 | def common( 23 | ctx: typer.Context, 24 | version: bool = Option( 25 | None, 26 | "--version", 27 | "-v", 28 | callback=version_callback, 29 | help="Display GeoSynth version.", 30 | ), 31 | ): 32 | pass 33 | 34 | 35 | def run_app(*args, **kwargs): 36 | app(*args, **kwargs) 37 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 
21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | build: 8 | os: "ubuntu-22.04" 9 | tools: 10 | python: "3.8" 11 | jobs: 12 | post_create_environment: 13 | # Install poetry 14 | # https://python-poetry.org/docs/#installing-manually 15 | - pip install poetry 16 | # Tell poetry to not use a virtual environment 17 | - poetry config virtualenvs.create false 18 | post_install: 19 | # Install dependencies with 'docs' dependency group 20 | # https://python-poetry.org/docs/managing-dependencies/#dependency-groups 21 | - poetry install --with docs 22 | 23 | # Build documentation in the docs/ directory with Sphinx 24 | sphinx: 25 | configuration: docs/source/conf.py 26 | fail_on_warning: false 27 | 28 | # If using Sphinx, optionally build your docs in additional formats such as PDF 29 | formats: 30 | - pdf 31 | -------------------------------------------------------------------------------- /tests/test_scene.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | import pytest 4 | 5 | from geosynth import Scene 6 | from geosynth.data import Rgb 7 | 8 | 9 | @pytest.fixture 10 | def tmp_scene_folder(tmp_path): 11 | root = tmp_path / "scene_id" 12 | root.mkdir() 13 | 14 | img = np.zeros((48, 64, 3), dtype=np.uint8) 15 | img[..., 0] = 1 16 | img[..., 1] = 2 17 | img[..., 2] = 3 
18 | Rgb.write_file(root / "rgb.png", img) 19 | 20 | def npz_save(name, data): 21 | kwargs = {name: data} 22 | np.savez_compressed(root / f"{name}.npz", **kwargs) 23 | 24 | npz_save("depth", np.ones((64, 48)).astype(np.float16)) 25 | npz_save( 26 | "intrinsics", 27 | np.eye(3), 28 | ) 29 | 30 | return root 31 | 32 | 33 | def test_scene_basic(tmp_scene_folder): 34 | scene = Scene(tmp_scene_folder) 35 | 36 | rgb = scene.rgb.read() 37 | assert rgb.shape == (48, 64, 3) 38 | 39 | depth = scene.depth.read() 40 | assert depth.dtype == np.float32 41 | 42 | intrinsics = scene.intrinsics.read() 43 | assert intrinsics.shape == (3, 3) 44 | assert intrinsics.dtype == float 45 | -------------------------------------------------------------------------------- /geosynth/progress.py: -------------------------------------------------------------------------------- 1 | from rich.progress import Progress 2 | 3 | 4 | class UrlRetrieveProgressBar: 5 | def __init__(self, progress: Progress, message: str): 6 | self.progress = progress 7 | self.task_id = self.progress.add_task(message, start=False, total=None) 8 | self.n_calls = 0 9 | 10 | def __call__(self, block_num, block_size, total_size): 11 | self.n_calls += 1 12 | if self.n_calls == 1: 13 | self.update(total=total_size) 14 | self.start() 15 | 16 | downloaded = block_num * block_size 17 | self.update(completed=downloaded) 18 | 19 | def update(self, *args, **kwargs): 20 | return self.progress.update(self.task_id, *args, **kwargs) 21 | 22 | def start(self): 23 | self.progress.start_task(self.task_id) 24 | 25 | def remove(self): 26 | self.progress.remove_task(self.task_id) 27 | 28 | def stop(self, description=None): 29 | if description is not None: 30 | self.update(description=description) 31 | self.progress.stop_task(self.task_id) 32 | 33 | def reset(self, *args, **kwargs): 34 | self.progress.reset(self.task_id, *args, **kwargs) 35 | -------------------------------------------------------------------------------- 
/geosynth/mappings/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def to_uint8(data: np.ndarray): 5 | return np.clip(np.round(data * 255), 0, 255).astype(np.uint8) 6 | 7 | 8 | def apply_palette(palette: np.ndarray, data: np.ndarray) -> np.ndarray: 9 | """Colormap data by a pre-defined palette. 10 | 11 | Parameters 12 | ---------- 13 | palette: np.ndarray 14 | (n, 3) array where each row is an RGB value. 15 | data: np.ndarray 16 | An int or float array that will index into ``palette``. 17 | If float, data must be in range ``[0, 1]``. 18 | If int, data must have max value `` np.ndarray: 24 | return np.array(v, dtype=np.float32) 25 | 26 | 27 | class AmbientLight(LightSource): 28 | pass 29 | 30 | 31 | class PointLight(LightSource): 32 | position: np.ndarray # (3,) xyz position in the camera's coordinate system in meters. 33 | 34 | @validator("position", pre=True) 35 | def _validate_position_numpy(cls, v: _Vector3) -> np.ndarray: 36 | return np.array(v, dtype=np.float32) 37 | 38 | 39 | class DirectionalLight(LightSource): 40 | direction: np.ndarray # (3,) unit-norm xyz vector in the camera's coordinate system 41 | # (origin at light position) 42 | volume: np.ndarray # (3, 3) un-normalized rotation matrix. 43 | # Row norm is axis length/scale in meters. 
44 | 45 | @validator("direction", pre=True) 46 | def _validate_direction_numpy(cls, v: _Vector3) -> np.ndarray: 47 | return np.array(v, dtype=np.float32) 48 | 49 | @validator("volume", pre=True) 50 | def _validate_volume_numpy(cls, v: Any) -> np.ndarray: 51 | v = np.array(v, dtype=np.float32) 52 | if v.shape != (3, 3): 53 | raise ValueError 54 | return v 55 | 56 | 57 | class Lighting(GeoSynthBaseModel): 58 | ambient: AmbientLight 59 | points: List[PointLight] 60 | directionals: List[DirectionalLight] 61 | -------------------------------------------------------------------------------- /geosynth/dataset.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from .common import DEFAULT_DATASET_PATH, PathLike 4 | from .scene import Scene 5 | 6 | _BLOCKLIST = set() # TODO: populate, if necessary 7 | 8 | 9 | class GeoSynth: 10 | def __init__( 11 | self, 12 | path: PathLike = DEFAULT_DATASET_PATH, 13 | variant: str = "full", 14 | ): 15 | """Create a GeoSynth dataset instance. 16 | 17 | Parameters 18 | ---------- 19 | path : PathLike 20 | Same path that data was downloaded to. 21 | Should contain subfolders representing ``variants``. 22 | Defaults to ``~/data/geosynth/``. 23 | variant: str 24 | GeoSynth variant to use. To disable this feature, where the 25 | ``path`` folder directly contains scene folders, set to an empty 26 | string. Defaults to ``"full"``. 
27 | """ 28 | self.path = path = Path(path).expanduser() 29 | self.variant = variant 30 | self._scenes = [ 31 | Scene(x) 32 | for x in (path / variant).glob("*/") 33 | if x.is_dir() and not x.stem.startswith(".") and x.name not in _BLOCKLIST 34 | ] 35 | 36 | def __len__(self): 37 | """Return the number of exemplars in the dataset.""" 38 | return len(self._scenes) 39 | 40 | def __repr__(self): 41 | keywords = ", ".join( 42 | f"{key}={value!r}" 43 | for key, value in self.__dict__.items() 44 | if not key.startswith("_") 45 | ) 46 | class_name = type(self).__name__ 47 | return f"{class_name}({keywords})" 48 | 49 | def __getitem__(self, index): 50 | """Get exemplar at ``index``. 51 | 52 | Parameters 53 | ---------- 54 | index : int 55 | Index to get scene at. 56 | 57 | Returns 58 | ------- 59 | Scene 60 | Scene at ``index`` 61 | """ 62 | return self._scenes[index] 63 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | exclude: ^(poetry.lock|.idea/) 2 | repos: 3 | - repo: https://github.com/charliermarsh/ruff-pre-commit 4 | rev: "v0.0.259" 5 | hooks: 6 | - id: ruff 7 | args: [--fix, --exit-non-zero-on-fix] 8 | 9 | - repo: https://github.com/psf/black 10 | rev: 23.1.0 11 | hooks: 12 | - id: black 13 | args: 14 | - "--target-version=py38" 15 | - "--target-version=py39" 16 | - "--target-version=py310" 17 | - "--target-version=py311" 18 | types: [python] 19 | 20 | - repo: https://github.com/asottile/blacken-docs 21 | rev: 1.13.0 22 | hooks: 23 | - id: blacken-docs 24 | 25 | - repo: https://github.com/pre-commit/pre-commit-hooks 26 | rev: v4.4.0 27 | hooks: 28 | - id: check-added-large-files 29 | - id: check-ast 30 | - id: check-builtin-literals 31 | - id: check-case-conflict 32 | - id: check-docstring-first 33 | - id: check-shebang-scripts-are-executable 34 | - id: check-merge-conflict 35 | - id: check-json 36 | - id: check-toml 37 | 
- id: check-xml 38 | - id: check-yaml 39 | - id: debug-statements 40 | - id: destroyed-symlinks 41 | - id: detect-private-key 42 | - id: end-of-file-fixer 43 | exclude: ^LICENSE|\.(html|csv|txt|svg|py)$ 44 | - id: pretty-format-json 45 | args: ["--autofix", "--no-ensure-ascii", "--no-sort-keys"] 46 | - id: requirements-txt-fixer 47 | - id: trailing-whitespace 48 | args: [--markdown-linebreak-ext=md] 49 | exclude: \.(html|svg)$ 50 | 51 | - repo: https://github.com/fredrikaverpil/creosote.git 52 | rev: v2.4.0 53 | hooks: 54 | - id: creosote 55 | args: 56 | - "--venv=.venv" 57 | - "--paths=geosynth" 58 | - "--deps-file=pyproject.toml" 59 | - "--sections=tool.poetry.dependencies" 60 | 61 | - repo: https://github.com/codespell-project/codespell 62 | rev: v2.2.4 63 | hooks: 64 | - id: codespell 65 | 66 | - repo: https://github.com/RobertCraigie/pyright-python 67 | rev: v1.1.300 68 | hooks: 69 | - id: pyright 70 | -------------------------------------------------------------------------------- /geosynth/helpers.py: -------------------------------------------------------------------------------- 1 | from typing import Dict 2 | 3 | import numpy as np 4 | 5 | 6 | def _first_last_nonzero(mask, axis): 7 | if mask.ndim != 2: 8 | raise ValueError 9 | 10 | res = np.nonzero(mask.any(axis=axis))[0] 11 | return res[0], res[-1] 12 | 13 | 14 | def instance_bbox(mask: np.ndarray) -> np.ndarray: 15 | """Compute the normalized inclusive bounding box for a binary mask. 16 | 17 | Parameters 18 | ---------- 19 | mask: numpy.ndarray 20 | (H, W) binary mask. 21 | 22 | Returns 23 | ------- 24 | numpy.ndarray 25 | A numpy array of shape ``(4)`` representing 26 | ``[top_left_x, top_left_y, bottom_right_x, bottom_right_y]``. 27 | All values are in range ``[0, 1]``. 28 | If there are no ``True`` pixels, then all values are ``nan``. 
29 | """ 30 | if mask.any(): 31 | h, w = mask.shape 32 | top_left_y, bottom_right_y = _first_last_nonzero(mask, 1) 33 | top_left_x, bottom_right_x = _first_last_nonzero(mask, 0) 34 | bbox = np.array( 35 | [top_left_x, top_left_y, bottom_right_x, bottom_right_y], 36 | dtype=np.float32, 37 | ) 38 | bbox[0] /= w 39 | bbox[1] /= h 40 | bbox[2] /= w 41 | bbox[3] /= h 42 | else: 43 | bbox = np.array([0, 0, 0, 0], dtype=np.float32) 44 | bbox[:] = np.nan 45 | return bbox 46 | 47 | 48 | def instance_segmentation_bboxes( 49 | instances_dict: Dict[str, np.ndarray] 50 | ) -> Dict[str, np.ndarray]: 51 | """Compute all bounding boxes for an instance segmentation dictionary. 52 | 53 | Parameters 54 | ---------- 55 | instances_dict: Dict[str, np.ndarray] 56 | Instances mapping string labels to ``(N, H, W)`` binary masks. 57 | 58 | Returns 59 | ------- 60 | Dict[str, np.ndarray] 61 | Dictionary with same keys as ``instances_dict``, but each value is an ``(N, 4)`` 62 | array where each row represents the normalized coordinates 63 | ``[top_left_x, top_left_y, bottom_right_x, bottom_right_y]``. 
64 | """ 65 | bboxes = {} 66 | for label, masks in instances_dict.items(): 67 | bboxes[label] = [] 68 | for mask in masks: 69 | bboxes[label].append(instance_bbox(mask)) 70 | bboxes[label] = np.stack(bboxes[label], axis=0) 71 | return bboxes 72 | -------------------------------------------------------------------------------- /geosynth/scene.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from geosynth.common import PathLike 4 | from geosynth.data import ( 5 | CubeEnvironmentMap, 6 | Data, 7 | Depth, 8 | Extrinsics, 9 | Gravity, 10 | HdrCubeEnvironmentMap, 11 | HdrReflectance, 12 | HdrResidual, 13 | HdrRgb, 14 | HdrShading, 15 | HdrSphereEnvironmentMap, 16 | InstanceSegmentation, 17 | Intrinsics, 18 | LayoutLinesFull, 19 | LayoutLinesOccluded, 20 | LayoutLinesVisible, 21 | Lighting, 22 | Normals, 23 | Reflectance, 24 | Residual, 25 | Rgb, 26 | SemanticSegmentation, 27 | Shading, 28 | SphereEnvironmentMap, 29 | ) 30 | 31 | 32 | class Scene: 33 | """Container for datatype readers for a single exemplar.""" 34 | 35 | cube_environment_map: CubeEnvironmentMap 36 | depth: Depth 37 | extrinsics: Extrinsics 38 | gravity: Gravity 39 | hdr_cube_environment_map: HdrCubeEnvironmentMap 40 | hdr_reflectance: HdrReflectance 41 | hdr_residual: HdrResidual 42 | hdr_rgb: HdrRgb 43 | hdr_shading: HdrShading 44 | hdr_sphere_environment_map: HdrSphereEnvironmentMap 45 | instance_segmentation: InstanceSegmentation 46 | intrinsics: Intrinsics 47 | layout_lines_full: LayoutLinesFull 48 | layout_lines_occluded: LayoutLinesOccluded 49 | layout_lines_visible: LayoutLinesVisible 50 | lighting: Lighting 51 | normals: Normals 52 | reflectance: Reflectance 53 | residual: Residual 54 | rgb: Rgb 55 | semantic_segmentation: SemanticSegmentation 56 | shading: Shading 57 | sphere_environment_map: SphereEnvironmentMap 58 | 59 | def __init__(self, path: PathLike): 60 | """Create a scene object. 
61 | 62 | Parameters 63 | ---------- 64 | path: PathLike 65 | Path to where contents are downloaded to. 66 | Must contain a subfolder 67 | """ 68 | self.path = Path(path) 69 | 70 | def __getattr__(self, name) -> Data: 71 | try: 72 | # We could setattr here, but then memory-usage would increase over time. 73 | return Data[name](self.path) 74 | except KeyError as e: 75 | raise AttributeError from e 76 | 77 | def __repr__(self): 78 | keywords = ", ".join( 79 | f"{key}={value!r}" 80 | for key, value in self.__dict__.items() 81 | if not key.startswith("_") 82 | ) 83 | class_name = type(self).__name__ 84 | return f"{class_name}({keywords})" 85 | -------------------------------------------------------------------------------- /.github/contributing.md: -------------------------------------------------------------------------------- 1 | ## Environment Setup 2 | 3 | 1. We use [Poetry](https://python-poetry.org/docs/#installation) for managing virtual environments and dependencies. 4 | Once Poetry is installed, run `poetry install` in this repo to get started. 5 | 2. For managing linters, static-analysis, and other tools, we use [pre-commit](https://pre-commit.com/#installation). 6 | Once Pre-commit is installed, run `pre-commit install` in this repo to install the hooks. 7 | Using pre-commit ensures PRs match the linting requirements of the codebase. 8 | 9 | ## Documentation 10 | Whenever possible, please add docstrings to your code! 11 | We use [numpy-style napoleon docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/#google-vs-numpy). 12 | To confirm docstrings are valid, build the docs by running `poetry run make html` in the `docs/` folder. 13 | 14 | I typically write dosctrings first, it will act as a guide to limit scope and encourage unit-testable code. 15 | Good docstrings include information like: 16 | 17 | 1. If not immediately obvious, what is the intended use-case? When should this function be used? 18 | 2. What happens during errors/edge-cases. 
19 | 3. When dealing with physical values, include units. 20 | 21 | ## Unit Tests 22 | We use the [pytest](https://docs.pytest.org/) framework for unit testing. Ideally, all new code is partners with 23 | new unit tests to exercise that code. If fixing a bug, consider writing the test first to confirm the existence of the 24 | bug, and to confirm that the new code fixes it. 25 | 26 | Unit tests should only test a single concise body of code. If this is hard to do, there are two solutions that can help: 27 | 1. Restructure the code. Keep inputs/outputs to be simple variables. Avoid complicated interactions with state. 28 | 2. Use [pytest-mock](https://pytest-mock.readthedocs.io/en/latest/) to mock out external interactions. 29 | 30 | ## Coding Style 31 | In an attempt to keep consistency and maintainability in the code-base, here are some high-level guidelines for code that might not be enforced by linters. 32 | 33 | * Use f-strings. 34 | * Keep/cast path variables as `pathlib.Path` objects. 35 | Do not use `os.path`. 36 | For public-facing functions, cast path arguments immediately to `Path`. 37 | * Use magic-methods when appropriate. It might be better to implement ``MyClass.__call__()`` instead of ``MyClass.run()``. 38 | * Do not return sentinel values for error-states like `-1` or `None`. Instead, raise an exception. 39 | * Avoid deeply nested code. Techniques like returning early and breaking up a complicated function into multiple functions results in easier to read and test code. 40 | * Consider if you are double-name-spacing and how modules are meant to be imported. 41 | E.g. it might be better to name a function `read` instead of `image_read` in the module `my_package/image.py`. 42 | Consider the module name-space and whether or not it's flattened in `__init__.py`. 43 | * Only use multiple-inheritance if using a mixin. Mixin classes should end in `"Mixin"`. 
44 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | import sys 10 | from datetime import date 11 | from pathlib import Path 12 | 13 | sys.path.insert(0, str(Path("../..").absolute())) 14 | 15 | 16 | from geosynth import __version__ 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = "GeoSynth" 21 | copyright = f"{date.today().year}, Geomagical Labs" 22 | author = "Geomagical Labs" 23 | 24 | # The short X.Y version. 25 | version = __version__ 26 | # The full version, including alpha/beta/rc tags 27 | release = __version__ 28 | 29 | 30 | # -- General configuration --------------------------------------------------- 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [ 36 | "myst_parser", 37 | "sphinx_rtd_theme", 38 | "sphinx_copybutton", 39 | "sphinx.ext.autodoc", 40 | "sphinx.ext.napoleon", 41 | ] 42 | 43 | # Add any paths that contain templates here, relative to this directory. 44 | templates_path = ["_templates"] 45 | 46 | # List of patterns, relative to source directory, that match files and 47 | # directories to ignore when looking for source files. 48 | # This pattern also affects html_static_path and html_extra_path. 
49 | exclude_patterns = [] 50 | 51 | smartquotes = False 52 | 53 | # Napoleon settings 54 | napoleon_google_docstring = True 55 | napoleon_numpy_docstring = True 56 | napoleon_include_init_with_doc = False 57 | napoleon_include_private_with_doc = False 58 | napoleon_include_special_with_doc = True 59 | napoleon_use_admonition_for_examples = False 60 | napoleon_use_admonition_for_notes = False 61 | napoleon_use_admonition_for_references = False 62 | napoleon_use_ivar = False 63 | napoleon_use_param = True 64 | napoleon_use_rtype = True 65 | napoleon_preprocess_types = False 66 | napoleon_type_aliases = None 67 | napoleon_attr_annotations = True 68 | 69 | # -- Options for HTML output ------------------------------------------------- 70 | 71 | # The theme to use for HTML and HTML Help pages. See the documentation for 72 | # a list of builtin themes. 73 | # 74 | html_theme = "sphinx_rtd_theme" 75 | 76 | # Add any paths that contain custom static files (such as style sheets) here, 77 | # relative to this directory. They are copied after the builtin static files, 78 | # so a file named "default.css" will overwrite the builtin "default.css". 
79 | html_static_path = ["_static"] 80 | html_css_files = ["custom.css"] 81 | 82 | html_title = project 83 | # html_logo = "../../assets/logo_200w.png" 84 | html_favicon = None 85 | 86 | html_theme_options = { 87 | # "analytics_id": "G-XXXXXXXXXX", # Provided by Google in your dashboard 88 | # "analytics_anonymize_ip": False, 89 | "logo_only": False, 90 | "display_version": False, 91 | "prev_next_buttons_location": "bottom", 92 | "style_external_links": False, 93 | "vcs_pageview_mode": "", 94 | # "style_nav_header_background": "white", 95 | # Toc options 96 | "collapse_navigation": True, 97 | "sticky_navigation": True, 98 | "navigation_depth": 4, 99 | "includehidden": True, 100 | "titles_only": False, 101 | } 102 | -------------------------------------------------------------------------------- /.github/workflows/tests.yaml: -------------------------------------------------------------------------------- 1 | # Regular tests 2 | # 3 | # Use this to ensure your tests are passing on every push and PR (skipped on 4 | # pushes which only affect documentation). 5 | # 6 | # You should make sure you run jobs on at least the *oldest* and the *newest* 7 | # versions of python that your codebase is intended to support. 
8 | 9 | name: tests 10 | 11 | on: 12 | push: 13 | branches: 14 | - main 15 | pull_request: 16 | 17 | jobs: 18 | test: 19 | runs-on: ${{ matrix.os }} 20 | strategy: 21 | matrix: 22 | os: [ubuntu-latest] 23 | python-version: ["3.8", "3.9", "3.10"] 24 | env: 25 | OS: ${{ matrix.os }} 26 | PYTHON: ${{ matrix.python-version }} 27 | 28 | steps: 29 | - name: Check out repository 30 | uses: actions/checkout@v3 31 | 32 | - name: Cache $HOME/.local # Significantly speeds up Poetry Install 33 | uses: actions/cache@v3 34 | with: 35 | path: ~/.local 36 | key: dotlocal-${{ runner.os }}-${{ hashFiles('.github/workflows/tests.yml') }} 37 | 38 | - name: Install poetry 39 | uses: snok/install-poetry@v1 40 | with: 41 | virtualenvs-create: true 42 | virtualenvs-in-project: true 43 | installer-parallel: true 44 | 45 | - name: Set up python ${{ matrix.python-version }} 46 | uses: actions/setup-python@v4 47 | with: 48 | python-version: ${{ matrix.python-version }} 49 | cache: "poetry" 50 | 51 | - name: Install library 52 | run: poetry install --no-interaction 53 | 54 | - name: Sanity check with flake8 55 | run: | 56 | source .venv/bin/activate 57 | # stop the build if there are Python syntax errors or undefined names 58 | python -m flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 59 | # exit-zero treats all errors as warnings 60 | python -m flake8 . 
"""Download helpers for fetching and extracting GeoSynth dataset archives."""
import zipfile
from pathlib import Path
from typing import Iterable, Optional
from urllib.error import HTTPError

from rich import print
from rich.progress import Progress

from .common import DEFAULT_DATASET_PATH, PathLike
from .data import Data
from .progress import UrlRetrieveProgressBar


def _validate_dtypes(dtypes: Iterable):
    """Raise ``ValueError`` if any requested dtype is not a known ``Data`` entry.

    Parameters
    ----------
    dtypes : Iterable
        Candidate data-type names to validate against the ``Data`` registry.

    Raises
    ------
    ValueError
        If a dtype is not a member of ``Data``.
    """
    for dtype in dtypes:
        if dtype not in Data:
            # BUGFIX: ``list(Data).sort()`` sorts in place and returns ``None``,
            # so the old message always printed "Must be one of: None.".
            # ``sorted(Data)`` returns the sorted list. Also fixed the
            # "Specifided" typo in the message.
            raise ValueError(
                f'Specified dtype "{dtype}" is invalid. Must be one of: {sorted(Data)}.'
            )


def download(
    dst: Optional[PathLike] = None,
    dtypes: Optional[Iterable[str]] = None,
    variant: str = "demo",
    force: bool = False,
    cleanup: bool = True,
    progress: Optional[Progress] = None,
) -> Path:
    """Download the GeoSynth dataset.

    Parameters
    ----------
    dst : Optional[PathLike]
        If ``None``, downloads to default location ``~/data/geosynth``.
    dtypes : Optional[Iterable[str]]
        List of data types to download. For example::

            ["rgb", "depth"]

        Defaults to downloading all non-hdr datatypes. The special values
        ``"non-hdr"`` and ``"all"`` expand to the corresponding subsets.
    variant: str
        Variant of the GeoSynth dataset to download.
    force: bool
        Force a redownload, despite cached files.
        Defaults to ``False``.
    cleanup: bool
        Delete downloaded zipfile(s) after downloading & unzipping.
        Defaults to ``True``.
    progress: Optional[rich.progress.Progress]
        Optional Rich Progress object to update with progress.

    Returns
    -------
    dst: Path
        The destination download directory (``dst / variant``).

    Raises
    ------
    ValueError
        If an unknown dtype is requested.
    urllib.error.HTTPError
        Re-raised for any HTTP error other than 404 (404s are reported and
        skipped so remaining dtypes still download).
    """
    if dst is None:
        dst = DEFAULT_DATASET_PATH
    dst = Path(dst).expanduser()

    if not dtypes or "non-hdr" in dtypes:
        # All non-hdr types
        dtypes = [x for x in Data if "hdr_" not in x]
    elif "all" in dtypes:
        dtypes = list(Data)

    _validate_dtypes(dtypes)

    progress_bars = {}

    if progress:
        # Start up all the Progress Bar Tasks
        for dtype in dtypes:
            progress_bars[dtype] = UrlRetrieveProgressBar(
                progress, f"{dtype} downloading"
            )

    for dtype in dtypes:
        try:
            zip_path = Data[dtype].download_zip(
                dst,
                variant=variant,
                force=force,
                reporthook=progress_bars.get(dtype),
            )
        except HTTPError as e:
            http_error_code = e.getcode()
            if http_error_code == 404:
                # Not-yet-published variant: warn and move on to other dtypes.
                print(
                    f'[bold red]{dtype} for variant "{variant}" has not been uploaded yet.\n'
                    " Please check back later.[/bold red]"
                )
            else:
                raise

            if progress:
                progress_bars[dtype].stop(f"[bold red]{dtype} Unavailable.[/bold red]")

            continue

        progress_bar = progress_bars.get(dtype)
        # A zero-byte file is the "already downloaded & cleaned up" sentinel
        # (see the ``touch()`` below), so only extract non-empty archives.
        if zip_path.stat().st_size:
            with zipfile.ZipFile(zip_path, "r") as f:
                # Hoist the member list; the original called f.namelist() per use.
                members = f.namelist()
                if progress_bar:
                    progress_bar.update(
                        description=f"{dtype} extracting",
                        total=len(members),
                        completed=0,
                    )
                for member in members:
                    f.extract(member, path=dst / variant)
                    if progress_bar:
                        progress_bar.update(advance=1)

        if cleanup:
            zip_path.unlink()
            zip_path.touch()  # So that subsequent download calls know to not download.

        if progress_bar:
            progress_bar.update(description=f"{dtype} complete")

    return dst / variant
def test_lighting_whole_model():
    """Validate that ``Lighting`` parses a full real-world-shaped definition.

    The dict mirrors the on-disk lighting schema: one ambient light plus
    lists of directional and point lights. Values are taken verbatim from a
    captured scene. The test passes if construction raises no validation
    error; field contents are exercised by the per-light tests above.
    """
    definition = {
        # Ambient term: color + scalar intensity only.
        "ambient": {"color": (0.0, 0.0, 0.0), "intensity": 0.0},
        # Directional lights carry a unit direction and a 3x3 "volume" matrix.
        "directionals": [
            {
                "color": (1.0, 0.4588235294117647, 0.21568627450980393),
                "intensity": 0.008176614881439084,
                "direction": [
                    -3.4646946005523205e-05,
                    0.9992689490318298,
                    0.03823035582900047,
                ],
                "volume": [
                    [1.3297369480133057, -0.0016325851902365685, 0.04387778043746948],
                    [9.441948350286111e-05, -2.724630355834961, -0.10423829406499863],
                    [0.0, 0.0, 0.0],
                ],
            },
            {
                "color": (1.0, 0.4588235294117647, 0.21568627450980393),
                "intensity": 0.008176614881439084,
                "direction": [
                    -3.4646946005523205e-05,
                    0.9992689490318298,
                    0.03823035582900047,
                ],
                "volume": [
                    [-0.09387559443712234, -0.10869316756725311, 2.8409478664398193],
                    [9.474962280364707e-05, -2.7769644260406494, -0.10624206066131592],
                    [0.0, 0.0, 0.0],
                ],
            },
        ],
        # Point lights carry an xyz position instead of a direction.
        "points": [
            {
                "color": (1.0, 0.7764705882352941, 0.7058823529411765),
                "intensity": 0.030662305805396566,
                "position": [
                    -0.8580608367919922,
                    -0.28282231092453003,
                    5.766456604003906,
                ],
            },
            {
                "color": (1.0, 0.7764705882352941, 0.7058823529411765),
                "intensity": 0.030662305805396566,
                "position": [
                    -0.864791750907898,
                    -0.2906154692173004,
                    5.970149040222168,
                ],
            },
        ],
    }
    # Construction doubles as validation (pydantic-style model — see
    # "pydantic" dependency in pyproject); no assert needed.
    Lighting(**definition)
"git" 8 | style = "semver" 9 | 10 | [tool.poetry] 11 | name = "geosynth" 12 | version = "0.0.0" # Do not change, let poetry-dynamic-versioning handle it. 13 | homepage = "https://github.com/geomagical/geosynth" 14 | repository = "https://github.com/geomagical/geosynth" 15 | license = "Apache-2.0" 16 | description = "" 17 | authors = ["Geomagical Labs"] 18 | readme = "README.md" 19 | packages = [{include = "geosynth"}] 20 | 21 | [tool.poetry.scripts] 22 | geosynth = "geosynth.cli.main:run_app" 23 | 24 | [tool.poetry.dependencies] 25 | # Be as loose as possible if writing a library. 26 | python = "^3.8" 27 | numpy = "^1.19" 28 | autoregistry = ">=1.0.0" 29 | typer = ">=0.7.0" 30 | rich = ">=11.2.0" 31 | opencv-python-headless = "^4.4" 32 | pydantic = "^1.8" 33 | 34 | [tool.poetry.group.visualization.dependencies] 35 | matplotlib = "^3.7.1" 36 | 37 | [tool.poetry.group.docs.dependencies] 38 | sphinx = "~4.5.0" 39 | sphinx_rtd_theme = "~1.0.0" 40 | myst-parser = "^0.18.1" 41 | sphinx-copybutton = "^0.5.1" 42 | 43 | [tool.poetry.group.dev.dependencies] 44 | coverage = {extras = ["toml"], version = "^5.1"} 45 | poetry-dynamic-versioning = "^0.16.0" 46 | pre_commit = "^2.16.0" 47 | pytest = "^7.1.2" 48 | pytest-cov = "^3.0.0" 49 | pytest-mock = "^3.7.0" 50 | 51 | [tool.poetry.group.debug] 52 | optional = true 53 | 54 | [tool.poetry.group.debug.dependencies] 55 | ipdb = "^0.13.9" 56 | 57 | [tool.coverage.run] 58 | branch = true 59 | omit = [ 60 | "tests/*", 61 | ] 62 | 63 | [tool.coverage.report] 64 | exclude_lines = [ 65 | # Have to re-enable the standard pragma 66 | "pragma: no cover", 67 | 68 | # Don't complain about missing debug-only code: 69 | "def __repr__", 70 | "if self.debug:", 71 | "if debug:", 72 | "if DEBUG:", 73 | 74 | # Don't complain if tests don't hit defensive assertion code: 75 | "raise AssertionError", 76 | "raise NotImplementedError", 77 | 78 | # Don't complain if non-runnable code isn't run: 79 | "if 0:", 80 | "if False:", 81 | "if __name__ == 
.__main__.:", 82 | ] 83 | 84 | [tool.pyright] 85 | venvPath = "." 86 | venv = ".venv" 87 | 88 | [tool.pytest.ini_options] 89 | markers = [ 90 | "network: mark a test that requires network access.", 91 | ] 92 | 93 | [tool.ruff] 94 | target-version = 'py38' 95 | select = [ 96 | "B", # flake8-bugbear 97 | "C4", # flake8-comprehensions 98 | "D", # pydocstyle 99 | "E", # Error 100 | "F", # pyflakes 101 | "I", # isort 102 | "ISC", # flake8-implicit-str-concat 103 | "N", # pep8-naming 104 | "PGH", # pygrep-hooks 105 | "PTH", # flake8-use-pathlib 106 | "Q", # flake8-quotes 107 | "S", # bandit 108 | "SIM", # flake8-simplify 109 | "TRY", # tryceratops 110 | "UP", # pyupgrade 111 | "W", # Warning 112 | "YTT", # flake8-2020 113 | ] 114 | 115 | exclude = [ 116 | "migrations", 117 | "__pycache__", 118 | "manage.py", 119 | "settings.py", 120 | "env", 121 | ".env", 122 | "venv", 123 | ".venv", 124 | ] 125 | 126 | ignore = [ 127 | "B905", # zip strict=True; remove once python <3.10 support is dropped. 128 | "D100", 129 | "D101", 130 | "D102", 131 | "D103", 132 | "D104", 133 | "D105", 134 | "D106", 135 | "D107", 136 | "D200", 137 | "D401", 138 | "E402", 139 | "E501", 140 | "F401", 141 | "TRY003", # Avoid specifying messages outside exception class; overly strict, especially for ValueError 142 | ] 143 | line-length = 88 # Must agree with Black 144 | 145 | [tool.ruff.flake8-bugbear] 146 | extend-immutable-calls = [ 147 | "chr", 148 | "typer.Argument", 149 | "typer.Option", 150 | ] 151 | 152 | [tool.ruff.pydocstyle] 153 | convention = "numpy" 154 | 155 | [tool.ruff.per-file-ignores] 156 | "tests/*.py" = [ 157 | "D100", 158 | "D101", 159 | "D102", 160 | "D103", 161 | "D104", 162 | "D105", 163 | "D106", 164 | "D107", 165 | "S101", # use of "assert" 166 | "S102", # use of "exec" 167 | "S106", # possible hardcoded password. 
168 | "PGH001", # use of "eval" 169 | ] 170 | 171 | [tool.ruff.pep8-naming] 172 | staticmethod-decorators = [ 173 | "pydantic.validator", 174 | "pydantic.root_validator", 175 | ] 176 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ##--------------------------------------------------- 2 | # Automated documentation .gitignore files 3 | ##--------------------------------------------------- 4 | 5 | # Automatically generated API documentation stubs from sphinx-apidoc 6 | docs/source/packages 7 | 8 | # Automatically converting README from markdown to rST 9 | docs/bin 10 | docs/source/readme.rst 11 | docs/source/assets 12 | 13 | 14 | ##--------------------------------------------------- 15 | # Continuous Integration .gitignore files 16 | ##--------------------------------------------------- 17 | 18 | # Ignore test result XML files 19 | testresults.xml 20 | coverage.xml 21 | 22 | 23 | ##--------------------------------------------------- 24 | # Python default .gitignore 25 | ##--------------------------------------------------- 26 | 27 | # Byte-compiled / optimized / DLL files 28 | __pycache__/ 29 | *.py[cod] 30 | *$py.class 31 | 32 | # C extensions 33 | *.so 34 | 35 | # Distribution / packaging 36 | .Python 37 | build/ 38 | develop-eggs/ 39 | dist/ 40 | downloads/ 41 | eggs/ 42 | .eggs/ 43 | lib/ 44 | lib64/ 45 | parts/ 46 | sdist/ 47 | var/ 48 | wheels/ 49 | pip-wheel-metadata/ 50 | share/python-wheels/ 51 | *.egg-info/ 52 | .installed.cfg 53 | *.egg 54 | MANIFEST 55 | 56 | # PyInstaller 57 | # Usually these files are written by a python script from a template 58 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
59 | *.manifest 60 | *.spec 61 | 62 | # Installer logs 63 | pip-log.txt 64 | pip-delete-this-directory.txt 65 | 66 | # Unit test / coverage reports 67 | htmlcov/ 68 | .tox/ 69 | .nox/ 70 | .coverage 71 | .coverage.* 72 | .cache 73 | nosetests.xml 74 | coverage.xml 75 | *.cover 76 | .hypothesis/ 77 | .pytest_cache/ 78 | 79 | # Translations 80 | *.mo 81 | *.pot 82 | 83 | # Django stuff: 84 | *.log 85 | local_settings.py 86 | db.sqlite3 87 | 88 | # Flask stuff: 89 | instance/ 90 | .webassets-cache 91 | 92 | # Scrapy stuff: 93 | .scrapy 94 | 95 | # Sphinx documentation 96 | /docs/_build/ 97 | /docs/build/ 98 | 99 | # PyBuilder 100 | target/ 101 | 102 | # Jupyter Notebook 103 | .ipynb_checkpoints 104 | 105 | # IPython 106 | profile_default/ 107 | ipython_config.py 108 | 109 | # pyenv 110 | .python-version 111 | 112 | # pipenv 113 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 114 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 115 | # having no cross-platform support, pipenv may install dependencies that don’t work, or not 116 | # install all needed dependencies. 
117 | #Pipfile.lock 118 | 119 | # celery beat schedule file 120 | celerybeat-schedule 121 | 122 | # SageMath parsed files 123 | *.sage.py 124 | 125 | # Environments 126 | .env 127 | .venv 128 | env/ 129 | venv/ 130 | ENV/ 131 | env.bak/ 132 | venv.bak/ 133 | 134 | # Spyder project settings 135 | .spyderproject 136 | .spyproject 137 | 138 | # Rope project settings 139 | .ropeproject 140 | 141 | # mkdocs documentation 142 | /site 143 | 144 | # mypy 145 | .mypy_cache/ 146 | .dmypy.json 147 | dmypy.json 148 | 149 | # Pyre type checker 150 | .pyre/ 151 | 152 | # pytype static type analyzer 153 | .pytype/ 154 | 155 | 156 | ##--------------------------------------------------- 157 | # Windows default .gitignore 158 | ##--------------------------------------------------- 159 | 160 | # Windows thumbnail cache files 161 | Thumbs.db 162 | ehthumbs.db 163 | ehthumbs_vista.db 164 | 165 | # Dump file 166 | *.stackdump 167 | 168 | # Folder config file 169 | [Dd]esktop.ini 170 | 171 | # Recycle Bin used on file shares 172 | $RECYCLE.BIN/ 173 | 174 | # Windows Installer files 175 | *.cab 176 | *.msi 177 | *.msix 178 | *.msm 179 | *.msp 180 | 181 | # Windows shortcuts 182 | *.lnk 183 | 184 | 185 | ##--------------------------------------------------- 186 | # Linux default .gitignore 187 | ##--------------------------------------------------- 188 | 189 | # Editor backup files 190 | *~ 191 | 192 | # temporary files which can be created if a process still has a handle open of a deleted file 193 | .fuse_hidden* 194 | 195 | # KDE directory preferences 196 | .directory 197 | 198 | # Linux trash folder which might appear on any partition or disk 199 | .Trash-* 200 | 201 | # .nfs files are created when an open file is removed but is still being accessed 202 | .nfs* 203 | 204 | 205 | ##--------------------------------------------------- 206 | # Mac OSX default .gitignore 207 | ##--------------------------------------------------- 208 | 209 | # General 210 | .DS_Store 211 | .AppleDouble 212 | 
.LSOverride 213 | 214 | # Icon must end with two \r 215 | Icon 216 | 217 | # Thumbnails 218 | ._* 219 | 220 | # Files that might appear in the root of a volume 221 | .DocumentRevisions-V100 222 | .fseventsd 223 | .Spotlight-V100 224 | .TemporaryItems 225 | .Trashes 226 | .VolumeIcon.icns 227 | .com.apple.timemachine.donotpresent 228 | 229 | # Directories potentially created on remote AFP share 230 | .AppleDB 231 | .AppleDesktop 232 | Network Trash Folder 233 | Temporary Items 234 | .apdisk 235 | 236 | *.zip 237 | -------------------------------------------------------------------------------- /geosynth/mappings/semantic.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | class SemanticClassesMixin: 5 | CLASSES = ( 6 | "unknown", 7 | "wall", 8 | "floor", 9 | "cabinet", 10 | "bed", 11 | "chair", 12 | "sofa", 13 | "table", 14 | "door", 15 | "window", 16 | "bookshelf", 17 | "picture", 18 | "counter", 19 | "blinds", 20 | "desk", 21 | "shelves", 22 | "curtain", 23 | "dresser", 24 | "pillow", 25 | "mirror", 26 | "floor mat", 27 | "clothes", 28 | "ceiling", 29 | "books", 30 | "refrigerator", 31 | "television", 32 | "paper", 33 | "towel", 34 | "shower curtain", 35 | "box", 36 | "whiteboard", 37 | "person", 38 | "night stand", 39 | "toilet", 40 | "sink", 41 | "lamp", 42 | "bathtub", 43 | "bag", 44 | "otherstructure", 45 | "otherfurniture", 46 | "otherprop", 47 | "plant", 48 | "doorframe", 49 | "windowframe", 50 | "baseboard", 51 | "lighton", 52 | "lightoff", 53 | "curtainrod", 54 | ) 55 | 56 | PALETTE = ( 57 | (120, 120, 120), 58 | (180, 120, 120), 59 | (6, 230, 230), 60 | (80, 50, 50), 61 | (4, 200, 3), 62 | (120, 120, 80), 63 | (140, 140, 140), 64 | (204, 5, 255), 65 | (230, 230, 230), 66 | (4, 250, 7), 67 | (224, 5, 255), 68 | (235, 255, 7), 69 | (150, 5, 61), 70 | (120, 120, 70), 71 | (8, 255, 51), 72 | (255, 6, 82), 73 | (143, 255, 140), 74 | (204, 255, 4), 75 | (255, 51, 7), 76 | (204, 70, 3), 77 | (0, 
102, 200), 78 | (61, 230, 250), 79 | (255, 6, 51), 80 | (11, 102, 255), 81 | (255, 7, 71), 82 | (255, 9, 224), 83 | (9, 7, 230), 84 | (220, 220, 220), 85 | (255, 9, 92), 86 | (112, 9, 255), 87 | (8, 255, 214), 88 | (7, 255, 224), 89 | (255, 184, 6), 90 | (10, 255, 71), 91 | (255, 41, 10), 92 | (7, 255, 255), 93 | (224, 255, 8), 94 | (102, 8, 255), 95 | (255, 61, 6), 96 | (255, 194, 7), 97 | (255, 122, 8), 98 | (0, 255, 20), 99 | (255, 8, 41), 100 | (255, 5, 153), 101 | (6, 51, 255), 102 | (235, 12, 255), 103 | (160, 150, 20), 104 | (0, 163, 255), 105 | (140, 140, 140), 106 | (250, 10, 15), 107 | (20, 255, 0), 108 | (31, 255, 0), 109 | (255, 31, 0), 110 | (255, 224, 0), 111 | (153, 255, 0), 112 | (0, 0, 255), 113 | (255, 71, 0), 114 | (0, 235, 255), 115 | (0, 173, 255), 116 | (31, 0, 255), 117 | (11, 200, 200), 118 | (255, 82, 0), 119 | (0, 255, 245), 120 | (0, 61, 255), 121 | (0, 255, 112), 122 | (0, 255, 133), 123 | (255, 0, 0), 124 | (255, 163, 0), 125 | (255, 102, 0), 126 | (194, 255, 0), 127 | (0, 143, 255), 128 | (51, 255, 0), 129 | (0, 82, 255), 130 | (0, 255, 41), 131 | (0, 255, 173), 132 | (10, 0, 255), 133 | (173, 255, 0), 134 | (0, 255, 153), 135 | (255, 92, 0), 136 | (255, 0, 255), 137 | (255, 0, 245), 138 | (255, 0, 102), 139 | (255, 173, 0), 140 | (255, 0, 20), 141 | (255, 184, 184), 142 | (0, 31, 255), 143 | (0, 255, 61), 144 | (0, 71, 255), 145 | (255, 0, 204), 146 | (0, 255, 194), 147 | (0, 255, 82), 148 | (0, 10, 255), 149 | (0, 112, 255), 150 | (51, 0, 255), 151 | (0, 194, 255), 152 | (0, 122, 255), 153 | (0, 255, 163), 154 | (255, 153, 0), 155 | (0, 255, 10), 156 | (255, 112, 0), 157 | (143, 255, 0), 158 | (82, 0, 255), 159 | (163, 255, 0), 160 | (255, 235, 0), 161 | (8, 184, 170), 162 | (133, 0, 255), 163 | (0, 255, 92), 164 | (184, 0, 255), 165 | (255, 0, 31), 166 | (0, 184, 255), 167 | (0, 214, 255), 168 | (255, 0, 112), 169 | (92, 255, 0), 170 | (0, 224, 255), 171 | (112, 224, 255), 172 | (70, 184, 160), 173 | (163, 0, 255), 174 | (153, 0, 
"""Instance segmentation visualization tools.

Some of this code has been heavily modified from MMSegmentation.
"""
import contextlib
from typing import Dict, Optional

import numpy as np

# matplotlib is an optional dependency (see the "visualization" poetry group);
# silently skip these imports when it is absent.
with contextlib.suppress(ImportError):
    import matplotlib.pyplot as plt
    from matplotlib.collections import PatchCollection
    from matplotlib.patches import Polygon

from ._plt_to_numpy import plt_to_numpy
from .helpers import instance_segmentation_bboxes


def plot_instances(
    img: np.ndarray,
    bboxes: np.ndarray,
    labels,
    segms=None,
    class_names=None,
    score_thr=0,
    thickness=1,
    font_size=10,
    min_area=0,
    show=False,
):
    """Draw bboxes and class labels (with scores) on an image.

    Parameters
    ----------
    img: np.ndarray
        RGB image.
    bboxes: np.ndarray
        Bounding boxes (with scores), shaped (n, 4) or (n, 5).
    labels: np.ndarray
        Labels of bboxes.
    segms: ndarray or None
        Masks, shaped (n,h,w) or None.
    class_names: list[str]
        Names of each classes.
    score_thr: float
        Minimum score of bboxes to be shown. Default: 0
    thickness: int
        Thickness of lines. Default: 1
    font_size: int
        Font size of texts. Default: 10
    min_area: float
        Minimum bbox area (in pixels) below which a bbox is skipped.
        Default: 0 (draw everything). (Was previously undocumented.)
    show: bool
        Whether to show the image. Default: False

    Returns
    -------
    ndarray: The image with bboxes drawn on it.
    """
    if bboxes.ndim != 2:
        raise ValueError(f"bboxes ndim should be 2, but its ndim is {bboxes.ndim}.")
    if labels.ndim != 1:
        raise ValueError(f"labels ndim should be 1, but its ndim is {labels.ndim}.")
    if len(bboxes) != len(labels):
        raise ValueError("bboxes and labels must have the same length.")

    img = np.ascontiguousarray(img)

    if score_thr > 0:
        if bboxes.shape[1] != 5:
            raise ValueError("bbox scores must be provided if thresholding.")
        # Keep bboxes/labels/segms in lockstep when filtering by score.
        scores = bboxes[:, -1]
        inds = scores > score_thr
        bboxes = bboxes[inds, :]
        labels = labels[inds]
        if segms is not None:
            segms = segms[inds, ...]

    mask_colors = []
    if labels.shape[0] > 0:
        # Deterministic "random" mask colors, one per label value.
        # NOTE(review): np.random.seed mutates global RNG state; kept as-is
        # since changing it would alter the generated colors.
        np.random.seed(42)
        mask_colors = [
            np.random.randint(0, 256, (1, 3), dtype=np.uint8)
            for _ in range(max(labels) + 1)
        ]

    bbox_color = (0, 1, 0)
    text_color = (0, 1, 0)

    # remove white edges by set subplot margin
    plt.subplots_adjust(left=0, right=1, bottom=0, top=1)
    ax = plt.gca()
    ax.axis("off")
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)

    polygons = []
    color = []
    for i, (bbox, label) in enumerate(zip(bboxes, labels)):
        if np.isnan(bbox).any():
            continue

        # Skip tiny boxes to avoid clutter (bbox is [x0, y0, x1, y1, ...]).
        bbox_area = (bbox[2] - bbox[0]) * (bbox[3] - bbox[1])
        if bbox_area < min_area:
            continue

        bbox_int = bbox.astype(np.int32)
        poly = [
            [bbox_int[0], bbox_int[1]],
            [bbox_int[0], bbox_int[3]],
            [bbox_int[2], bbox_int[3]],
            [bbox_int[2], bbox_int[1]],
        ]
        np_poly = np.array(poly).reshape((4, 2))
        polygons.append(Polygon(np_poly))
        color.append(bbox_color)
        label_text = class_names[label] if class_names is not None else f"class {label}"
        if len(bbox) > 4:
            # Append detection score when the 5th column is present.
            label_text += f"|{bbox[-1]:.02f}"
        ax.text(
            bbox_int[0],
            bbox_int[1],
            f"{label_text}",
            bbox={"facecolor": "black", "alpha": 0.8, "pad": 0.7, "edgecolor": "none"},
            color=text_color,
            fontsize=font_size,
            verticalalignment="top",
            horizontalalignment="left",
        )
        if segms is not None:
            # 50/50 alpha blend of the mask color into the image, in place.
            color_mask = mask_colors[labels[i]]
            mask = segms[i].astype(bool)
            img[mask] = img[mask] * 0.5 + color_mask * 0.5

    plt.imshow(img)

    p = PatchCollection(
        polygons, facecolor="none", edgecolors=color, linewidths=thickness
    )
    ax.add_collection(p)

    if show:
        # We do not use cv2 for display because in some cases, opencv will
        # conflict with Qt, it will output a warning: Current thread
        # is not the object's thread. You can refer to
        # https://github.com/opencv/opencv-python/issues/46 for details
        plt.show()

    return img


def visualize_instances(
    instances_dict: Dict[str, np.ndarray],
    bboxes_dict: Optional[Dict[str, np.ndarray]] = None,
    rgb: Optional[np.ndarray] = None,
    dpi: int = 300,
    **kwargs,
) -> np.ndarray:
    """Plot object masks and bounding boxes over rgb image.

    Parameters
    ----------
    instances_dict : dict
        Dictionary mapping object label to a (N, H, W) boolean mask.
    bboxes_dict : dict
        Normalized (0-1) bboxes per label. If not provided, computed from
        ``instances_dict``.
    rgb : numpy.ndarray
        uint8 rgb image.
        If not provided, defaults to a black background.
    dpi: int
        DPI to render output figure at.
        300 is good for a high resolution visualization.
        100 is good for a low resolution visualization.
        Defaults 300.
    **kwargs
        Passed along to ``plot_instances``. See ``plot_instances``.

    Returns
    -------
    np.ndarray
        RGB visualization of instance segmentation masks and bboxes.
    """
    if rgb is None:
        # Make a dummy black background sized from the first mask stack.
        # (next(iter(...)) avoids materializing all values just for one.)
        h, w = next(iter(instances_dict.values())).shape[1:3]
        rgb = np.zeros((h, w, 3), dtype=np.uint8)
    else:
        rgb = rgb.copy()

    if not instances_dict:
        return rgb

    if bboxes_dict is None:
        bboxes_dict = instance_segmentation_bboxes(instances_dict)

    classes = list(instances_dict.keys())
    h, w = rgb.shape[:2]

    segms_list = list(instances_dict.values())
    segms = np.concatenate(segms_list, axis=0)

    # Stack per-class bboxes (class order matches ``classes``) and scale
    # from normalized coordinates to pixel coordinates.
    bboxes = np.vstack([bboxes_dict[cls] for cls in classes])
    bboxes[:, 0] *= w
    bboxes[:, 1] *= h
    bboxes[:, 2] *= w
    bboxes[:, 3] *= h

    # One integer label per instance row, indexing into ``classes``.
    labels = [np.full(s.shape[0], i, dtype=np.int32) for i, s in enumerate(segms_list)]
    labels = np.concatenate(labels)

    plot_instances(rgb, bboxes, labels, segms=segms, class_names=classes, **kwargs)
    viz = plt_to_numpy(dpi=dpi)
    return viz

2 | 3 |


4 | 5 | # GeoSynth: A Photorealistic Synthetic Indoor Dataset for Scene Understanding 6 | 7 | [![pypi](https://img.shields.io/pypi/v/geosynth.svg)](https://pypi.python.org/pypi/geosynth) 8 | [![paper](https://img.shields.io/badge/ieee%20vr-paper-green)](https://ieeexplore.ieee.org/document/10050341) 9 | [![Documentation Status](https://readthedocs.org/projects/geosynth/badge/?version=latest)](https://geosynth.readthedocs.io/en/latest/?badge=latest) 10 | 11 | 12 | Deep learning has revolutionized many scene perception tasks 13 | over the past decade. Some of these improvements can be attributed 14 | to the development of large labeled datasets. The creation of such 15 | datasets can be an expensive, time-consuming, and is an imperfect 16 | process. To address these issues, we introduce GeoSynth, a diverse 17 | photorealistic synthetic dataset for indoor scene perception tasks. 18 | Each GeoSynth exemplar contains rich labels, including segmentation, 19 | geometry, camera parameters, surface material, lighting, and 20 | more. We demonstrate that supplementing real training data with 21 | GeoSynth can significantly improve network performance on 22 | perception tasks, like semantic segmentation. 23 | 24 | Watch our [presentation from IEEE VR 2023](https://www.youtube.com/watch?v=_M1WL3oP8mQ). 25 | 26 | GeoSynth is used internally at [Geomagical Labs](https://www.geomagical.com) to help power the [IKEA Kreativ](https://www.ikea.com/us/en/home-design/) home design experience. 27 | 28 | 29 | # Installation 30 | GeoSynth requires Python ``>=3.8`` and can be installed via: 31 | 32 | ```bash 33 | pip install geosynth 34 | ``` 35 | 36 | This installs: 37 | 1. The `geosynth` python library, providing a pythonic interface for 38 | reading and processing GeoSynth data. 39 | 2. The `geosynth` command-line tool, which offers a convenient way of 40 | downloading the geosynth dataset. 41 | 42 | Some optional visualization tools will require `matplotlib`. 
43 | 44 | # Dataset Download 45 | 46 | **Attention!** Currently only the `demo` variant is available, the full dataset and additional datatypes will be released in the near future. 47 | 48 | 55 | 56 | To download just a few scenes of the dataset, download the `demo` variant. 57 | The `demo` variant is the default `--variant` option: 58 | 59 | 60 | ```bash 61 | geosynth download non-hdr --variant=demo 62 | ``` 63 | 64 | If you also wish to include HDR data, specify `all`, instead. 65 | The HDR data more than doubles the size of the download, so only download it if you need it. 66 | It is recommended to only specify the data types you need. 67 | 68 | By default, the contents will be downloaded to `~/data/geosynth/`. 69 | To specify an alternative download location, specify the `--dst` argument. 70 | 71 | See all download options by running `geosynth download --help`: 72 | 73 | ```bash 74 | $ geosynth download --help 75 | 76 | Usage: geosynth download [OPTIONS] DTYPES... 77 | 78 | Download the GeoSynth data. 79 | 80 | ╭─ Arguments ──────────────────────────────────────────────────────────────────────────╮ 81 | │ * dtypes DTYPES... Assets to download. Either specify "non-hdr", "all", or │ 82 | │ a subset of: [cube_environment_map, depth, extrinsics, │ 83 | │ gravity, hdr_cube_environment_map, hdr_reflectance, │ 84 | │ hdr_residual, hdr_rgb, hdr_shading, │ 85 | │ hdr_sphere_environment_map, instance_segmentation, │ 86 | │ intrinsics, layout_lines_full, layout_lines_occluded, │ 87 | │ layout_lines_visible, lighting, normals, reflectance, │ 88 | │ residual, rgb, semantic_segmentation, shading, │ 89 | │ sphere_environment_map]). │ 90 | │ [required] │ 91 | ╰──────────────────────────────────────────────────────────────────────────────────────╯ 92 | ╭─ Options ────────────────────────────────────────────────────────────────────────────╮ 93 | │ --dst PATH GeoSynth download directory. 
│ 94 | │ [default: ~/data/geosynth] │ 95 | │ --variant [demo|full] Variant of dataset to download. │ 96 | │ [default: demo] │ 97 | │ --force --no-force Force a re-download, despite locally │ 98 | │ cached files. │ 99 | │ [default: no-force] │ 100 | │ --cleanup --no-cleanup Delete zip files after unzipping. │ 101 | │ [default: cleanup] │ 102 | │ --help Show this message and exit. │ 103 | ╰──────────────────────────────────────────────────────────────────────────────────────╯ 104 | ``` 105 | 106 | 124 | 125 | 126 | # Usage 127 | Once the dataset has been downloaded, data can be accessed in python: 128 | 129 | ```python 130 | from geosynth import GeoSynth 131 | 132 | geosynth = GeoSynth("PATH_TO_DATA") # or leave empty for default "~/data/geosynth/". 133 | 134 | print(f"GeoSynth has {len(geosynth)} scenes.") 135 | 136 | scene = geosynth[100] # Data can be accessed via indexing like a list. 137 | 138 | # or iterated over in a for loop: 139 | for scene in dataset: 140 | # Each Scene object contains attributres for each datatype. 141 | # Contents can be read from disk via the ``read`` method. 142 | rgb = scene.rgb.read() # (H, W, 3) np.ndarray 143 | depth = scene.depth.read() # (H, W) np.ndarray 144 | intrinsics = scene.intrinsics.read() # (3, 3) camera intrinsics 145 | instances = scene.instance_segmentation.read() # dictionary of instance masks. 146 | 147 | # many datatypes have a ``visualize`` method 148 | depth_viz = scene.depth.visualize(depth) # Returns a (H,W,3) turbo-colorized image. 149 | instances_viz = scene.instance_segmentation.visualize(instances, rgb=rgb) 150 | ``` 151 | 152 | # License 153 | The GeoSynth **code** is released under the [Apache-2.0 License](https://www.apache.org/licenses/LICENSE-2.0.html). 
154 | 155 | The GeoSynth **data** provided at `storage.googleapis.com/geomagical-geosynth-public` is available under the [Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0) License](https://creativecommons.org/licenses/by-nc-sa/4.0/). 156 | -------------------------------------------------------------------------------- /geosynth/mappings/_turbo.py: -------------------------------------------------------------------------------- 1 | """Turbo colormap. 2 | 3 | https://ai.googleblog.com/2019/08/turbo-improved-rainbow-colormap-for.html 4 | """ 5 | import numpy as np 6 | 7 | from .utils import apply_palette, to_uint8 8 | 9 | turbo_colormap_data_np = np.array( 10 | [ 11 | [0.18995, 0.07176, 0.23217], 12 | [0.19483, 0.08339, 0.26149], 13 | [0.19956, 0.09498, 0.29024], 14 | [0.20415, 0.10652, 0.31844], 15 | [0.20860, 0.11802, 0.34607], 16 | [0.21291, 0.12947, 0.37314], 17 | [0.21708, 0.14087, 0.39964], 18 | [0.22111, 0.15223, 0.42558], 19 | [0.22500, 0.16354, 0.45096], 20 | [0.22875, 0.17481, 0.47578], 21 | [0.23236, 0.18603, 0.50004], 22 | [0.23582, 0.19720, 0.52373], 23 | [0.23915, 0.20833, 0.54686], 24 | [0.24234, 0.21941, 0.56942], 25 | [0.24539, 0.23044, 0.59142], 26 | [0.24830, 0.24143, 0.61286], 27 | [0.25107, 0.25237, 0.63374], 28 | [0.25369, 0.26327, 0.65406], 29 | [0.25618, 0.27412, 0.67381], 30 | [0.25853, 0.28492, 0.69300], 31 | [0.26074, 0.29568, 0.71162], 32 | [0.26280, 0.30639, 0.72968], 33 | [0.26473, 0.31706, 0.74718], 34 | [0.26652, 0.32768, 0.76412], 35 | [0.26816, 0.33825, 0.78050], 36 | [0.26967, 0.34878, 0.79631], 37 | [0.27103, 0.35926, 0.81156], 38 | [0.27226, 0.36970, 0.82624], 39 | [0.27334, 0.38008, 0.84037], 40 | [0.27429, 0.39043, 0.85393], 41 | [0.27509, 0.40072, 0.86692], 42 | [0.27576, 0.41097, 0.87936], 43 | [0.27628, 0.42118, 0.89123], 44 | [0.27667, 0.43134, 0.90254], 45 | [0.27691, 0.44145, 0.91328], 46 | [0.27701, 0.45152, 0.92347], 47 | [0.27698, 0.46153, 0.93309], 48 | [0.27680, 0.47151, 0.94214], 49 | [0.27648, 
0.48144, 0.95064], 50 | [0.27603, 0.49132, 0.95857], 51 | [0.27543, 0.50115, 0.96594], 52 | [0.27469, 0.51094, 0.97275], 53 | [0.27381, 0.52069, 0.97899], 54 | [0.27273, 0.53040, 0.98461], 55 | [0.27106, 0.54015, 0.98930], 56 | [0.26878, 0.54995, 0.99303], 57 | [0.26592, 0.55979, 0.99583], 58 | [0.26252, 0.56967, 0.99773], 59 | [0.25862, 0.57958, 0.99876], 60 | [0.25425, 0.58950, 0.99896], 61 | [0.24946, 0.59943, 0.99835], 62 | [0.24427, 0.60937, 0.99697], 63 | [0.23874, 0.61931, 0.99485], 64 | [0.23288, 0.62923, 0.99202], 65 | [0.22676, 0.63913, 0.98851], 66 | [0.22039, 0.64901, 0.98436], 67 | [0.21382, 0.65886, 0.97959], 68 | [0.20708, 0.66866, 0.97423], 69 | [0.20021, 0.67842, 0.96833], 70 | [0.19326, 0.68812, 0.96190], 71 | [0.18625, 0.69775, 0.95498], 72 | [0.17923, 0.70732, 0.94761], 73 | [0.17223, 0.71680, 0.93981], 74 | [0.16529, 0.72620, 0.93161], 75 | [0.15844, 0.73551, 0.92305], 76 | [0.15173, 0.74472, 0.91416], 77 | [0.14519, 0.75381, 0.90496], 78 | [0.13886, 0.76279, 0.89550], 79 | [0.13278, 0.77165, 0.88580], 80 | [0.12698, 0.78037, 0.87590], 81 | [0.12151, 0.78896, 0.86581], 82 | [0.11639, 0.79740, 0.85559], 83 | [0.11167, 0.80569, 0.84525], 84 | [0.10738, 0.81381, 0.83484], 85 | [0.10357, 0.82177, 0.82437], 86 | [0.10026, 0.82955, 0.81389], 87 | [0.09750, 0.83714, 0.80342], 88 | [0.09532, 0.84455, 0.79299], 89 | [0.09377, 0.85175, 0.78264], 90 | [0.09287, 0.85875, 0.77240], 91 | [0.09267, 0.86554, 0.76230], 92 | [0.09320, 0.87211, 0.75237], 93 | [0.09451, 0.87844, 0.74265], 94 | [0.09662, 0.88454, 0.73316], 95 | [0.09958, 0.89040, 0.72393], 96 | [0.10342, 0.89600, 0.71500], 97 | [0.10815, 0.90142, 0.70599], 98 | [0.11374, 0.90673, 0.69651], 99 | [0.12014, 0.91193, 0.68660], 100 | [0.12733, 0.91701, 0.67627], 101 | [0.13526, 0.92197, 0.66556], 102 | [0.14391, 0.92680, 0.65448], 103 | [0.15323, 0.93151, 0.64308], 104 | [0.16319, 0.93609, 0.63137], 105 | [0.17377, 0.94053, 0.61938], 106 | [0.18491, 0.94484, 0.60713], 107 | [0.19659, 0.94901, 0.59466], 
108 | [0.20877, 0.95304, 0.58199], 109 | [0.22142, 0.95692, 0.56914], 110 | [0.23449, 0.96065, 0.55614], 111 | [0.24797, 0.96423, 0.54303], 112 | [0.26180, 0.96765, 0.52981], 113 | [0.27597, 0.97092, 0.51653], 114 | [0.29042, 0.97403, 0.50321], 115 | [0.30513, 0.97697, 0.48987], 116 | [0.32006, 0.97974, 0.47654], 117 | [0.33517, 0.98234, 0.46325], 118 | [0.35043, 0.98477, 0.45002], 119 | [0.36581, 0.98702, 0.43688], 120 | [0.38127, 0.98909, 0.42386], 121 | [0.39678, 0.99098, 0.41098], 122 | [0.41229, 0.99268, 0.39826], 123 | [0.42778, 0.99419, 0.38575], 124 | [0.44321, 0.99551, 0.37345], 125 | [0.45854, 0.99663, 0.36140], 126 | [0.47375, 0.99755, 0.34963], 127 | [0.48879, 0.99828, 0.33816], 128 | [0.50362, 0.99879, 0.32701], 129 | [0.51822, 0.99910, 0.31622], 130 | [0.53255, 0.99919, 0.30581], 131 | [0.54658, 0.99907, 0.29581], 132 | [0.56026, 0.99873, 0.28623], 133 | [0.57357, 0.99817, 0.27712], 134 | [0.58646, 0.99739, 0.26849], 135 | [0.59891, 0.99638, 0.26038], 136 | [0.61088, 0.99514, 0.25280], 137 | [0.62233, 0.99366, 0.24579], 138 | [0.63323, 0.99195, 0.23937], 139 | [0.64362, 0.98999, 0.23356], 140 | [0.65394, 0.98775, 0.22835], 141 | [0.66428, 0.98524, 0.22370], 142 | [0.67462, 0.98246, 0.21960], 143 | [0.68494, 0.97941, 0.21602], 144 | [0.69525, 0.97610, 0.21294], 145 | [0.70553, 0.97255, 0.21032], 146 | [0.71577, 0.96875, 0.20815], 147 | [0.72596, 0.96470, 0.20640], 148 | [0.73610, 0.96043, 0.20504], 149 | [0.74617, 0.95593, 0.20406], 150 | [0.75617, 0.95121, 0.20343], 151 | [0.76608, 0.94627, 0.20311], 152 | [0.77591, 0.94113, 0.20310], 153 | [0.78563, 0.93579, 0.20336], 154 | [0.79524, 0.93025, 0.20386], 155 | [0.80473, 0.92452, 0.20459], 156 | [0.81410, 0.91861, 0.20552], 157 | [0.82333, 0.91253, 0.20663], 158 | [0.83241, 0.90627, 0.20788], 159 | [0.84133, 0.89986, 0.20926], 160 | [0.85010, 0.89328, 0.21074], 161 | [0.85868, 0.88655, 0.21230], 162 | [0.86709, 0.87968, 0.21391], 163 | [0.87530, 0.87267, 0.21555], 164 | [0.88331, 0.86553, 0.21719], 165 
| [0.89112, 0.85826, 0.21880], 166 | [0.89870, 0.85087, 0.22038], 167 | [0.90605, 0.84337, 0.22188], 168 | [0.91317, 0.83576, 0.22328], 169 | [0.92004, 0.82806, 0.22456], 170 | [0.92666, 0.82025, 0.22570], 171 | [0.93301, 0.81236, 0.22667], 172 | [0.93909, 0.80439, 0.22744], 173 | [0.94489, 0.79634, 0.22800], 174 | [0.95039, 0.78823, 0.22831], 175 | [0.95560, 0.78005, 0.22836], 176 | [0.96049, 0.77181, 0.22811], 177 | [0.96507, 0.76352, 0.22754], 178 | [0.96931, 0.75519, 0.22663], 179 | [0.97323, 0.74682, 0.22536], 180 | [0.97679, 0.73842, 0.22369], 181 | [0.98000, 0.73000, 0.22161], 182 | [0.98289, 0.72140, 0.21918], 183 | [0.98549, 0.71250, 0.21650], 184 | [0.98781, 0.70330, 0.21358], 185 | [0.98986, 0.69382, 0.21043], 186 | [0.99163, 0.68408, 0.20706], 187 | [0.99314, 0.67408, 0.20348], 188 | [0.99438, 0.66386, 0.19971], 189 | [0.99535, 0.65341, 0.19577], 190 | [0.99607, 0.64277, 0.19165], 191 | [0.99654, 0.63193, 0.18738], 192 | [0.99675, 0.62093, 0.18297], 193 | [0.99672, 0.60977, 0.17842], 194 | [0.99644, 0.59846, 0.17376], 195 | [0.99593, 0.58703, 0.16899], 196 | [0.99517, 0.57549, 0.16412], 197 | [0.99419, 0.56386, 0.15918], 198 | [0.99297, 0.55214, 0.15417], 199 | [0.99153, 0.54036, 0.14910], 200 | [0.98987, 0.52854, 0.14398], 201 | [0.98799, 0.51667, 0.13883], 202 | [0.98590, 0.50479, 0.13367], 203 | [0.98360, 0.49291, 0.12849], 204 | [0.98108, 0.48104, 0.12332], 205 | [0.97837, 0.46920, 0.11817], 206 | [0.97545, 0.45740, 0.11305], 207 | [0.97234, 0.44565, 0.10797], 208 | [0.96904, 0.43399, 0.10294], 209 | [0.96555, 0.42241, 0.09798], 210 | [0.96187, 0.41093, 0.09310], 211 | [0.95801, 0.39958, 0.08831], 212 | [0.95398, 0.38836, 0.08362], 213 | [0.94977, 0.37729, 0.07905], 214 | [0.94538, 0.36638, 0.07461], 215 | [0.94084, 0.35566, 0.07031], 216 | [0.93612, 0.34513, 0.06616], 217 | [0.93125, 0.33482, 0.06218], 218 | [0.92623, 0.32473, 0.05837], 219 | [0.92105, 0.31489, 0.05475], 220 | [0.91572, 0.30530, 0.05134], 221 | [0.91024, 0.29599, 0.04814], 222 | 
def turbo(x: np.ndarray, min, max) -> np.ndarray:
    """Apply turbo colormap to data.

    Parameters
    ----------
    x : np.ndarray
        Data to colorize.
    min
        Value mapped to the low end of the colormap; smaller values saturate.
    max
        Value mapped to the high end of the colormap; larger values saturate.

    Returns
    -------
    np.ndarray
        Turbo-colorized data.

    Modified from:

    https://gist.github.com/mikhailov-work/ee72ba4191942acecc03fe6da94fc73f?permalink_comment_id=3122026#gistcomment-3122026
    """
    # NOTE: ``min``/``max`` shadow the builtins, but renaming them would break
    # keyword-argument callers, so the names are kept.
    if max == min:
        # Degenerate range: avoid division by zero; map everything to the
        # low end of the colormap.
        x = np.zeros_like(x, dtype=np.float64)
    else:
        # Normalize to be in range [0, 1].
        # Bugfix: the previous ``x.clip(min, max) / max`` only yielded [0, 1]
        # when ``min == 0``; subtracting ``min`` handles any finite range.
        x = (x.clip(min, max) - min) / (max - min)
    return apply_palette(turbo_colormap_data_np, x)
Either specify "non-hdr", "all", or │ 42 | │ a subset of: [cube_environment_map, depth, extrinsics, │ 43 | │ gravity, hdr_cube_environment_map, hdr_reflectance, │ 44 | │ hdr_residual, hdr_rgb, hdr_shading, │ 45 | │ hdr_sphere_environment_map, instance_segmentation, │ 46 | │ intrinsics, layout_lines_full, layout_lines_occluded, │ 47 | │ layout_lines_visible, lighting, normals, reflectance, │ 48 | │ residual, rgb, semantic_segmentation, shading, │ 49 | │ sphere_environment_map]). │ 50 | │ [required] │ 51 | ╰──────────────────────────────────────────────────────────────────────────────────────╯ 52 | ╭─ Options ────────────────────────────────────────────────────────────────────────────╮ 53 | │ --dst PATH GeoSynth download directory. │ 54 | │ [default: ~/data/geosynth] │ 55 | │ --variant [demo|full] Variant of dataset to download. │ 56 | │ [default: demo] │ 57 | │ --force --no-force Force a re-download, despite locally │ 58 | │ cached files. │ 59 | │ [default: no-force] │ 60 | │ --cleanup --no-cleanup Delete zip files after unzipping. │ 61 | │ [default: cleanup] │ 62 | │ --help Show this message and exit. │ 63 | ╰──────────────────────────────────────────────────────────────────────────────────────╯ 64 | 65 | Once downloaded, it's recommend to access data in the following manner: 66 | 67 | .. code-block:: python 68 | 69 | from geosynth import GeoSynth 70 | 71 | geosynth = GeoSynth("PATH_TO_DATA") # or leave empty for default "~/data/geosynth/". 72 | 73 | print(f"GeoSynth has {len(geosynth)} scenes.") 74 | 75 | scene = geosynth[100] # Data can be accessed via indexing like a list. 76 | 77 | # or iterated over in a for loop: 78 | for scene in dataset: 79 | # Each Scene object contains attributres for each datatype. 80 | # Contents can be read from disk via the ``read`` method. 
81 | rgb = scene.rgb.read() # (H, W, 3) np.ndarray 82 | depth = scene.depth.read() # (H, W) np.ndarray 83 | intrinsics = scene.intrinsics.read() # (3, 3) camera intrinsics 84 | instances = scene.instance_segmentation.read() # dictionary of instance masks. 85 | 86 | # many datatypes have a ``visualize`` method 87 | depth_viz = scene.depth.visualize(depth) # Returns a (H,W,3) turbo-colorized image. 88 | instances_viz = scene.instance_segmentation.visualize(instances, rgb=rgb) 89 | 90 | 91 | In python snippets below, the ``scene`` variable in this example will be referenced. 92 | 93 | When data involves a coordinate system, GeoSynth abides by the following right-hand convention: 94 | 95 | +-------+-----------+ 96 | | Label | Direction | 97 | +=======+===========+ 98 | | +X | Right | 99 | +-------+-----------+ 100 | | +Y | Down | 101 | +-------+-----------+ 102 | | +Z | Forward | 103 | +-------+-----------+ 104 | 105 | Images in subsequent sections represent visualizations and may not be a direct 106 | representation of the data itself. 107 | 108 | 109 | rgb 110 | ^^^ 111 | 112 | Image of the scene in RGB-channel order. 113 | Data is a ``uint8`` numpy array of shape ``(720, 1280, 3)``. 114 | 115 | .. image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/rgb.jpg 116 | :width: 800 117 | :alt: rgb 118 | 119 | hdr_rgb 120 | ^^^^^^^ 121 | High dynamic range version of `rgb`_. 122 | 123 | depth 124 | ^^^^^ 125 | 126 | Depthmap of the scene in meters. 127 | Data is a ``float32`` numpy array of shape ``(720, 1280)``. 128 | 129 | .. image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/depth.jpg 130 | :width: 800 131 | :alt: depth 132 | 133 | normals 134 | ^^^^^^^ 135 | 136 | Surface normals of the scene. 137 | Data is a ``float32`` numpy array of shape ``(720, 1280, 3)``. 138 | Each pixel represents a unit-norm ``(x, y, z)`` vector pointing away from the camera. 139 | 140 | .. 
image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/normals.jpg 141 | :width: 800 142 | :alt: normals 143 | 144 | intrinsics 145 | ^^^^^^^^^^ 146 | Standard ``(3, 3)`` camera intrinsics. 147 | 148 | 149 | .. code-block:: python 150 | 151 | array( 152 | [ 153 | [1.0319166e03, 0.0000000e00, 6.4000000e02], 154 | [0.0000000e00, 1.0319166e03, 3.6000000e02], 155 | [0.0000000e00, 0.0000000e00, 1.0000000e00], 156 | ], 157 | dtype=float32, 158 | ) 159 | 160 | 161 | gravity 162 | ^^^^^^^ 163 | A unit-norm ``float32`` numpy array of shape ``(3)``. 164 | Points in the direction of gravity, and will typically agree with floor surface normals. 165 | Usually, gravity will be mostly ``+Y`` component. 166 | 167 | .. code-block:: python 168 | 169 | array([-0.07105822, 0.9611216, 0.26682544], dtype=float32) 170 | 171 | 172 | semantic_segmentation 173 | ^^^^^^^^^^^^^^^^^^^^^ 174 | GeoSynth semantic classes are an extension of NYU40 labels. 175 | A tuple of these strings exists at ``scene.semantic_segmentation.CLASSES``. 
176 | For clarity, the classes and their indices are reproduced here: 177 | 178 | +-------+-----------+-------+----------------+-------+----------------+ 179 | | Index | Label | Index | Label | Index | Label | 180 | +=======+===========+=======+================+=======+================+ 181 | | 0 | unknown | 16 | curtain | 32 | night stand | 182 | +-------+-----------+-------+----------------+-------+----------------+ 183 | | 1 | wall | 17 | dresser | 33 | toilet | 184 | +-------+-----------+-------+----------------+-------+----------------+ 185 | | 2 | floor | 18 | pillow | 34 | sink | 186 | +-------+-----------+-------+----------------+-------+----------------+ 187 | | 3 | cabinet | 19 | mirror | 35 | lamp | 188 | +-------+-----------+-------+----------------+-------+----------------+ 189 | | 4 | bed | 20 | floor mat | 36 | bathtub | 190 | +-------+-----------+-------+----------------+-------+----------------+ 191 | | 5 | chair | 21 | clothes | 37 | bag | 192 | +-------+-----------+-------+----------------+-------+----------------+ 193 | | 6 | sofa | 22 | ceiling | 38 | otherstructure | 194 | +-------+-----------+-------+----------------+-------+----------------+ 195 | | 7 | table | 23 | books | 39 | otherfurniture | 196 | +-------+-----------+-------+----------------+-------+----------------+ 197 | | 8 | door | 24 | refrigerator | 40 | otherprop | 198 | +-------+-----------+-------+----------------+-------+----------------+ 199 | | 9 | window | 25 | television | 41 | plant | 200 | +-------+-----------+-------+----------------+-------+----------------+ 201 | | 10 | bookshelf | 26 | paper | 42 | doorframe | 202 | +-------+-----------+-------+----------------+-------+----------------+ 203 | | 11 | picture | 27 | towel | 43 | windowframe | 204 | +-------+-----------+-------+----------------+-------+----------------+ 205 | | 12 | counter | 28 | shower curtain | 44 | baseboard | 206 | +-------+-----------+-------+----------------+-------+----------------+ 207 | | 13 | 
blinds | 29 | box | 45 | lighton | 208 | +-------+-----------+-------+----------------+-------+----------------+ 209 | | 14 | desk | 30 | whiteboard | 46 | lightoff | 210 | +-------+-----------+-------+----------------+-------+----------------+ 211 | | 15 | shelves | 31 | person | 47 | curtainrod | 212 | +-------+-----------+-------+----------------+-------+----------------+ 213 | 214 | 215 | .. image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/semantic_segmentation.jpg 216 | :width: 800 217 | :alt: semantic_segmentation 218 | 219 | 220 | instance_segmentation 221 | ^^^^^^^^^^^^^^^^^^^^^ 222 | Instance segmentation masks with same labels as `semantic_segmentation`_. 223 | Data is represented as a dictionary, mapping the string labels (like ``"chair"``) 224 | to ``(N, H, W)`` boolean numpy masks, where ``N`` is the number of those instances in the scene. 225 | If bounding boxes are needed, see functions ``geosynth.instance_bbox`` and ``geosynth.instance_segmentation_bboxes``. 226 | 227 | 228 | .. image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/instance_segmentation.jpg 229 | :width: 800 230 | :alt: instance_segmentation 231 | 232 | shading 233 | ^^^^^^^ 234 | Intrinsic image decomposition component that represents elements coming from **direct** lighting. 235 | 236 | .. image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/shading.jpg 237 | :width: 800 238 | :alt: shading 239 | 240 | reflectance 241 | ^^^^^^^^^^^ 242 | Intrinsic image decomposition component that represents the intrinsic color of the surface (i.e. albedo). 243 | 244 | .. image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/reflectance.jpg 245 | :width: 800 246 | :alt: reflectance 247 | 248 | residual 249 | ^^^^^^^^ 250 | Intrinsic image decomposition component that represents elements coming from **indirect** lighting. 251 | 252 | .. 
image:: ../../assets/scenes/AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7/residual.jpg 253 | :width: 800 254 | :alt: instance_segmentation 255 | 256 | hdr_shading 257 | ^^^^^^^^^^^ 258 | High dynamic range version of `shading`_. 259 | 260 | hdr_reflectance 261 | ^^^^^^^^^^^^^^^ 262 | High dynamic range version of `reflectance`_. 263 | 264 | hdr_residual 265 | ^^^^^^^^^^^^ 266 | High dynamic range version of `residual`_. 267 | -------------------------------------------------------------------------------- /geosynth/data.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import json 3 | import os 4 | import urllib.request 5 | from abc import abstractmethod 6 | from enum import Enum 7 | from pathlib import Path 8 | from typing import Any, Dict, Optional, Protocol, Union 9 | 10 | import cv2 11 | import numpy as np 12 | from autoregistry import Registry 13 | 14 | from ._visualize_instances import visualize_instances 15 | from .common import PathLike 16 | from .mappings import SemanticClassesMixin, apply_palette, to_uint8, turbo 17 | from .models.lighting import Lighting as LightingModel 18 | 19 | _DOWNLOAD_PREFIX = "https://storage.googleapis.com/geomagical-geosynth-public" 20 | 21 | 22 | class OpenCVSaveError(Exception): 23 | """Error saving file with opencv.""" 24 | 25 | 26 | class UrlRetrieveReportHook(Protocol): 27 | def __call__(self, block_num: int, block_size: int, total_size: int) -> Any: 28 | ... 29 | 30 | 31 | class DatasetVariant(str, Enum): 32 | """Available dataset variants for download. 
33 | 34 | The "demo" variant contains the following scenes: 35 | * AI043_007_v001-8e009bbdcbffb624b8d86b0005a01915 36 | * AI043_008_v001-43f091c0ab99ee97f02204db92babad3 37 | * AI043_010_v001-2b71d64e5d04563b56e0d3e5725307d3 38 | * AI48_003_v001-0a825c69869524ed2518d04de356504d 39 | * AI48_006_v001-6b752db1da84a977212a6dd18f3cddf7 40 | * AI48_009_v001-2d5dc4fb7323f2aae0a91430bdadf5ee 41 | """ 42 | 43 | demo = "demo" 44 | full = "full" 45 | 46 | def __str__(self): 47 | # https://github.com/tiangolo/typer/issues/290#issuecomment-860275264 48 | return self.value 49 | 50 | 51 | class Data(Registry, snake_case=True): 52 | """Abstract Base Class for all data types.""" 53 | 54 | ext: str 55 | 56 | @classmethod 57 | @abstractmethod 58 | def read_file(cls, fn: Path): 59 | raise NotImplementedError 60 | 61 | @classmethod 62 | @abstractmethod 63 | def write_file(cls, fn: Path, data: Any) -> None: 64 | raise NotImplementedError 65 | 66 | def __init_subclass__(cls, **kwargs): 67 | super().__init_subclass__(**kwargs) 68 | 69 | if not hasattr(cls, "ext"): 70 | raise ValueError(f"{cls.__name__}.ext must define ``ext``.") 71 | 72 | if not cls.ext.startswith("."): 73 | raise ValueError(f"{cls.__name__}.ext must start with '.'") 74 | 75 | def __init__(self, scene_path: PathLike): 76 | self.scene_path = Path(scene_path) 77 | 78 | @property 79 | def stem(self) -> str: 80 | """Stem of expected file on-disk.""" 81 | return type(self).__registry__.name 82 | 83 | @property 84 | def path(self) -> Path: 85 | """Path to file on-disk.""" 86 | ext = self.ext 87 | return self.scene_path / (self.stem + ext) 88 | 89 | def exists(self) -> bool: 90 | """Whether or not the file exists on-disk.""" 91 | return self.path.exists() 92 | 93 | @classmethod 94 | def visualize(cls, data) -> np.ndarray: 95 | """Produce a uint8 RGB visualization of data.""" 96 | raise NotImplementedError 97 | 98 | def read(self, *args, **kwargs): 99 | if not self.exists(): 100 | # Explicitly check for file existence here so 
that a consistent 101 | # exception is raised instead of letting downstream readers decide. 102 | raise OSError(errno.ENOENT, os.strerror(errno.ENOENT), str(self.path)) 103 | return self.read_file(self.path, *args, **kwargs) 104 | 105 | def write(self, data: Any, *args, **kwargs) -> None: 106 | self.scene_path.mkdir(parents=True, exist_ok=True) 107 | return self.write_file(self.path, data, *args, *kwargs) 108 | 109 | @classmethod 110 | def download_zip( 111 | cls, 112 | output_dir: PathLike, 113 | variant: str = "full", 114 | force: bool = False, 115 | reporthook: Optional[UrlRetrieveReportHook] = None, 116 | ) -> Path: 117 | """Download a GeoSynth variant zip file. 118 | 119 | Parameters 120 | ---------- 121 | output_dir: PathLike 122 | Output folder to download contents to. 123 | If it doesn't exist, it will be created. 124 | variant: str 125 | Variant of GeoSynth to download. 126 | A variant subfolder in ``output_dir`` will be created. 127 | Defaults to ``"full"``. 128 | force: bool 129 | Force a redownload, despite cached files. 130 | Defaults to ``False``. 131 | reporthook: Optional[UrlRetrieveReportHook] 132 | Optional callable to pass to ``urlretrieve``. 133 | Commonly used for progress updates. 134 | 135 | Returns 136 | ------- 137 | Path 138 | Path to local zip file. 139 | """ 140 | # Argument Preprocessing 141 | variant = variant.lower() 142 | try: 143 | variant = DatasetVariant[variant] 144 | except KeyError as e: 145 | raise ValueError( 146 | f'Variant "{variant}" not in valid. Choose one of: ' 147 | f"{[x.value for x in DatasetVariant]}." 148 | ) from e 149 | 150 | output_dir = Path(output_dir).expanduser() / str(variant.value) 151 | output_dir.mkdir(exist_ok=True, parents=True) 152 | 153 | zip_name = f"{cls.__registry__.name}.zip" 154 | zip_path = output_dir / zip_name 155 | zip_path_tmp = zip_path.with_suffix(".tmp") 156 | 157 | if zip_path_tmp.exists(): # Delete a previous incomplete download. 
158 | zip_path_tmp.unlink() 159 | 160 | if force or not zip_path.exists(): 161 | zip_url = f"{_DOWNLOAD_PREFIX}/{variant.value}/{zip_name}" 162 | urllib.request.urlretrieve( # noqa: S310 163 | zip_url, 164 | filename=zip_path_tmp, 165 | reporthook=reporthook, 166 | ) 167 | zip_path_tmp.rename(zip_path) 168 | elif reporthook: 169 | reporthook(1, 1, 1) # Will set reporthook to done. 170 | 171 | return zip_path 172 | 173 | 174 | class PngMixin: 175 | ext = ".png" 176 | 177 | @classmethod 178 | def read_file(cls, fn: Path) -> np.ndarray: 179 | """Read a png file. 180 | 181 | Returns 182 | ------- 183 | np.ndarray 184 | (H, W, 3) RGB or (H, W) grayscale image. 185 | """ 186 | img = cv2.imread(str(fn), cv2.IMREAD_UNCHANGED) 187 | if img.ndim == 3: 188 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) 189 | return img 190 | 191 | @classmethod 192 | def write_file(cls, fn: Path, data: np.ndarray) -> None: 193 | """Write a png file. 194 | 195 | Parameters 196 | ---------- 197 | data: np.ndarray 198 | (H, W, 3) RGB uint8 image. 
199 | """ 200 | if data.ndim == 3: 201 | data = cv2.cvtColor(data, cv2.COLOR_RGB2BGR) 202 | cv2.imwrite(str(fn), data) 203 | 204 | 205 | class NpzMixin: 206 | """Stores/Reads npz data as-is.""" 207 | 208 | ext = ".npz" 209 | 210 | @classmethod 211 | def read_file(cls, fn: Path) -> Union[Dict[str, np.ndarray], np.ndarray]: 212 | with np.load(fn) as data: 213 | data: Dict[str, np.ndarray] = dict(data) 214 | if len(data) == 1: 215 | return list(data.values())[0] 216 | else: 217 | return data 218 | 219 | @classmethod 220 | def write_file( 221 | cls, 222 | fn: Path, 223 | data: Union[Dict[str, np.ndarray], np.ndarray], 224 | ) -> None: 225 | if isinstance(data, dict): 226 | pass 227 | elif isinstance(data, np.ndarray): 228 | key: str = cls.__registry__.name # type: ignore[reportGeneralTypeIssues] 229 | data = { 230 | key: data, 231 | } 232 | else: 233 | raise TypeError 234 | np.savez_compressed(fn, **data) 235 | 236 | 237 | class NpzFloat16Mixin(NpzMixin): 238 | """Converts stored float16 -> float32 for easier standard processing.""" 239 | 240 | @classmethod 241 | def read_file(cls, fn: Path) -> Union[Dict[str, np.ndarray], np.ndarray]: 242 | out = super().read_file(fn) 243 | if isinstance(out, np.ndarray): 244 | out = out.astype(np.float32) 245 | elif isinstance(out, dict): 246 | out = {k: v.astype(np.float32) for k, v in out.items()} 247 | else: 248 | raise TypeError 249 | return out 250 | 251 | @classmethod 252 | def write_file(cls, fn: Path, data: np.ndarray) -> None: 253 | data = data.astype(np.float16) 254 | super().write_file(fn, data) 255 | 256 | 257 | class HdrMixin: 258 | ext = ".hdr" 259 | 260 | @classmethod 261 | def read_file(cls, fn: Path) -> np.ndarray: 262 | img = cv2.imread(str(fn), cv2.IMREAD_UNCHANGED) 263 | img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) 264 | return img 265 | 266 | @classmethod 267 | def write_file(cls, fn: Path, data: np.ndarray) -> None: 268 | data = data.astype(np.float32) 269 | bgr = cv2.cvtColor(data, cv2.COLOR_RGB2BGR) 270 | if not 
cv2.imwrite(str(fn), bgr): 271 | raise OpenCVSaveError 272 | 273 | 274 | class JsonMixin: 275 | ext = ".json" 276 | 277 | @classmethod 278 | def read_file(cls, fn: Path) -> Dict: 279 | with fn.open("r") as f: 280 | return json.load(f) 281 | 282 | @classmethod 283 | def write_file(cls, fn: Path, data: dict) -> None: 284 | with fn.open("w") as f: 285 | json.dump(data, f) 286 | 287 | 288 | class CubeEnvironmentMap(NpzMixin, Data): 289 | pass 290 | 291 | 292 | class Depth(NpzFloat16Mixin, Data): 293 | @classmethod 294 | def visualize(cls, data, min=0.0, max=10.0) -> np.ndarray: 295 | return turbo(data, min=min, max=max) 296 | 297 | 298 | class Extrinsics(NpzMixin, Data): 299 | pass 300 | 301 | 302 | class Gravity(NpzMixin, Data): 303 | pass 304 | 305 | 306 | class HdrCubeEnvironmentMap(NpzMixin, Data): 307 | pass 308 | 309 | 310 | class HdrReflectance(HdrMixin, Data): 311 | pass 312 | 313 | 314 | class HdrResidual(HdrMixin, Data): 315 | pass 316 | 317 | 318 | class HdrRgb(HdrMixin, Data): 319 | pass 320 | 321 | 322 | class HdrShading(HdrMixin, Data): 323 | pass 324 | 325 | 326 | class HdrSphereEnvironmentMap(HdrMixin, Data): 327 | pass 328 | 329 | 330 | class InstanceSegmentation(NpzMixin, SemanticClassesMixin, Data): 331 | @classmethod 332 | def visualize(cls, data, **kwargs) -> np.ndarray: 333 | return visualize_instances(data, **kwargs) 334 | 335 | 336 | class Intrinsics(NpzMixin, Data): 337 | pass 338 | 339 | 340 | class LayoutLinesFull(NpzMixin, Data): 341 | pass 342 | 343 | 344 | class LayoutLinesOccluded(NpzMixin, Data): 345 | pass 346 | 347 | 348 | class LayoutLinesVisible(NpzMixin, Data): 349 | pass 350 | 351 | 352 | class Lighting(JsonMixin, Data): 353 | @classmethod 354 | def read_file(cls, fn: Path) -> LightingModel: 355 | data = super().read_file(fn) 356 | return LightingModel(**data) 357 | 358 | 359 | class Normals(NpzFloat16Mixin, Data): 360 | @classmethod 361 | def visualize(cls, data) -> np.ndarray: 362 | """Visualize normals with RGB representing 
XYZ.""" 363 | return to_uint8(data / 2 + 0.5) 364 | 365 | 366 | class Reflectance(PngMixin, Data): 367 | pass 368 | 369 | 370 | class Residual(PngMixin, Data): 371 | pass 372 | 373 | 374 | class Rgb(PngMixin, Data): 375 | pass 376 | 377 | 378 | class SemanticSegmentation(PngMixin, SemanticClassesMixin, Data): 379 | @classmethod 380 | def visualize(cls, data) -> np.ndarray: 381 | return apply_palette(np.array(cls.PALETTE), data / 255) 382 | 383 | 384 | class Shading(PngMixin, Data): 385 | pass 386 | 387 | 388 | class SphereEnvironmentMap(PngMixin, Data): 389 | pass 390 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright CURRENT_YEAR_HERE YOUR_NAME_HERE 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------