├── .github ├── dependabot.yml └── workflows │ ├── deploy.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGES.rst ├── CREDITS.txt ├── DEPENDENCIES.txt ├── FAQ.txt ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── benchmarks └── benchmarks.py ├── docs ├── Makefile ├── requirements.txt └── source │ ├── changes.rst │ ├── class.rst │ ├── conf.py │ ├── history.rst │ ├── index.rst │ ├── install.rst │ ├── misc.rst │ ├── performance.rst │ └── tutorial.rst ├── environment.yml ├── pyproject.toml ├── rtree ├── __init__.py ├── core.py ├── exceptions.py ├── finder.py ├── index.py └── py.typed ├── scripts ├── install_libspatialindex.bat ├── install_libspatialindex.sh ├── repair_wheel.py └── visualize.py ├── setup.py ├── tests ├── __init__.py ├── boxes_15x15.data ├── common.py ├── conftest.py ├── rungrind.dist ├── test_finder.py ├── test_index.py └── test_tpr.py └── tox.ini /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | # Check for updates to GitHub Actions every week 8 | interval: "weekly" 9 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Build and upload to PyPI 2 | 3 | on: 4 | workflow_dispatch: 5 | pull_request: 6 | paths: 7 | - '.github/workflows/deploy.yml' 8 | push: 9 | branches: 10 | - main 11 | paths: 12 | - '.github/workflows/deploy.yml' 13 | release: 14 | types: 15 | - published 16 | 17 | jobs: 18 | build_wheels: 19 | name: Build wheel on ${{ matrix.os }} 20 | runs-on: ${{ matrix.os }} 21 | strategy: 22 | matrix: 23 | os: 24 | - windows-latest 25 | - ubuntu-latest 26 | - ubuntu-24.04-arm 27 | - macos-latest 28 | 29 | steps: 30 | - uses: actions/checkout@v4 31 | 
32 | - uses: actions/setup-python@v5 33 | name: Install Python 34 | with: 35 | python-version: '3.11' 36 | 37 | - uses: ilammy/msvc-dev-cmd@v1 38 | if: startsWith(matrix.os, 'windows') 39 | 40 | - name: Build wheels 41 | uses: pypa/cibuildwheel@v2.23.2 42 | 43 | - uses: actions/upload-artifact@v4 44 | with: 45 | name: cibw-wheels-${{ matrix.os }} 46 | path: ./wheelhouse/*.whl 47 | 48 | build_sdist: 49 | name: Build source distribution 50 | runs-on: ubuntu-latest 51 | steps: 52 | - uses: actions/checkout@v4 53 | 54 | - name: Build sdist 55 | run: pipx run build --sdist 56 | 57 | - uses: actions/upload-artifact@v4 58 | with: 59 | name: cibw-sdist 60 | path: dist/*.tar.gz 61 | 62 | upload_pypi: 63 | needs: [build_wheels, build_sdist] 64 | runs-on: ubuntu-latest 65 | environment: pypi 66 | permissions: 67 | id-token: write 68 | if: github.event_name == 'release' && github.event.action == 'published' 69 | steps: 70 | - uses: actions/download-artifact@v4 71 | with: 72 | pattern: cibw-* 73 | path: dist 74 | merge-multiple: true 75 | 76 | - uses: pypa/gh-action-pypi-publish@release/v1 77 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | workflow_dispatch: 9 | schedule: 10 | - cron: '0 6 * * 1' 11 | 12 | jobs: 13 | conda: 14 | name: Conda Python ${{ matrix.python-version }}, SIDX-${{ matrix.sidx-version }}, ${{ matrix.os }} 15 | defaults: 16 | run: 17 | shell: bash -l {0} 18 | runs-on: ${{ matrix.os }} 19 | strategy: 20 | fail-fast: false 21 | matrix: 22 | os: ['ubuntu-latest', 'macos-latest', 'windows-latest'] 23 | # test oldest and newest versions of python and libspatialindex 24 | python-version: ['3.9', '3.13'] 25 | sidx-version: ['1.8.5', '2.1.0'] 26 | exclude: 27 | - os: 'macos-latest' 28 | sidx-version: '1.8.5' 29 | 30 | steps: 31 | - uses: 
actions/checkout@v4 32 | - uses: conda-incubator/setup-miniconda@v3 33 | with: 34 | channels: conda-forge 35 | auto-update-conda: true 36 | python-version: ${{ matrix.python-version }} 37 | 38 | - name: Setup 39 | run: conda install -c conda-forge pip numpy pytest libspatialindex=${{ matrix.sidx-version }} -y 40 | 41 | - name: Install 42 | run: pip install -e . 43 | 44 | - name: Test with pytest 45 | run: pytest -Werror -v --doctest-modules rtree tests 46 | 47 | ubuntu: 48 | name: Ubuntu Python ${{ matrix.python-version }} 49 | defaults: 50 | run: 51 | shell: bash -l {0} 52 | runs-on: ubuntu-latest 53 | strategy: 54 | fail-fast: false 55 | matrix: 56 | python-version: ['3.9', '3.10', '3.11', '3.12', '3.13'] 57 | 58 | steps: 59 | - uses: actions/checkout@v4 60 | - uses: actions/setup-python@v5 61 | name: Install Python 62 | with: 63 | python-version: ${{ matrix.python-version }} 64 | allow-prereleases: true 65 | 66 | - name: Setup 67 | run: | 68 | sudo apt-get -y install libspatialindex-c6 69 | pip install --upgrade pip 70 | pip install numpy pytest 71 | 72 | - name: Build 73 | run: pip install --user . 
74 | 75 | - name: Test with pytest 76 | run: pytest -Werror -v --doctest-modules rtree tests 77 | 78 | - name: Run doctests 79 | run: pytest -Werror -v --doctest-modules docs/source/*.rst 80 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg-info/ 2 | *.pyc 3 | docs/build 4 | build/ 5 | dist/ 6 | *.idx 7 | *.dat 8 | include 9 | lib 10 | .coverage 11 | .tox 12 | wheelhouse 13 | .vscode/ 14 | *venv* 15 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autoupdate_schedule: quarterly 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v5.0.0 6 | hooks: 7 | - id: check-yaml 8 | - id: end-of-file-fixer 9 | - id: trailing-whitespace 10 | - repo: https://github.com/python-jsonschema/check-jsonschema 11 | rev: 0.32.1 12 | hooks: 13 | - id: check-github-workflows 14 | args: ["--verbose"] 15 | - repo: https://github.com/astral-sh/ruff-pre-commit 16 | rev: v0.11.4 17 | hooks: 18 | # Run the linter 19 | - id: ruff 20 | args: [ --fix ] 21 | # Run the formatter 22 | - id: ruff-format 23 | - repo: https://github.com/pre-commit/mirrors-mypy 24 | rev: v1.15.0 25 | hooks: 26 | - id: mypy 27 | exclude: 'docs/.' 
28 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | apt_packages: 11 | - libspatialindex-dev 12 | os: ubuntu-lts-latest 13 | tools: 14 | python: latest 15 | 16 | # Build documentation in the docs/source directory with Sphinx 17 | sphinx: 18 | configuration: docs/source/conf.py 19 | fail_on_warning: true 20 | 21 | # Optionally build your docs in additional formats such as PDF 22 | formats: 23 | - pdf 24 | 25 | # Declare the Python requirements required to build your docs 26 | python: 27 | install: 28 | - requirements: docs/requirements.txt 29 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | 1.4.0: 2025-03-06 2 | ================= 3 | 4 | - Python 3.9+ is now required (:PR:`321`) 5 | - Add support for array-based bulk insert with NumPy (:PR:`340` by :user:`FreddieWitherden`) 6 | - Upgrade binary wheels with libspatialindex-2.1.0 (:PR:`353`) 7 | - Rename project and other build components to "rtree" (:PR:`350`) 8 | 9 | 1.3.0: 2024-07-10 10 | ================= 11 | 12 | - Upgrade binary wheels with libspatialindex-2.0.0 (:PR:`316`) 13 | - Fix binary wheels for musllinux wheels (:PR:`316`) 14 | - Update code style, replace isort and black with ruff, modern numpy rng (:PR:`319`) 15 | - Remove libsidx version testing (:PR:`313`) 16 | 17 | 1.2.0: 2024-01-19 18 | ================= 19 | 20 | - Fix test failure with built library (:PR:`291` by :user:`sebastic`) 21 | - Include spatialindex headers and add :py:meth:`~rtree.finder.get_include` (:PR:`292` by 
:user:`JDBetteridge`) 22 | 23 | 1.1.0: 2023-10-17 24 | ================= 25 | 26 | - Python 3.8+ is now required (:PR:`273`) 27 | - Move project metadata to pyproject.toml (:PR:`269`) 28 | - Refactor built wheels for PyPI (:PR:`276`) 29 | - Fix memory leak when breaking mid-way in _get_objects and _get_ids (:PR:`266`) (thanks :user:`akariv`!) 30 | 31 | 1.0.1: 2022-10-12 32 | ================= 33 | 34 | - Fix up type hints :PR:`243` (thanks :user:`oderby`) 35 | - Python 3.11 wheels :PR:`250` (thanks :user:`ewouth`) 36 | 37 | 1.0.0: 2022-04-05 38 | ================= 39 | 40 | - Python 3.7+ is now required (:PR:`212`) (thanks :user:`adamjstewart`!) 41 | - Type hints (:PR:`215` and others) (thanks :user:`adamjstewart`!) 42 | - Python 3.10 wheels, including osx-arm64 :PR:`224` 43 | - Clean up libspatialindex C API mismatches :PR:`222` (thanks :user:`musicinmybrain`!) 44 | - Many doc updates, fixes, and type hints (thanks :user:`adamjstewart`!) :PR:`212` :PR:`221` :PR:`217` :PR:`215` 45 | - __len__ method for index :PR:`194` 46 | - Prevent get_coordinate_pointers from mutating inputs #205 (thanks :user:`sjones94549`!) 47 | - linux-aarch64 wheels :PR:`183` (thanks :user:`odidev`!) 
48 | - black (:PR:`218`) and flake8 (:PR:`145`) linting 49 | 50 | 0.9.3: 2019-12-10 51 | ================= 52 | 53 | - find_library and libspatialindex library loading :PR:`131` 54 | 55 | 0.9.2: 2019-12-09 56 | ================= 57 | 58 | - Refactored tests to be based on unittest :PR:`129` 59 | - Update libspatialindex library loading code to adapt previous behavior :PR:`128` 60 | - Empty data streams throw exceptions and do not partially construct indexes :PR:`127` 61 | 62 | 0.9.0: 2019-11-24 63 | ================= 64 | 65 | - Add Index.GetResultSetOffset() 66 | - Add Index.contains() method for object and id (requires libspatialindex 1.9.3+) :PR:`116` 67 | - Add Index.Flush() :PR:`107` 68 | - Add TPRTree index support (thanks :user:`sdhiscocks` :PR:`117`) 69 | - Return container sizes without returning objects :PR:`90` 70 | - Add set_result_limit and set_result_offset for Index paging :commit:`44ad21aecd3f7b49314b9be12f3334d8bae7e827` 71 | 72 | Bug fixes: 73 | 74 | - Better exceptions in cases where stream functions throw :PR:`80` 75 | - Migrated CI platform to Azure Pipelines https://dev.azure.com/hobuinc/rtree/_build?definitionId=5 76 | - Minor test enhancements and fixups. Both libspatialindex 1.8.5 and libspatialindex 1.9.3 are tested with CI 77 | 78 | 79 | 0.8: 2014-07-17 80 | =============== 81 | 82 | - Support for Python 3 added. 83 | 84 | 0.7.0: 2011-12-29 85 | ================= 86 | 87 | - 0.7.0 relies on libspatialindex 1.7.1+. 88 | - int64_t's should be used for IDs instead of uint64_t (requires libspatialindex 1.7.1 C API changes) 89 | - Fix __version__ 90 | - More documentation at http://toblerity.github.com/rtree/ 91 | - Class documentation at http://toblerity.github.com/rtree/class.html 92 | - Tweaks for PyPy compatibility. Still not compatible yet, however. 93 | - Custom storage support by Mattias (requires libspatialindex 1.7.1) 94 | 95 | 0.6.0: 2010-04-13 96 | ================= 97 | 98 | - 0.6.0 relies on libspatialindex 1.5.0+. 
99 | - :py:meth:`~rtree.index.Index.intersection` and :py:meth:`~rtree.index.Index.nearest` methods return iterators over results instead of 100 | lists. 101 | - Number of results for :py:meth:`~rtree.index.Index.nearest` defaults to 1. 102 | - libsidx C library of 0.5.0 removed and included in libspatialindex 103 | - objects="raw" in :py:meth:`~rtree.index.Index.intersection` to return the object sent in (for speed). 104 | - :py:meth:`~rtree.index.Index.count` method to return the intersection count without the overhead 105 | of returning a list (thanks Leonard Norrgård). 106 | - Improved bulk loading performance 107 | - Supposedly no memory leaks :) 108 | - Many other performance tweaks (see docs). 109 | - Bulk loader supports interleaved coordinates 110 | - Leaf queries. You can return the box and ids of the leaf nodes of the index. 111 | Useful for visualization, etc. 112 | - Many more docstrings, sphinx docs, etc 113 | 114 | 115 | 0.5.0: 2009-08-06 116 | ================= 117 | 118 | 0.5.0 was a complete refactoring to use libsidx - a C API for libspatialindex. 119 | The code is now ctypes over libsidx, and a number of new features are now 120 | available as a result of this refactoring. 121 | 122 | * ability to store pickles within the index (clustered index) 123 | * ability to use custom extension names for disk-based indexes 124 | * ability to modify many index parameters at instantiation time 125 | * storage of point data reduced by a factor of 4 126 | * bulk loading of indexes at instantiation time 127 | * ability to quickly return the bounds of the entire index 128 | * ability to return the bounds of index entries 129 | * much better windows support 130 | * libspatialindex 1.4.0 required. 
131 | 132 | 0.4.3: 2009-06-05 133 | ================= 134 | - Fix reference counting leak #181 135 | 136 | 0.4.2: 2009-05-25 137 | ================= 138 | - Windows support 139 | 140 | 0.4.1: 2008-03-24 141 | ================= 142 | 143 | - Eliminate uncounted references in add, delete, nearestNeighbor (#157). 144 | 145 | 0.4: 2008-01-24 146 | =============== 147 | 148 | - Testing improvements. 149 | - Switch dependency to the single consolidated spatialindex library (1.3). 150 | 151 | 0.3: 26 November 2007 152 | ===================== 153 | - Change to Python long integer identifiers (#126). 154 | - Allow deletion of objects from indexes. 155 | - Reraise index query errors as Python exceptions. 156 | - Improved persistence. 157 | 158 | 0.2: 19 May 2007 159 | ================ 160 | - Link spatialindex system library. 161 | 162 | 0.1: 13 April 2007 163 | ================== 164 | - Add disk storage option for indexes (#320). 165 | - Change license to LGPL. 166 | - Moved from Pleiades to GIS-Python repo. 167 | - Initial release. 
168 | -------------------------------------------------------------------------------- /CREDITS.txt: -------------------------------------------------------------------------------- 1 | 2 | Sean Gillies 3 | 4 | * Initial effort and basic API design based on QGIS' usage of libspatialindex C++ APIs 5 | 6 | Howard Butler 7 | 8 | * libspatialindex C API 9 | * rewrite to use ctypes and libspatialindex C API 10 | * Streaming/bulk loading support 11 | * Disk serialization of indexes 12 | * Pickle serialization and clustered index support 13 | * .count() and .intersection() methods 14 | * Windows support 15 | * Node fetching 16 | * Index property access 17 | 18 | Brent Pedersen 19 | 20 | * Pickle protocol support 21 | * Documentation, doctests 22 | * Variable coordinate ordering 23 | * Testing 24 | 25 | Matthias 26 | 27 | * Custom storage API (both Rtree and libspatialindex) 28 | 29 | Adam Stewart 30 | 31 | * intersection/union support 32 | * __len__ method 33 | 34 | Mike Taves 35 | 36 | * cibuildwheel configuration 37 | * general maintenance 38 | -------------------------------------------------------------------------------- /DEPENDENCIES.txt: -------------------------------------------------------------------------------- 1 | - python 3.9+ 2 | - setuptools 3 | - libspatialindex C library 1.8.5+: 4 | https://libspatialindex.org/ 5 | -------------------------------------------------------------------------------- /FAQ.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/rtree/76656fd252d0c85dbef87bbd940160c40a3f114c/FAQ.txt -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2018: Sean C. 
Gillies, Howard Butler and contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 17 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 18 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 19 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 20 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE 21 | OR OTHER DEALINGS IN THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md *.rst *.txt 2 | include MANIFEST.in 3 | recursive-include benchmarks * 4 | recursive-include tests * 5 | recursive-include docs * 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Rtree: Spatial indexing for Python 2 | 3 | [![Test](https://github.com/Toblerity/rtree/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/Toblerity/rtree/actions/workflows/test.yml) 4 | [![PyPI version](https://badge.fury.io/py/rtree.svg)](https://badge.fury.io/py/rtree) 5 | 6 | 7 | Rtree is a [ctypes](https://docs.python.org/3/library/ctypes.html) Python wrapper of [libspatialindex](https://libspatialindex.org/) that provides a 8 | number of advanced spatial indexing features for the spatially curious Python 9 | user. These features include: 10 | 11 | * Nearest neighbor search 12 | * Intersection search 13 | * Multi-dimensional indexes 14 | * Clustered indexes (store Python pickles directly with index entries) 15 | * Bulk loading 16 | * Deletion 17 | * Disk serialization 18 | * Custom storage implementation (to implement spatial indexing in ZODB, for example) 19 | 20 | 21 | Wheels are available for most major platforms, and `rtree` with bundled `libspatialindex` can be installed via pip: 22 | 23 | ``` 24 | pip install rtree 25 | ``` 26 | 27 | See [changes](https://rtree.readthedocs.io/en/latest/changes.html) for all versions. 
28 | -------------------------------------------------------------------------------- /benchmarks/benchmarks.py: -------------------------------------------------------------------------------- 1 | # hobu's latest results on his 2006-era machine 2 | 3 | # Stream load: 4 | # 293710.04 usec/pass 5 | # 6 | # One-at-a-time load: 7 | # 527883.95 usec/pass 8 | # 9 | # 30000 points 10 | # Query box: (1240000, 1010000, 1400000, 1390000) 11 | # 12 | # Brute Force: 13 | # 46 hits 14 | # 13533.60 usec/pass 15 | # 16 | # Memory-based Rtree Intersection: 17 | # 46 hits 18 | # 7516.19 usec/pass 19 | # 20 | # Disk-based Rtree Intersection: 21 | # 46 hits 22 | # 7543.00 usec/pass 23 | # 24 | # Disk-based Rtree Intersection without Item() wrapper (objects='raw'): 25 | # 46 raw hits 26 | # 347.60 usec/pass 27 | 28 | import random 29 | import timeit 30 | from pathlib import Path 31 | 32 | import rtree 33 | from rtree import Rtree as _Rtree 34 | 35 | print(f"Benchmarking Rtree-{rtree.__version__} from {Path(rtree.__file__).parent}") 36 | print(f"Using {rtree.core.rt._name} version {rtree.core.rt.SIDX_Version().decode()}") 37 | print() 38 | 39 | TEST_TIMES = 20 40 | 41 | 42 | class Point: 43 | """A very basic Geometry.""" 44 | 45 | def __init__(self, x, y): 46 | self.x = x 47 | self.y = y 48 | 49 | 50 | class Rtree(_Rtree): 51 | pickle_protocol = -1 52 | 53 | 54 | # Scatter points randomly in a 1x1 box 55 | bounds = (0, 0, 6000000, 6000000) 56 | count = 30000 57 | points = [] 58 | 59 | insert_object = None 60 | insert_object = { 61 | "a": list(range(100)), 62 | "b": 10, 63 | "c": object(), 64 | "d": dict(x=1), 65 | "e": Point(2, 3), 66 | } 67 | 68 | index = Rtree() 69 | disk_index = Rtree("test", overwrite=1) 70 | 71 | coordinates = [] 72 | random.seed("Rtree", version=2) 73 | for i in range(count): 74 | x = random.randrange(bounds[0], bounds[2]) + random.random() 75 | y = random.randrange(bounds[1], bounds[3]) + random.random() 76 | point = Point(x, y) 77 | points.append(point) 78 | 
79 | index.add(i, (x, y), insert_object) 80 | disk_index.add(i, (x, y), insert_object) 81 | coordinates.append((i, (x, y, x, y), insert_object)) 82 | 83 | s = """ 84 | bulk = Rtree(coordinates[:2000]) 85 | """ 86 | t = timeit.Timer(stmt=s, setup="from __main__ import coordinates, Rtree, insert_object") 87 | print("Stream load:") 88 | print(f"{1e6 * t.timeit(number=TEST_TIMES) / TEST_TIMES:.2f} usec/pass") 89 | print() 90 | 91 | s = """ 92 | idx = Rtree() 93 | i = 0 94 | for point in points[:2000]: 95 | idx.add(i, (point.x, point.y), insert_object) 96 | i+=1 97 | """ 98 | t = timeit.Timer(stmt=s, setup="from __main__ import points, Rtree, insert_object") 99 | print("One-at-a-time load:") 100 | print(f"{1e6 * t.timeit(number=TEST_TIMES) / TEST_TIMES:.2f} usec/pass") 101 | print() 102 | 103 | bbox = (1240000, 1010000, 1400000, 1390000) 104 | print(count, "points") 105 | print("Query box: ", bbox) 106 | print() 107 | 108 | # Brute force all points within a 0.1x0.1 box 109 | s = """ 110 | hits = [p for p in points 111 | if p.x >= bbox[0] and p.x <= bbox[2] 112 | and p.y >= bbox[1] and p.y <= bbox[3]] 113 | """ 114 | t = timeit.Timer(stmt=s, setup="from __main__ import points, bbox") 115 | print("Brute Force:") 116 | print( 117 | len( 118 | [ 119 | p 120 | for p in points 121 | if p.x >= bbox[0] and p.x <= bbox[2] and p.y >= bbox[1] and p.y <= bbox[3] 122 | ] 123 | ), 124 | "hits", 125 | ) 126 | print(f"{1e6 * t.timeit(number=TEST_TIMES) / TEST_TIMES:.2f} usec/pass") 127 | print() 128 | 129 | # 0.1x0.1 box using intersection 130 | 131 | if insert_object is None: 132 | s = """ 133 | hits = [points[id] for id in index.intersection(bbox)] 134 | """ 135 | else: 136 | s = """ 137 | hits = [p.object for p in index.intersection(bbox, objects=insert_object)] 138 | """ 139 | 140 | t = timeit.Timer( 141 | stmt=s, setup="from __main__ import points, index, bbox, insert_object" 142 | ) 143 | print("Memory-based Rtree Intersection:") 144 | print(len([points[id] for id in 
index.intersection(bbox)]), "hits") 145 | print(f"{1e6 * t.timeit(number=100) / 100:.2f} usec/pass") 146 | print() 147 | 148 | # run same test on disk_index. 149 | s = s.replace("index.", "disk_index.") 150 | 151 | t = timeit.Timer( 152 | stmt=s, setup="from __main__ import points, disk_index, bbox, insert_object" 153 | ) 154 | print("Disk-based Rtree Intersection:") 155 | hits = list(disk_index.intersection(bbox)) 156 | print(len(hits), "hits") 157 | print(f"{1e6 * t.timeit(number=TEST_TIMES) / TEST_TIMES:.2f} usec/pass") 158 | print() 159 | 160 | if insert_object: 161 | s = """ 162 | hits = disk_index.intersection(bbox, objects="raw") 163 | """ 164 | t = timeit.Timer( 165 | stmt=s, setup="from __main__ import points, disk_index, bbox, insert_object" 166 | ) 167 | print("Disk-based Rtree Intersection without Item() wrapper (objects='raw'):") 168 | result = list(disk_index.intersection(bbox, objects="raw")) 169 | print(len(result), "raw hits") 170 | print(f"{1e6 * t.timeit(number=TEST_TIMES) / TEST_TIMES:.2f} usec/pass") 171 | assert "a" in result[0], result[0] # type: ignore 172 | 173 | Path("test.dat").unlink() 174 | Path("test.idx").unlink() 175 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | 9 | # Internal variables. 
10 | PAPEROPT_a4 = -D latex_paper_size=a4 11 | PAPEROPT_letter = -D latex_paper_size=letter 12 | ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 13 | 14 | .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest 15 | 16 | help: 17 | @echo "Please use \`make ' where is one of" 18 | @echo " html to make standalone HTML files" 19 | @echo " dirhtml to make HTML files named index.html in directories" 20 | @echo " pickle to make pickle files" 21 | @echo " json to make JSON files" 22 | @echo " htmlhelp to make HTML files and a HTML help project" 23 | @echo " qthelp to make HTML files and a qthelp project" 24 | @echo " devhelp to make HTML files and a Devhelp project" 25 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 26 | @echo " latex_paper_size to make LaTeX files and run them through pdflatex" 27 | @echo " changes to make an overview of all changed/added/deprecated items" 28 | @echo " linkcheck to check all external links for integrity" 29 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 30 | 31 | clean: 32 | -rm -rf build/* 33 | 34 | html: 35 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html 36 | @echo 37 | @echo "Build finished. The HTML pages are in build/html." 38 | 39 | dirhtml: 40 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) build/dirhtml 41 | @echo 42 | @echo "Build finished. The HTML pages are in build/dirhtml." 43 | 44 | pickle: 45 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle 46 | @echo 47 | @echo "Build finished; now you can process the pickle files." 48 | 49 | json: 50 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json 51 | @echo 52 | @echo "Build finished; now you can process the JSON files." 53 | 54 | htmlhelp: 55 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp 56 | @echo 57 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 58 | ".hhp project file in build/htmlhelp." 
59 | 60 | qthelp: 61 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) build/qthelp 62 | @echo 63 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 64 | ".qhcp project file in build/qthelp, like this:" 65 | @echo "# qcollectiongenerator build/qthelp/Rtree.qhcp" 66 | @echo "To view the help file:" 67 | @echo "# assistant -collectionFile build/qthelp/Rtree.qhc" 68 | 69 | devhelp: 70 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) build/devhelp 71 | @echo 72 | @echo "Build finished." 73 | @echo "To view the help file:" 74 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Rtree" 75 | @echo "# ln -s build/devhelp $$HOME/.local/share/devhelp/Rtree" 76 | @echo "# devhelp" 77 | 78 | latex: 79 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex 80 | @echo 81 | @echo "Build finished; the LaTeX files are in build/latex." 82 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 83 | "run these through (pdf)latex." 84 | 85 | latexpdf: latex 86 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex 87 | @echo "Running LaTeX files through pdflatex..." 88 | make -C build/latex all-pdf 89 | @echo "pdflatex finished; the PDF files are in build/latex." 90 | 91 | changes: 92 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes 93 | @echo 94 | @echo "The overview file is in build/changes." 95 | 96 | linkcheck: 97 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck 98 | @echo 99 | @echo "Link check complete; look for any errors in the above output " \ 100 | "or in build/linkcheck/output.txt." 101 | 102 | doctest: 103 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) build/doctest 104 | @echo "Testing of doctests in the sources finished, look at the " \ 105 | "results in build/doctest/output.txt." 106 | 107 | pdf: 108 | $(SPHINXBUILD) -b pdf $(ALLSPHINXOPTS) build/pdf 109 | @echo 110 | @echo "Build finished; now you can process the PDF files." 
111 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx>=4 2 | sphinx-issues 3 | -------------------------------------------------------------------------------- /docs/source/changes.rst: -------------------------------------------------------------------------------- 1 | .. _changes: 2 | 3 | Changes 4 | .............................................................................. 5 | 6 | .. include:: ../../CHANGES.rst 7 | -------------------------------------------------------------------------------- /docs/source/class.rst: -------------------------------------------------------------------------------- 1 | .. _class: 2 | 3 | Class Documentation 4 | ------------------------------------------------------------------------------ 5 | 6 | .. autoclass:: rtree.index.Index 7 | :members: __init__, insert, intersection, intersection_v, nearest, nearest_v, delete, bounds, count, close, dumps, loads 8 | 9 | .. autoclass:: rtree.index.Property 10 | :members: 11 | 12 | .. autoclass:: rtree.index.Item 13 | :members: __init__, bbox, object 14 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 
2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | import sys 6 | 7 | sys.path.append("../../") 8 | 9 | import rtree # noqa: E402 10 | 11 | # -- Project information ----------------------------------------------------- 12 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 13 | 14 | project = "Rtree" 15 | copyright = "2019, Sean Gilles, Howard Butler, and contributors" 16 | author = "Sean Gilles, Howard Butler, and contributors" 17 | version = release = rtree.__version__ 18 | 19 | # -- General configuration --------------------------------------------------- 20 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 21 | 22 | extensions = [ 23 | "sphinx.ext.autodoc", 24 | "sphinx.ext.doctest", 25 | "sphinx.ext.intersphinx", 26 | "sphinx.ext.todo", 27 | "sphinx.ext.coverage", 28 | "sphinx.ext.ifconfig", 29 | "sphinx_issues", 30 | ] 31 | 32 | templates_path = ["_templates"] 33 | exclude_patterns = [] 34 | 35 | # The name of the Pygments (syntax highlighting) style to use. 36 | pygments_style = "sphinx" 37 | 38 | # -- Options for HTML output ------------------------------------------------- 39 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 40 | 41 | 42 | html_theme = "nature" 43 | htmlhelp_basename = "Rtreedoc" 44 | 45 | # -- Options for LaTeX output -------------------------------------------- 46 | 47 | # Grouping the document tree into LaTeX files. List of tuples 48 | # (source start file, target name, title, author, documentclass [howto/manual]). 
49 | latex_documents = [("index", "Rtree.tex", "Rtree Documentation", author, "manual")] 50 | 51 | pdf_documents = [("index", "Rtree", "Rtree Documentation", "The Rtree Team")] 52 | 53 | pdf_language = "en_US" 54 | pdf_fit_mode = "overflow" 55 | 56 | # -- Extension configuration ------------------------------------------------- 57 | 58 | intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} 59 | 60 | # sphinx.ext.autodoc 61 | autodoc_typehints = "description" 62 | autodoc_typehints_description_target = "documented" 63 | 64 | # sphinx-issues 65 | issues_github_path = "Toblerity/rtree" 66 | issues_commit_prefix = "" 67 | -------------------------------------------------------------------------------- /docs/source/history.rst: -------------------------------------------------------------------------------- 1 | .. _history: 2 | 3 | History of Rtree 4 | ------------------------------------------------------------------------------ 5 | 6 | `Rtree`_ was started by `Sean Gillies`_ as a port of the `libspatialindex`_ 7 | linkages that `QGIS`_ maintained to provide on-the-fly indexing support for 8 | GUI operations. A notable feature of `R-trees`_ is the ability to insert data 9 | into the structure without the need for a global partitioning bounds, and this 10 | drove Sean's adoption of this code. `Howard Butler`_ later picked up `Rtree`_ 11 | and added a number of features that `libspatialindex`_ provided including disk 12 | serialization and bulk loading by writing a C API for `libspatialindex`_ and 13 | re-writing `Rtree`_ as a `ctypes`_ wrapper to utilize this C API. `Brent 14 | Pedersen`_ came along and added features to support alternative coordinate 15 | ordering, augmentation of the pickle storage, and lots of documentation. 16 | Mattias (http://dr-code.org) added support for custom storage backends to 17 | support using `Rtree`_ as an indexing type in `ZODB`_. 
18 | 19 | `Rtree`_ has gone through a number of iterations, and at 20 | 0.5.0, it was completely refactored to use a new internal architecture (ctypes 21 | + a C API over `libspatialindex`_). This refactoring has resulted in a number 22 | of new features and much more flexibility. See :ref:`changes` for more detail. 23 | 24 | .. note:: 25 | A significant bug in the 1.6.1+ `libspatialindex`_ C API was found where 26 | it was using unsigned integers for index entry IDs instead of signed 27 | integers. Because `Rtree`_ appeared to be the only significant user of the 28 | C API at this time, it was corrected immediately. You should update 29 | immediately and re-insert data into new indexes if this is an important 30 | consideration for your application. 31 | 32 | Rtree 0.5.0 included a C library that is now the C API for libspatialindex and 33 | is part of that source tree. The code bases are independent from each other 34 | and can now evolve separately. Rtree is pure Python as of 0.6.0+. 35 | 36 | 37 | .. _`Sean Gillies`: https://sgillies.net 38 | .. _`Howard Butler`: https://hobu.co 39 | .. _`Brent Pedersen`: https://github.com/brentp 40 | .. _`QGIS`: https://qgis.org 41 | 42 | 43 | .. _`ZODB`: https://zodb.org 44 | .. _`R-trees`: https://en.wikipedia.org/wiki/R-tree 45 | .. _`ctypes`: https://docs.python.org/3/library/ctypes.html 46 | .. _`libspatialindex`: https://libspatialindex.org 47 | .. _`Rtree`: https://rtree.readthedocs.io 48 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. _home: 2 | 3 | Rtree: Spatial indexing for Python 4 | ------------------------------------------------------------------------------ 5 | 6 | `Rtree`_ is a `ctypes`_ Python wrapper of `libspatialindex`_ that provides a 7 | number of advanced spatial indexing features for the spatially curious Python 8 | user. 
These features include: 9 | 10 | * Nearest neighbor search 11 | * Intersection search 12 | * Multi-dimensional indexes 13 | * Clustered indexes (store Python pickles directly with index entries) 14 | * Bulk loading 15 | * Deletion 16 | * Disk serialization 17 | * Custom storage implementation (to implement spatial indexing in ZODB, for example) 18 | 19 | Documentation 20 | .............................................................................. 21 | 22 | .. toctree:: 23 | :maxdepth: 2 24 | 25 | install 26 | tutorial 27 | class 28 | misc 29 | changes 30 | performance 31 | history 32 | 33 | * :ref:`genindex` 34 | * :ref:`modindex` 35 | * :ref:`search` 36 | 37 | .. _`R-trees`: https://en.wikipedia.org/wiki/R-tree 38 | .. _`ctypes`: https://docs.python.org/3/library/ctypes.html 39 | .. _`libspatialindex`: https://libspatialindex.org 40 | .. _`Rtree`: https://rtree.readthedocs.io 41 | -------------------------------------------------------------------------------- /docs/source/install.rst: -------------------------------------------------------------------------------- 1 | .. _installation: 2 | 3 | Installation 4 | ------------------------------------------------------------------------------ 5 | 6 | \*nix 7 | .............................................................................. 8 | 9 | First, download and install version 1.8.5+ of the `libspatialindex`_ library from: 10 | 11 | https://libspatialindex.org 12 | 13 | The library supports CMake builds, so it is a matter of: 14 | 15 | .. code-block:: console 16 | 17 | $ mkdir build && cd build 18 | $ cmake .. 19 | $ cmake --build . -j 20 | $ cmake --install . 21 | 22 | You may need to run the ``ldconfig`` command after installing the library to 23 | ensure that applications can find it at startup time. 24 | 25 | Rtree can be easily installed via pip: 26 | 27 | .. code-block:: console 28 | 29 | $ pip install rtree 30 | 31 | or by running in a local source directory: 32 | 33 | .. 
code-block:: console 34 | 35 | $ pip install -e . 36 | 37 | You can build and test in place like: 38 | 39 | .. code-block:: console 40 | 41 | $ pytest 42 | 43 | Windows 44 | .............................................................................. 45 | 46 | The Windows DLLs of `libspatialindex`_ are pre-compiled in 47 | windows installers that are available from `PyPI`_. Installation on Windows 48 | is as easy as: 49 | 50 | .. code-block:: console 51 | 52 | $ pip install rtree 53 | 54 | 55 | .. _`PyPI`: https://pypi.org/project/rtree/ 56 | .. _`libspatialindex`: https://libspatialindex.org 57 | -------------------------------------------------------------------------------- /docs/source/misc.rst: -------------------------------------------------------------------------------- 1 | .. _misc: 2 | 3 | Miscellaneous Documentation 4 | ------------------------------------------------------------------------------ 5 | 6 | Exceptions 7 | ========== 8 | 9 | .. autoexception:: rtree.exceptions.RTreeError 10 | :members: 11 | 12 | Finder module 13 | ============= 14 | 15 | .. automodule:: rtree.finder 16 | :members: 17 | -------------------------------------------------------------------------------- /docs/source/performance.rst: -------------------------------------------------------------------------------- 1 | .. _performance: 2 | 3 | Performance 4 | ------------------------------------------------------------------------------ 5 | 6 | See the `benchmarks.py`_ file for a comparison of various query methods 7 | and how much acceleration can be obtained from using Rtree. 8 | 9 | .. _benchmarks.py: https://github.com/Toblerity/rtree/blob/main/benchmarks/benchmarks.py 10 | 11 | There are a few simple things that will improve performance. 12 | 13 | Use stream loading 14 | .............................................................................. 
15 | 16 | This will substantially (orders of magnitude in many cases) improve 17 | performance over :py:meth:`~rtree.index.Index.insert` by allowing the data to 18 | be pre-sorted 19 | 20 | .. code-block:: pycon 21 | 22 | >>> from rtree import index 23 | >>> def generator_function(somedata): 24 | ... for i, obj in enumerate(somedata): 25 | ... yield (i, (obj.xmin, obj.ymin, obj.xmax, obj.ymax), obj) 26 | ... 27 | >>> r = index.Index(generator_function(somedata)) # doctest: +SKIP 28 | 29 | After bulk loading the index, you can then insert additional records into 30 | the index using :py:meth:`~rtree.index.Index.insert` 31 | 32 | Override :py:data:`~rtree.index.Index.dumps` to use the highest pickle protocol 33 | ............................................................................... 34 | 35 | .. code-block:: pycon 36 | 37 | >>> import pickle 38 | >>> import rtree 39 | >>> class FastRtree(rtree.Rtree): 40 | ... def dumps(self, obj): 41 | ... return pickle.dumps(obj, -1) 42 | ... 43 | >>> r = FastRtree() 44 | 45 | .. topic:: Update from January 2024 46 | 47 | Pickling is currently broken and awaiting a pull request to fix it. 48 | For more information, see the `pull request on GitHub`_. 49 | 50 | .. _pull request on GitHub: https://github.com/Toblerity/rtree/pull/197 51 | 52 | Use objects="raw" 53 | ............................................................................... 54 | 55 | In any :py:meth:`~rtree.index.Index.intersection` or 56 | :py:meth:`~rtree.index.Index.nearest` or query, use ``objects="raw"`` keyword 57 | argument: 58 | 59 | .. code-block:: pycon 60 | 61 | >>> xmin, ymin, xmax, ymax = 0.0, 0.0, 1.0, 1.0 62 | >>> objs = r.intersection((xmin, ymin, xmax, ymax), objects="raw") 63 | 64 | 65 | Adjust index properties 66 | ............................................................................... 67 | 68 | Adjust :py:class:`rtree.index.Property` appropriate to your index. 
69 | 70 | * Set your :py:data:`~rtree.index.Property.leaf_capacity` to a higher value 71 | than the default 100. 1000+ is fine for the default pagesize of 4096 in 72 | many cases. 73 | 74 | * Increase the :py:data:`~rtree.index.Property.fill_factor` to something 75 | near 0.9. Smaller fill factors mean more splitting, which means more 76 | nodes. This may be bad or good depending on your usage. 77 | 78 | Limit dimensionality to the amount you need 79 | ............................................................................... 80 | 81 | Don't use more dimensions than you actually need. If you only need 2, only use 82 | two. Otherwise, you will waste lots of storage and add that many more floating 83 | point comparisons for each query, search, and insert operation of the index. 84 | 85 | Use the correct query method 86 | ............................................................................... 87 | 88 | Use :py:meth:`~rtree.index.Index.count` if you only need a count and 89 | :py:meth:`~rtree.index.Index.intersection` if you only need the ids. 90 | Otherwise, lots of data may potentially be copied. If possible also 91 | make use of the bulk query methods suffixed with `_v`. 92 | -------------------------------------------------------------------------------- /docs/source/tutorial.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial: 2 | 3 | Tutorial 4 | ------------------------------------------------------------------------------ 5 | 6 | This tutorial demonstrates how to take advantage of :ref:`Rtree ` for 7 | querying data that have a spatial component that can be modeled as bounding 8 | boxes. 9 | 10 | 11 | Creating an index 12 | .............................................................................. 13 | 14 | The following section describes the basic instantiation and usage of 15 | :ref:`Rtree `. 
16 | 17 | Import 18 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 19 | 20 | After :ref:`installing <installation>` :ref:`Rtree <home>`, you should be able to 21 | open up a Python prompt and issue the following: 22 | 23 | .. code-block:: pycon 24 | 25 | >>> from rtree import index 26 | 27 | :py:mod:`rtree` is organized as a Python package with a couple of modules 28 | and two major classes - :py:class:`rtree.index.Index` and 29 | :py:class:`rtree.index.Property`. Users manipulate these classes to interact 30 | with the index. 31 | 32 | Construct an instance 33 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 34 | 35 | After importing the index module, construct an index with the default 36 | construction: 37 | 38 | .. code-block:: pycon 39 | 40 | >>> idx = index.Index() 41 | 42 | .. note:: 43 | 44 | While the default construction is useful in many cases, if you want to 45 | manipulate how the index is constructed you will need to pass in a 46 | :py:class:`rtree.index.Property` instance when creating the index. 47 | 48 | Create a bounding box 49 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 50 | 51 | After instantiating the index, create a bounding box that we can 52 | insert into the index: 53 | 54 | .. code-block:: pycon 55 | 56 | >>> left, bottom, right, top = (0.0, 0.0, 1.0, 1.0) 57 | 58 | .. note:: 59 | 60 | The coordinate ordering for all functions is sensitive to the index's 61 | :py:attr:`~rtree.index.Index.interleaved` data member. If 62 | :py:attr:`~rtree.index.Index.interleaved` is False, the coordinates must 63 | be in the form [xmin, xmax, ymin, ymax, ..., ..., kmin, kmax]. If 64 | :py:attr:`~rtree.index.Index.interleaved` is True, the coordinates must be 65 | in the form [xmin, ymin, ..., kmin, xmax, ymax, ..., kmax].
66 | 67 | Insert records into the index 68 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 69 | 70 | Insert an entry into the index: 71 | 72 | .. code-block:: pycon 73 | 74 | >>> idx.insert(0, (left, bottom, right, top)) 75 | 76 | .. note:: 77 | 78 | Entries that are inserted into the index are not unique in either the 79 | sense of the `id` or of the bounding box that is inserted with index 80 | entries. If you need to maintain uniqueness, you need to manage that before 81 | inserting entries into the Rtree. 82 | 83 | .. note:: 84 | 85 | Inserting a point, i.e. where left == right && top == bottom, will 86 | essentially insert a single point entry into the index instead of copying 87 | extra coordinates and inserting them. There is no shortcut to explicitly 88 | insert a single point, however. 89 | 90 | Query the index 91 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 92 | 93 | There are three primary methods for querying the index. 94 | :py:meth:`rtree.index.Index.intersection` will return you index entries that 95 | *cross* or are *contained* within the given query window. 96 | :py:meth:`rtree.index.Index.intersection` 97 | 98 | Intersection 99 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 100 | 101 | Given a query window, return ids that are contained within the window: 102 | 103 | .. code-block:: pycon 104 | 105 | >>> list(idx.intersection((1.0, 1.0, 2.0, 2.0))) 106 | [0] 107 | 108 | Given a query window that is beyond the bounds of data we have in the 109 | index: 110 | 111 | .. code-block:: pycon 112 | 113 | >>> list(idx.intersection((1.0000001, 1.0000001, 2.0, 2.0))) 114 | [] 115 | 116 | Nearest Neighbors 117 | ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 118 | 119 | The following finds the 1 nearest item to the given bounds. If multiple items 120 | are of equal distance to the bounds, both are returned: 121 | 122 | .. 
code-block:: pycon 123 | 124 | >>> idx.insert(1, (left, bottom, right, top)) 125 | >>> list(idx.nearest((1.0000001, 1.0000001, 2.0, 2.0), 1)) 126 | [0, 1] 127 | 128 | 129 | .. _clustered: 130 | 131 | Using Rtree as a cheapo spatial database 132 | .............................................................................. 133 | 134 | Rtree also supports inserting any object you can pickle into the index (called 135 | a clustered index in `libspatialindex`_ parlance). The following inserts the 136 | picklable object ``42`` into the index with the given id ``2``: 137 | 138 | .. code-block:: pycon 139 | 140 | >>> idx.insert(id=2, coordinates=(left, bottom, right, top), obj=42) 141 | 142 | You can then return a list of objects by giving the ``objects=True`` flag 143 | to intersection: 144 | 145 | .. code-block:: pycon 146 | 147 | >>> [n.object for n in idx.intersection((left, bottom, right, top), objects=True)] 148 | [None, None, 42] 149 | 150 | .. warning:: 151 | `libspatialindex`_'s clustered indexes were not designed to be a database. 152 | You get none of the data integrity protections that a database would 153 | purport to offer, but this behavior of :ref:`Rtree ` can be useful 154 | nonetheless. Consider yourself warned. Now go do cool things with it. 155 | 156 | Serializing your index to a file 157 | .............................................................................. 158 | 159 | One of :ref:`Rtree `'s most useful properties is the ability to 160 | serialize Rtree indexes to disk. These include the clustered indexes 161 | described :ref:`here `: 162 | 163 | .. 
code-block:: pycon 164 | 165 | >>> import os 166 | >>> from tempfile import TemporaryDirectory 167 | >>> prev_dir = os.getcwd() 168 | >>> temp_dir = TemporaryDirectory() 169 | >>> os.chdir(temp_dir.name) 170 | >>> file_idx = index.Rtree("myidx") 171 | >>> file_idx.insert(1, (left, bottom, right, top)) 172 | >>> file_idx.insert(2, (left - 1.0, bottom - 1.0, right + 1.0, top + 1.0)) 173 | >>> [n for n in file_idx.intersection((left, bottom, right, top))] 174 | [1, 2] 175 | >>> sorted(os.listdir()) 176 | ['myidx.dat', 'myidx.idx'] 177 | >>> os.chdir(prev_dir) 178 | >>> temp_dir.cleanup() 179 | 180 | .. note:: 181 | 182 | By default, if an index file with the given name ``myidx`` in the example 183 | above already exists on the file system, it will be opened in append mode 184 | and not be re-created. You can control this behavior with the 185 | :py:attr:`rtree.index.Property.overwrite` property of the index property 186 | that can be given to the :py:class:`rtree.index.Index` constructor. 187 | 188 | .. seealso:: 189 | 190 | :ref:`performance` describes some parameters you can tune to make 191 | file-based indexes run a bit faster. The choices you make for the 192 | parameters is entirely dependent on your usage. 193 | 194 | Modifying file names 195 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 196 | 197 | Rtree uses the extensions `dat` and `idx` by default for the two index files 198 | that are created when serializing index data to disk. These file extensions 199 | are controllable using the :py:attr:`rtree.index.Property.dat_extension` and 200 | :py:attr:`rtree.index.Property.idx_extension` index properties. 201 | 202 | .. code-block:: pycon 203 | 204 | >>> p = index.Property() 205 | >>> p.dat_extension = "data" 206 | >>> p.idx_extension = "index" 207 | >>> file_idx = index.Index("rtree", properties=p) # doctest: +SKIP 208 | 209 | 3D indexes 210 | .............................................................................. 
211 | 212 | As of Rtree version 0.5.0, you can create 3D (actually kD) indexes. The 213 | following is a 3D index that is to be stored on disk. Persisted indexes are 214 | stored on disk using two files -- an index file (.idx) and a data (.dat) file. 215 | You can modify the extensions these files use by altering the properties of 216 | the index at instantiation time. The following creates a 3D index that is 217 | stored on disk as the files ``3d_index.data`` and ``3d_index.index``: 218 | 219 | .. code-block:: pycon 220 | 221 | >>> from rtree import index 222 | >>> temp_dir = TemporaryDirectory() 223 | >>> os.chdir(temp_dir.name) 224 | >>> p = index.Property() 225 | >>> p.dimension = 3 226 | >>> p.dat_extension = "data" 227 | >>> p.idx_extension = "index" 228 | >>> idx3d = index.Index("3d_index", properties=p) 229 | >>> idx3d.insert(1, (0, 60, 23.0, 0, 60, 42.0)) 230 | >>> list(idx3d.intersection((-1, 60, 22, 1, 62, 43))) 231 | [1] 232 | >>> os.chdir(prev_dir) 233 | >>> temp_dir.cleanup() 234 | 235 | ZODB and Custom Storages 236 | .............................................................................. 237 | 238 | https://mail.zope.org/pipermail/zodb-dev/2010-June/013491.html contains a custom 239 | storage backend for `ZODB`_ and you can find example python code `here`_. Note 240 | that the code was written in 2011, hasn't been updated and was only an alpha 241 | version. 242 | 243 | .. _`here`: https://github.com/Toblerity/zope.index.rtree 244 | .. _`ZODB`: https://zodb.org 245 | .. 
_`libspatialindex`: https://libspatialindex.org 246 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: _rtree 2 | channels: 3 | - defaults 4 | - conda-forge 5 | dependencies: 6 | - python>=3.9 7 | - libspatialindex>=1.8.5 8 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "rtree" 7 | authors = [ 8 | {name = "Sean Gillies", email = "sean.gillies@gmail.com"}, 9 | ] 10 | maintainers = [ 11 | {name = "Howard Butler", email = "howard@hobu.co"}, 12 | {name = "Mike Taves", email = "mwtoews@gmail.com"}, 13 | ] 14 | description = "R-Tree spatial index for Python GIS" 15 | readme = "README.md" 16 | requires-python = ">=3.9" 17 | keywords = ["gis", "spatial", "index", "r-tree"] 18 | license = {text = "MIT"} 19 | classifiers = [ 20 | "Development Status :: 5 - Production/Stable", 21 | "Intended Audience :: Developers", 22 | "Intended Audience :: Science/Research", 23 | "License :: OSI Approved :: MIT License", 24 | "Operating System :: OS Independent", 25 | "Programming Language :: Python :: 3", 26 | "Programming Language :: Python :: 3.9", 27 | "Programming Language :: Python :: 3.10", 28 | "Programming Language :: Python :: 3.11", 29 | "Programming Language :: Python :: 3.12", 30 | "Programming Language :: Python :: 3.13", 31 | "Topic :: Scientific/Engineering :: GIS", 32 | "Topic :: Database", 33 | ] 34 | dynamic = ["version"] 35 | 36 | [project.urls] 37 | Documentation = "https://rtree.readthedocs.io" 38 | Repository = "https://github.com/Toblerity/rtree" 39 | 40 | [tool.setuptools] 41 | packages = ["rtree"] 42 | zip-safe = false 43 | include-package-data = false 44 | 45 | 
[tool.setuptools.dynamic] 46 | version = {attr = "rtree.__version__"} 47 | 48 | [tool.setuptools.package-data] 49 | rtree = ["py.typed"] 50 | 51 | [tool.cibuildwheel] 52 | build = "cp39-*" 53 | build-verbosity = 3 54 | before-all = "pip install wheel" 55 | repair-wheel-command = "python scripts/repair_wheel.py -w {dest_dir} {wheel}" 56 | test-requires = "tox" 57 | test-command = "tox --conf {project} --installpkg {wheel}" 58 | test-skip = [ 59 | "*-macosx_arm64", 60 | ] 61 | 62 | [tool.cibuildwheel.linux] 63 | archs = ["auto"] 64 | before-build = [ 65 | "yum install -y cmake libffi-devel", 66 | "sh {project}/scripts/install_libspatialindex.sh", 67 | ] 68 | 69 | [[tool.cibuildwheel.overrides]] 70 | select = "*-musllinux*" 71 | before-build = [ 72 | "apk add cmake libffi-dev", 73 | "sh {project}/scripts/install_libspatialindex.sh", 74 | ] 75 | 76 | [tool.cibuildwheel.macos] 77 | archs = ["x86_64", "arm64"] 78 | environment = { MACOSX_DEPLOYMENT_TARGET="10.9" } 79 | before-build = [ 80 | "brew install coreutils cmake", 81 | "sh {project}/scripts/install_libspatialindex.sh", 82 | ] 83 | 84 | [tool.cibuildwheel.windows] 85 | archs = ["AMD64"] 86 | before-build = [ 87 | "call {project}\\scripts\\install_libspatialindex.bat", 88 | ] 89 | 90 | [tool.coverage.report] 91 | # Ignore warnings for overloads 92 | # https://github.com/nedbat/coveragepy/issues/970#issuecomment-612602180 93 | exclude_lines = [ 94 | "pragma: no cover", 95 | "@overload", 96 | ] 97 | 98 | [tool.pytest.ini_options] 99 | minversion = "6.0" 100 | addopts = "--import-mode=importlib" 101 | testpaths = ["tests"] 102 | 103 | [tool.ruff.lint] 104 | select = [ 105 | "E", "W", # pycodestyle 106 | "F", # Pyflakes 107 | "UP", # pyupgrade 108 | "I", # isort 109 | "NPY", # NumPy-specific 110 | ] 111 | 112 | [tool.mypy] 113 | exclude = ["docs", "build"] 114 | ignore_missing_imports = true 115 | show_error_codes = true 116 | -------------------------------------------------------------------------------- 
/rtree/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | # rtree 3 | 4 | Rtree provides Python bindings to libspatialindex for quick 5 | hyperrectangular intersection queries. 6 | """ 7 | 8 | from __future__ import annotations 9 | 10 | __version__ = "1.4.0" 11 | 12 | from .index import Index, Rtree # noqa 13 | -------------------------------------------------------------------------------- /rtree/core.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import ctypes 4 | 5 | from . import finder 6 | from .exceptions import RTreeError 7 | 8 | 9 | def check_return(result, func, cargs): 10 | "Error checking for Error calls" 11 | if result != 0: 12 | s = rt.Error_GetLastErrorMsg().decode() 13 | msg = f'Error in "{func.__name__}": {s}' 14 | rt.Error_Reset() 15 | raise RTreeError(msg) 16 | return True 17 | 18 | 19 | def check_void(result, func, cargs): 20 | "Error checking for void* returns" 21 | if not bool(result): 22 | s = rt.Error_GetLastErrorMsg().decode() 23 | msg = f'Error in "{func.__name__}": {s}' 24 | rt.Error_Reset() 25 | raise RTreeError(msg) 26 | return result 27 | 28 | 29 | def check_void_done(result, func, cargs): 30 | "Error checking for void* returns that might be empty with no error" 31 | if rt.Error_GetErrorCount(): 32 | s = rt.Error_GetLastErrorMsg().decode() 33 | msg = f'Error in "{func.__name__}": {s}' 34 | rt.Error_Reset() 35 | raise RTreeError(msg) 36 | return result 37 | 38 | 39 | def check_value(result, func, cargs): 40 | "Error checking proper value returns" 41 | count = rt.Error_GetErrorCount() 42 | if count != 0: 43 | s = rt.Error_GetLastErrorMsg().decode() 44 | msg = f'Error in "{func.__name__}": {s}' 45 | rt.Error_Reset() 46 | raise RTreeError(msg) 47 | return result 48 | 49 | 50 | def check_value_free(result, func, cargs): 51 | "Error checking proper value returns" 52 | count = rt.Error_GetErrorCount() 53 | 
if count != 0: 54 | s = rt.Error_GetLastErrorMsg().decode() 55 | msg = f'Error in "{func.__name__}": {s}' 56 | rt.Error_Reset() 57 | raise RTreeError(msg) 58 | return result 59 | 60 | 61 | def free_returned_char_p(result, func, cargs): 62 | retvalue = ctypes.string_at(result) 63 | p = ctypes.cast(result, ctypes.POINTER(ctypes.c_void_p)) 64 | rt.Index_Free(p) 65 | return retvalue 66 | 67 | 68 | def free_error_msg_ptr(result, func, cargs): 69 | retvalue = ctypes.string_at(result) 70 | p = ctypes.cast(result, ctypes.POINTER(ctypes.c_void_p)) 71 | rt.Index_Free(p) 72 | return retvalue 73 | 74 | 75 | # load the shared library by looking in likely places 76 | rt = finder.load() 77 | 78 | rt.SIDX_Version.argtypes = [] 79 | rt.SIDX_Version.restype = ctypes.POINTER(ctypes.c_char) 80 | rt.SIDX_Version.errcheck = free_returned_char_p # type: ignore 81 | 82 | rt.Error_GetLastErrorNum.argtypes = [] 83 | rt.Error_GetLastErrorNum.restype = ctypes.c_int 84 | 85 | rt.Error_GetLastErrorMsg.argtypes = [] 86 | rt.Error_GetLastErrorMsg.restype = ctypes.POINTER(ctypes.c_char) 87 | rt.Error_GetLastErrorMsg.errcheck = free_error_msg_ptr # type: ignore 88 | 89 | rt.Error_GetLastErrorMethod.argtypes = [] 90 | rt.Error_GetLastErrorMethod.restype = ctypes.POINTER(ctypes.c_char) 91 | rt.Error_GetLastErrorMethod.errcheck = free_returned_char_p # type: ignore 92 | 93 | rt.Error_GetErrorCount.argtypes = [] 94 | rt.Error_GetErrorCount.restype = ctypes.c_int 95 | 96 | rt.Error_Reset.argtypes = [] 97 | rt.Error_Reset.restype = None 98 | 99 | rt.Index_Create.argtypes = [ctypes.c_void_p] 100 | rt.Index_Create.restype = ctypes.c_void_p 101 | rt.Index_Create.errcheck = check_void # type: ignore 102 | 103 | _nDataLength_size_t = True 104 | try: 105 | _major, _minor, _patch = ( 106 | int(part) for part in rt.SIDX_Version().decode("ascii").split(".") 107 | ) 108 | except (ValueError, UnicodeDecodeError): 109 | pass # weird version; assume latest ABI 110 | else: 111 | if (_major, _minor, _patch) < (1, 9, 
0): 112 | # Headers had size_t*, but implementation had uint32_t* 113 | _nDataLength_size_t = False 114 | NEXTFUNC = ctypes.CFUNCTYPE( 115 | ctypes.c_int, 116 | ctypes.POINTER(ctypes.c_int64), 117 | ctypes.POINTER(ctypes.POINTER(ctypes.c_double)), 118 | ctypes.POINTER(ctypes.POINTER(ctypes.c_double)), 119 | ctypes.POINTER(ctypes.c_uint32), 120 | ctypes.POINTER(ctypes.POINTER(ctypes.c_ubyte)), 121 | ctypes.POINTER(ctypes.c_size_t if _nDataLength_size_t else ctypes.c_uint32), 122 | ) 123 | 124 | rt.Index_CreateWithStream.argtypes = [ctypes.c_void_p, NEXTFUNC] 125 | rt.Index_CreateWithStream.restype = ctypes.c_void_p 126 | rt.Index_CreateWithStream.errcheck = check_void # type: ignore 127 | 128 | try: 129 | rt.Index_CreateWithArray.argtypes = [ 130 | ctypes.c_void_p, 131 | ctypes.c_uint64, 132 | ctypes.c_uint32, 133 | ctypes.c_uint64, 134 | ctypes.c_uint64, 135 | ctypes.c_uint64, 136 | ctypes.c_void_p, 137 | ctypes.c_void_p, 138 | ctypes.c_void_p, 139 | ] 140 | rt.Index_CreateWithArray.restype = ctypes.c_void_p 141 | rt.Index_CreateWithArray.errcheck = check_void # type: ignore 142 | except AttributeError: 143 | pass 144 | 145 | rt.Index_Destroy.argtypes = [ctypes.c_void_p] 146 | rt.Index_Destroy.restype = None 147 | rt.Index_Destroy.errcheck = check_void_done # type: ignore 148 | 149 | rt.Index_GetProperties.argtypes = [ctypes.c_void_p] 150 | rt.Index_GetProperties.restype = ctypes.c_void_p 151 | rt.Index_GetProperties.errcheck = check_void # type: ignore 152 | 153 | rt.Index_DeleteData.argtypes = [ 154 | ctypes.c_void_p, 155 | ctypes.c_int64, 156 | ctypes.POINTER(ctypes.c_double), 157 | ctypes.POINTER(ctypes.c_double), 158 | ctypes.c_uint32, 159 | ] 160 | rt.Index_DeleteData.restype = ctypes.c_int 161 | rt.Index_DeleteData.errcheck = check_return # type: ignore 162 | 163 | rt.Index_InsertData.argtypes = [ 164 | ctypes.c_void_p, 165 | ctypes.c_int64, 166 | ctypes.POINTER(ctypes.c_double), 167 | ctypes.POINTER(ctypes.c_double), 168 | ctypes.c_uint32, 169 | 
ctypes.POINTER(ctypes.c_ubyte), 170 | ctypes.c_uint32, 171 | ] 172 | rt.Index_InsertData.restype = ctypes.c_int 173 | rt.Index_InsertData.errcheck = check_return # type: ignore 174 | 175 | rt.Index_GetBounds.argtypes = [ 176 | ctypes.c_void_p, 177 | ctypes.POINTER(ctypes.POINTER(ctypes.c_double)), 178 | ctypes.POINTER(ctypes.POINTER(ctypes.c_double)), 179 | ctypes.POINTER(ctypes.c_uint32), 180 | ] 181 | rt.Index_GetBounds.restype = ctypes.c_int 182 | rt.Index_GetBounds.errcheck = check_value # type: ignore 183 | 184 | rt.Index_IsValid.argtypes = [ctypes.c_void_p] 185 | rt.Index_IsValid.restype = ctypes.c_int 186 | rt.Index_IsValid.errcheck = check_value # type: ignore 187 | 188 | rt.Index_Intersects_obj.argtypes = [ 189 | ctypes.c_void_p, 190 | ctypes.POINTER(ctypes.c_double), 191 | ctypes.POINTER(ctypes.c_double), 192 | ctypes.c_uint32, 193 | ctypes.POINTER(ctypes.POINTER(ctypes.c_void_p)), 194 | ctypes.POINTER(ctypes.c_uint64), 195 | ] 196 | rt.Index_Intersects_obj.restype = ctypes.c_int 197 | rt.Index_Intersects_obj.errcheck = check_return # type: ignore 198 | 199 | 200 | rt.Index_Intersects_id.argtypes = [ 201 | ctypes.c_void_p, 202 | ctypes.POINTER(ctypes.c_double), 203 | ctypes.POINTER(ctypes.c_double), 204 | ctypes.c_uint32, 205 | ctypes.POINTER(ctypes.POINTER(ctypes.c_int64)), 206 | ctypes.POINTER(ctypes.c_uint64), 207 | ] 208 | rt.Index_Intersects_id.restype = ctypes.c_int 209 | rt.Index_Intersects_id.errcheck = check_return # type: ignore 210 | 211 | rt.Index_Intersects_count.argtypes = [ 212 | ctypes.c_void_p, 213 | ctypes.POINTER(ctypes.c_double), 214 | ctypes.POINTER(ctypes.c_double), 215 | ctypes.c_uint32, 216 | ctypes.POINTER(ctypes.c_uint64), 217 | ] 218 | 219 | rt.Index_NearestNeighbors_obj.argtypes = [ 220 | ctypes.c_void_p, 221 | ctypes.POINTER(ctypes.c_double), 222 | ctypes.POINTER(ctypes.c_double), 223 | ctypes.c_uint32, 224 | ctypes.POINTER(ctypes.POINTER(ctypes.c_void_p)), 225 | ctypes.POINTER(ctypes.c_uint64), 226 | ] 227 | 
rt.Index_NearestNeighbors_obj.restype = ctypes.c_int 228 | rt.Index_NearestNeighbors_obj.errcheck = check_return # type: ignore 229 | 230 | rt.Index_NearestNeighbors_id.argtypes = [ 231 | ctypes.c_void_p, 232 | ctypes.POINTER(ctypes.c_double), 233 | ctypes.POINTER(ctypes.c_double), 234 | ctypes.c_uint32, 235 | ctypes.POINTER(ctypes.POINTER(ctypes.c_int64)), 236 | ctypes.POINTER(ctypes.c_uint64), 237 | ] 238 | rt.Index_NearestNeighbors_id.restype = ctypes.c_int 239 | rt.Index_NearestNeighbors_id.errcheck = check_return # type: ignore 240 | 241 | try: 242 | rt.Index_NearestNeighbors_id_v.argtypes = [ 243 | ctypes.c_void_p, 244 | ctypes.c_int64, 245 | ctypes.c_int64, 246 | ctypes.c_uint32, 247 | ctypes.c_uint64, 248 | ctypes.c_uint64, 249 | ctypes.c_uint64, 250 | ctypes.c_void_p, 251 | ctypes.c_void_p, 252 | ctypes.c_void_p, 253 | ctypes.c_void_p, 254 | ctypes.c_void_p, 255 | ctypes.POINTER(ctypes.c_int64), 256 | ] 257 | rt.Index_NearestNeighbors_id_v.restype = ctypes.c_int 258 | rt.Index_NearestNeighbors_id_v.errcheck = check_return # type: ignore 259 | 260 | rt.Index_Intersects_id_v.argtypes = [ 261 | ctypes.c_void_p, 262 | ctypes.c_int64, 263 | ctypes.c_uint32, 264 | ctypes.c_uint64, 265 | ctypes.c_uint64, 266 | ctypes.c_uint64, 267 | ctypes.c_void_p, 268 | ctypes.c_void_p, 269 | ctypes.c_void_p, 270 | ctypes.c_void_p, 271 | ctypes.POINTER(ctypes.c_int64), 272 | ] 273 | rt.Index_Intersects_id_v.restype = ctypes.c_int 274 | rt.Index_Intersects_id_v.errcheck = check_return # type: ignore 275 | except AttributeError: 276 | pass 277 | 278 | 279 | rt.Index_GetLeaves.argtypes = [ 280 | ctypes.c_void_p, 281 | ctypes.POINTER(ctypes.c_uint32), 282 | ctypes.POINTER(ctypes.POINTER(ctypes.c_uint32)), 283 | ctypes.POINTER(ctypes.POINTER(ctypes.c_int64)), 284 | ctypes.POINTER(ctypes.POINTER(ctypes.POINTER(ctypes.c_int64))), 285 | ctypes.POINTER(ctypes.POINTER(ctypes.POINTER(ctypes.c_double))), 286 | ctypes.POINTER(ctypes.POINTER(ctypes.POINTER(ctypes.c_double))), 287 | 
ctypes.POINTER(ctypes.c_uint32), 288 | ] 289 | rt.Index_GetLeaves.restype = ctypes.c_int 290 | rt.Index_GetLeaves.errcheck = check_return # type: ignore 291 | 292 | rt.Index_DestroyObjResults.argtypes = [ 293 | ctypes.POINTER(ctypes.POINTER(ctypes.c_void_p)), 294 | ctypes.c_uint32, 295 | ] 296 | rt.Index_DestroyObjResults.restype = None 297 | rt.Index_DestroyObjResults.errcheck = check_void_done # type: ignore 298 | 299 | rt.Index_ClearBuffer.argtypes = [ctypes.c_void_p] 300 | rt.Index_ClearBuffer.restype = None 301 | rt.Index_ClearBuffer.errcheck = check_void_done # type: ignore 302 | 303 | rt.Index_Free.argtypes = [ctypes.POINTER(ctypes.c_void_p)] 304 | rt.Index_Free.restype = None 305 | 306 | rt.IndexItem_Destroy.argtypes = [ctypes.c_void_p] 307 | rt.IndexItem_Destroy.restype = None 308 | rt.IndexItem_Destroy.errcheck = check_void_done # type: ignore 309 | 310 | rt.IndexItem_GetData.argtypes = [ 311 | ctypes.c_void_p, 312 | ctypes.POINTER(ctypes.POINTER(ctypes.c_ubyte)), 313 | ctypes.POINTER(ctypes.c_uint64), 314 | ] 315 | rt.IndexItem_GetData.restype = ctypes.c_int 316 | rt.IndexItem_GetData.errcheck = check_value # type: ignore 317 | 318 | rt.IndexItem_GetBounds.argtypes = [ 319 | ctypes.c_void_p, 320 | ctypes.POINTER(ctypes.POINTER(ctypes.c_double)), 321 | ctypes.POINTER(ctypes.POINTER(ctypes.c_double)), 322 | ctypes.POINTER(ctypes.c_uint32), 323 | ] 324 | rt.IndexItem_GetBounds.restype = ctypes.c_int 325 | rt.IndexItem_GetBounds.errcheck = check_value # type: ignore 326 | 327 | rt.IndexItem_GetID.argtypes = [ctypes.c_void_p] 328 | rt.IndexItem_GetID.restype = ctypes.c_int64 329 | rt.IndexItem_GetID.errcheck = check_value # type: ignore 330 | 331 | try: 332 | rt.Index_GetResultSetOffset.argtypes = [ctypes.c_void_p] 333 | rt.Index_GetResultSetOffset.restype = ctypes.c_int64 334 | rt.Index_GetResultSetOffset.errcheck = check_value # type: ignore 335 | 336 | rt.Index_SetResultSetOffset.argtypes = [ctypes.c_void_p, ctypes.c_int64] 337 | 
rt.Index_SetResultSetOffset.restype = ctypes.c_int 338 | rt.Index_SetResultSetOffset.errcheck = check_return # type: ignore 339 | 340 | rt.Index_GetResultSetLimit.argtypes = [ctypes.c_void_p] 341 | rt.Index_GetResultSetLimit.restype = ctypes.c_int64 342 | rt.Index_GetResultSetLimit.errcheck = check_value # type: ignore 343 | 344 | rt.Index_SetResultSetLimit.argtypes = [ctypes.c_void_p, ctypes.c_int64] 345 | rt.Index_SetResultSetLimit.restype = ctypes.c_int 346 | rt.Index_SetResultSetLimit.errcheck = check_return # type: ignore 347 | 348 | rt.Index_Flush.argtypes = [ctypes.c_void_p] 349 | rt.Index_Flush.restype = None 350 | rt.Index_Flush.errcheck = check_void_done # type: ignore 351 | 352 | rt.Index_Contains_obj.argtypes = [ 353 | ctypes.c_void_p, 354 | ctypes.POINTER(ctypes.c_double), 355 | ctypes.POINTER(ctypes.c_double), 356 | ctypes.c_uint32, 357 | ctypes.POINTER(ctypes.POINTER(ctypes.c_void_p)), 358 | ctypes.POINTER(ctypes.c_uint64), 359 | ] 360 | rt.Index_Contains_obj.restype = ctypes.c_int 361 | rt.Index_Contains_obj.errcheck = check_return # type: ignore 362 | 363 | rt.Index_Contains_id.argtypes = [ 364 | ctypes.c_void_p, 365 | ctypes.POINTER(ctypes.c_double), 366 | ctypes.POINTER(ctypes.c_double), 367 | ctypes.c_uint32, 368 | ctypes.POINTER(ctypes.POINTER(ctypes.c_int64)), 369 | ctypes.POINTER(ctypes.c_uint64), 370 | ] 371 | rt.Index_Contains_id.restype = ctypes.c_int 372 | rt.Index_Contains_id.errcheck = check_return # type: ignore 373 | 374 | except AttributeError: 375 | pass 376 | 377 | rt.IndexProperty_Create.argtypes = [] 378 | rt.IndexProperty_Create.restype = ctypes.c_void_p 379 | rt.IndexProperty_Create.errcheck = check_void # type: ignore 380 | 381 | rt.IndexProperty_Destroy.argtypes = [ctypes.c_void_p] 382 | rt.IndexProperty_Destroy.restype = None 383 | rt.IndexProperty_Destroy.errcheck = check_void_done # type: ignore 384 | 385 | rt.IndexProperty_SetIndexType.argtypes = [ctypes.c_void_p, ctypes.c_int] 386 | rt.IndexProperty_SetIndexType.restype 
= ctypes.c_int 387 | rt.IndexProperty_SetIndexType.errcheck = check_return # type: ignore 388 | 389 | rt.IndexProperty_GetIndexType.argtypes = [ctypes.c_void_p] 390 | rt.IndexProperty_GetIndexType.restype = ctypes.c_int 391 | rt.IndexProperty_GetIndexType.errcheck = check_value # type: ignore 392 | 393 | rt.IndexProperty_SetDimension.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 394 | rt.IndexProperty_SetDimension.restype = ctypes.c_int 395 | rt.IndexProperty_SetDimension.errcheck = check_return # type: ignore 396 | 397 | rt.IndexProperty_GetDimension.argtypes = [ctypes.c_void_p] 398 | rt.IndexProperty_GetDimension.restype = ctypes.c_uint32 399 | rt.IndexProperty_GetDimension.errcheck = check_value # type: ignore 400 | 401 | rt.IndexProperty_SetIndexVariant.argtypes = [ctypes.c_void_p, ctypes.c_int] 402 | rt.IndexProperty_SetIndexVariant.restype = ctypes.c_int 403 | rt.IndexProperty_SetIndexVariant.errcheck = check_return # type: ignore 404 | 405 | rt.IndexProperty_GetIndexVariant.argtypes = [ctypes.c_void_p] 406 | rt.IndexProperty_GetIndexVariant.restype = ctypes.c_int 407 | rt.IndexProperty_GetIndexVariant.errcheck = check_value # type: ignore 408 | 409 | rt.IndexProperty_SetIndexStorage.argtypes = [ctypes.c_void_p, ctypes.c_int] 410 | rt.IndexProperty_SetIndexStorage.restype = ctypes.c_int 411 | rt.IndexProperty_SetIndexStorage.errcheck = check_return # type: ignore 412 | 413 | rt.IndexProperty_GetIndexStorage.argtypes = [ctypes.c_void_p] 414 | rt.IndexProperty_GetIndexStorage.restype = ctypes.c_int 415 | rt.IndexProperty_GetIndexStorage.errcheck = check_value # type: ignore 416 | 417 | rt.IndexProperty_SetIndexCapacity.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 418 | rt.IndexProperty_SetIndexCapacity.restype = ctypes.c_int 419 | rt.IndexProperty_SetIndexCapacity.errcheck = check_return # type: ignore 420 | 421 | rt.IndexProperty_GetIndexCapacity.argtypes = [ctypes.c_void_p] 422 | rt.IndexProperty_GetIndexCapacity.restype = ctypes.c_uint32 423 | 
rt.IndexProperty_GetIndexCapacity.errcheck = check_value # type: ignore 424 | 425 | rt.IndexProperty_SetLeafCapacity.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 426 | rt.IndexProperty_SetLeafCapacity.restype = ctypes.c_int 427 | rt.IndexProperty_SetLeafCapacity.errcheck = check_return # type: ignore 428 | 429 | rt.IndexProperty_GetLeafCapacity.argtypes = [ctypes.c_void_p] 430 | rt.IndexProperty_GetLeafCapacity.restype = ctypes.c_uint32 431 | rt.IndexProperty_GetLeafCapacity.errcheck = check_value # type: ignore 432 | 433 | rt.IndexProperty_SetPagesize.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 434 | rt.IndexProperty_SetPagesize.restype = ctypes.c_int 435 | rt.IndexProperty_SetPagesize.errcheck = check_return # type: ignore 436 | 437 | rt.IndexProperty_GetPagesize.argtypes = [ctypes.c_void_p] 438 | rt.IndexProperty_GetPagesize.restype = ctypes.c_uint32 439 | rt.IndexProperty_GetPagesize.errcheck = check_value # type: ignore 440 | 441 | rt.IndexProperty_SetLeafPoolCapacity.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 442 | rt.IndexProperty_SetLeafPoolCapacity.restype = ctypes.c_int 443 | rt.IndexProperty_SetLeafPoolCapacity.errcheck = check_return # type: ignore 444 | 445 | rt.IndexProperty_GetLeafPoolCapacity.argtypes = [ctypes.c_void_p] 446 | rt.IndexProperty_GetLeafPoolCapacity.restype = ctypes.c_uint32 447 | rt.IndexProperty_GetLeafPoolCapacity.errcheck = check_value # type: ignore 448 | 449 | rt.IndexProperty_SetIndexPoolCapacity.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 450 | rt.IndexProperty_SetIndexPoolCapacity.restype = ctypes.c_int 451 | rt.IndexProperty_SetIndexPoolCapacity.errcheck = check_return # type: ignore 452 | 453 | rt.IndexProperty_GetIndexPoolCapacity.argtypes = [ctypes.c_void_p] 454 | rt.IndexProperty_GetIndexPoolCapacity.restype = ctypes.c_uint32 455 | rt.IndexProperty_GetIndexPoolCapacity.errcheck = check_value # type: ignore 456 | 457 | rt.IndexProperty_SetRegionPoolCapacity.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 458 | 
rt.IndexProperty_SetRegionPoolCapacity.restype = ctypes.c_int 459 | rt.IndexProperty_SetRegionPoolCapacity.errcheck = check_return # type: ignore 460 | 461 | rt.IndexProperty_GetRegionPoolCapacity.argtypes = [ctypes.c_void_p] 462 | rt.IndexProperty_GetRegionPoolCapacity.restype = ctypes.c_uint32 463 | rt.IndexProperty_GetRegionPoolCapacity.errcheck = check_value # type: ignore 464 | 465 | rt.IndexProperty_SetPointPoolCapacity.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 466 | rt.IndexProperty_SetPointPoolCapacity.restype = ctypes.c_int 467 | rt.IndexProperty_SetPointPoolCapacity.errcheck = check_return # type: ignore 468 | 469 | rt.IndexProperty_GetPointPoolCapacity.argtypes = [ctypes.c_void_p] 470 | rt.IndexProperty_GetPointPoolCapacity.restype = ctypes.c_uint32 471 | rt.IndexProperty_GetPointPoolCapacity.errcheck = check_value # type: ignore 472 | 473 | rt.IndexProperty_SetBufferingCapacity.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 474 | rt.IndexProperty_SetBufferingCapacity.restype = ctypes.c_int 475 | rt.IndexProperty_SetBufferingCapacity.errcheck = check_return # type: ignore 476 | 477 | rt.IndexProperty_GetBufferingCapacity.argtypes = [ctypes.c_void_p] 478 | rt.IndexProperty_GetBufferingCapacity.restype = ctypes.c_uint32 479 | rt.IndexProperty_GetBufferingCapacity.errcheck = check_value # type: ignore 480 | 481 | rt.IndexProperty_SetEnsureTightMBRs.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 482 | rt.IndexProperty_SetEnsureTightMBRs.restype = ctypes.c_int 483 | rt.IndexProperty_SetEnsureTightMBRs.errcheck = check_return # type: ignore 484 | 485 | rt.IndexProperty_GetEnsureTightMBRs.argtypes = [ctypes.c_void_p] 486 | rt.IndexProperty_GetEnsureTightMBRs.restype = ctypes.c_uint32 487 | rt.IndexProperty_GetEnsureTightMBRs.errcheck = check_value # type: ignore 488 | 489 | rt.IndexProperty_SetOverwrite.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 490 | rt.IndexProperty_SetOverwrite.restype = ctypes.c_int 491 | rt.IndexProperty_SetOverwrite.errcheck = 
check_return # type: ignore 492 | 493 | rt.IndexProperty_GetOverwrite.argtypes = [ctypes.c_void_p] 494 | rt.IndexProperty_GetOverwrite.restype = ctypes.c_uint32 495 | rt.IndexProperty_GetOverwrite.errcheck = check_value # type: ignore 496 | 497 | rt.IndexProperty_SetNearMinimumOverlapFactor.argtypes = [ 498 | ctypes.c_void_p, 499 | ctypes.c_uint32, 500 | ] 501 | rt.IndexProperty_SetNearMinimumOverlapFactor.restype = ctypes.c_int 502 | rt.IndexProperty_SetNearMinimumOverlapFactor.errcheck = check_return # type: ignore 503 | 504 | rt.IndexProperty_GetNearMinimumOverlapFactor.argtypes = [ctypes.c_void_p] 505 | rt.IndexProperty_GetNearMinimumOverlapFactor.restype = ctypes.c_uint32 506 | rt.IndexProperty_GetNearMinimumOverlapFactor.errcheck = check_value # type: ignore 507 | 508 | rt.IndexProperty_SetWriteThrough.argtypes = [ctypes.c_void_p, ctypes.c_uint32] 509 | rt.IndexProperty_SetWriteThrough.restype = ctypes.c_int 510 | rt.IndexProperty_SetWriteThrough.errcheck = check_return # type: ignore 511 | 512 | rt.IndexProperty_GetWriteThrough.argtypes = [ctypes.c_void_p] 513 | rt.IndexProperty_GetWriteThrough.restype = ctypes.c_uint32 514 | rt.IndexProperty_GetWriteThrough.errcheck = check_value # type: ignore 515 | 516 | rt.IndexProperty_SetFillFactor.argtypes = [ctypes.c_void_p, ctypes.c_double] 517 | rt.IndexProperty_SetFillFactor.restype = ctypes.c_int 518 | rt.IndexProperty_SetFillFactor.errcheck = check_return # type: ignore 519 | 520 | rt.IndexProperty_GetFillFactor.argtypes = [ctypes.c_void_p] 521 | rt.IndexProperty_GetFillFactor.restype = ctypes.c_double 522 | rt.IndexProperty_GetFillFactor.errcheck = check_value # type: ignore 523 | 524 | rt.IndexProperty_SetSplitDistributionFactor.argtypes = [ 525 | ctypes.c_void_p, 526 | ctypes.c_double, 527 | ] 528 | rt.IndexProperty_SetSplitDistributionFactor.restype = ctypes.c_int 529 | rt.IndexProperty_SetSplitDistributionFactor.errcheck = check_return # type: ignore 530 | 531 | 
rt.IndexProperty_GetSplitDistributionFactor.argtypes = [ctypes.c_void_p] 532 | rt.IndexProperty_GetSplitDistributionFactor.restype = ctypes.c_double 533 | rt.IndexProperty_GetSplitDistributionFactor.errcheck = check_value # type: ignore 534 | 535 | rt.IndexProperty_SetTPRHorizon.argtypes = [ctypes.c_void_p, ctypes.c_double] 536 | rt.IndexProperty_SetTPRHorizon.restype = ctypes.c_int 537 | rt.IndexProperty_SetTPRHorizon.errcheck = check_return # type: ignore 538 | 539 | rt.IndexProperty_GetTPRHorizon.argtypes = [ctypes.c_void_p] 540 | rt.IndexProperty_GetTPRHorizon.restype = ctypes.c_double 541 | rt.IndexProperty_GetTPRHorizon.errcheck = check_value # type: ignore 542 | 543 | rt.IndexProperty_SetReinsertFactor.argtypes = [ctypes.c_void_p, ctypes.c_double] 544 | rt.IndexProperty_SetReinsertFactor.restype = ctypes.c_int 545 | rt.IndexProperty_SetReinsertFactor.errcheck = check_return # type: ignore 546 | 547 | rt.IndexProperty_GetReinsertFactor.argtypes = [ctypes.c_void_p] 548 | rt.IndexProperty_GetReinsertFactor.restype = ctypes.c_double 549 | rt.IndexProperty_GetReinsertFactor.errcheck = check_value # type: ignore 550 | 551 | rt.IndexProperty_SetFileName.argtypes = [ctypes.c_void_p, ctypes.c_char_p] 552 | rt.IndexProperty_SetFileName.restype = ctypes.c_int 553 | rt.IndexProperty_SetFileName.errcheck = check_return # type: ignore 554 | 555 | rt.IndexProperty_GetFileName.argtypes = [ctypes.c_void_p] 556 | rt.IndexProperty_GetFileName.errcheck = free_returned_char_p # type: ignore 557 | rt.IndexProperty_GetFileName.restype = ctypes.POINTER(ctypes.c_char) 558 | 559 | rt.IndexProperty_SetFileNameExtensionDat.argtypes = [ctypes.c_void_p, ctypes.c_char_p] 560 | rt.IndexProperty_SetFileNameExtensionDat.restype = ctypes.c_int 561 | rt.IndexProperty_SetFileNameExtensionDat.errcheck = check_return # type: ignore 562 | 563 | rt.IndexProperty_GetFileNameExtensionDat.argtypes = [ctypes.c_void_p] 564 | rt.IndexProperty_GetFileNameExtensionDat.errcheck = free_returned_char_p # 
type: ignore 565 | rt.IndexProperty_GetFileNameExtensionDat.restype = ctypes.POINTER(ctypes.c_char) 566 | 567 | rt.IndexProperty_SetFileNameExtensionIdx.argtypes = [ctypes.c_void_p, ctypes.c_char_p] 568 | rt.IndexProperty_SetFileNameExtensionIdx.restype = ctypes.c_int 569 | rt.IndexProperty_SetFileNameExtensionIdx.errcheck = check_return # type: ignore 570 | 571 | rt.IndexProperty_GetFileNameExtensionIdx.argtypes = [ctypes.c_void_p] 572 | rt.IndexProperty_GetFileNameExtensionIdx.errcheck = free_returned_char_p # type: ignore 573 | rt.IndexProperty_GetFileNameExtensionIdx.restype = ctypes.POINTER(ctypes.c_char) 574 | 575 | rt.IndexProperty_SetCustomStorageCallbacksSize.argtypes = [ 576 | ctypes.c_void_p, 577 | ctypes.c_uint32, 578 | ] 579 | rt.IndexProperty_SetCustomStorageCallbacksSize.restype = ctypes.c_int 580 | rt.IndexProperty_SetCustomStorageCallbacksSize.errcheck = check_return # type: ignore 581 | 582 | rt.IndexProperty_GetCustomStorageCallbacksSize.argtypes = [ctypes.c_void_p] 583 | rt.IndexProperty_GetCustomStorageCallbacksSize.restype = ctypes.c_uint32 584 | rt.IndexProperty_GetCustomStorageCallbacksSize.errcheck = check_value # type: ignore 585 | 586 | rt.IndexProperty_SetCustomStorageCallbacks.argtypes = [ctypes.c_void_p, ctypes.c_void_p] 587 | rt.IndexProperty_SetCustomStorageCallbacks.restype = ctypes.c_int 588 | rt.IndexProperty_SetCustomStorageCallbacks.errcheck = check_return # type: ignore 589 | 590 | rt.IndexProperty_GetCustomStorageCallbacks.argtypes = [ctypes.c_void_p] 591 | rt.IndexProperty_GetCustomStorageCallbacks.restype = ctypes.c_void_p 592 | rt.IndexProperty_GetCustomStorageCallbacks.errcheck = check_value # type: ignore 593 | 594 | rt.IndexProperty_SetIndexID.argtypes = [ctypes.c_void_p, ctypes.c_int64] 595 | rt.IndexProperty_SetIndexID.restype = ctypes.c_int 596 | rt.IndexProperty_SetIndexID.errcheck = check_return # type: ignore 597 | 598 | rt.IndexProperty_GetIndexID.argtypes = [ctypes.c_void_p] 599 | 
rt.IndexProperty_GetIndexID.restype = ctypes.c_int64
rt.IndexProperty_GetIndexID.errcheck = check_value  # type: ignore

# Raw byte buffers handed across the C boundary.
rt.SIDX_NewBuffer.argtypes = [ctypes.c_size_t]
rt.SIDX_NewBuffer.restype = ctypes.c_void_p
rt.SIDX_NewBuffer.errcheck = check_void  # type: ignore

rt.SIDX_DeleteBuffer.argtypes = [ctypes.c_void_p]
rt.SIDX_DeleteBuffer.restype = None

# TPR-Tree API
# Wrapped in try/except AttributeError (closed below) so that a
# libspatialindex build without the TPR entry points is still usable.
try:
    rt.Index_InsertTPData.argtypes = [
        ctypes.c_void_p,
        ctypes.c_int64,
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.c_double,
        ctypes.c_double,
        ctypes.c_uint32,
        ctypes.POINTER(ctypes.c_ubyte),
        ctypes.c_size_t,
    ]
    rt.Index_InsertTPData.restype = ctypes.c_int
    rt.Index_InsertTPData.errcheck = check_return  # type: ignore

    rt.Index_DeleteTPData.argtypes = [
        ctypes.c_void_p,
        ctypes.c_int64,
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.c_double,
        ctypes.c_double,
        ctypes.c_uint32,
    ]
    rt.Index_DeleteTPData.restype = ctypes.c_int
    rt.Index_DeleteTPData.errcheck = check_return  # type: ignore

    rt.Index_TPIntersects_id.argtypes = [
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.c_double,
        ctypes.c_double,
        ctypes.c_uint32,
        ctypes.POINTER(ctypes.POINTER(ctypes.c_int64)),
        ctypes.POINTER(ctypes.c_uint64),
    ]
    rt.Index_TPIntersects_id.restype = ctypes.c_int
    rt.Index_TPIntersects_id.errcheck = check_return  # type: ignore

    rt.Index_TPIntersects_obj.argtypes = [
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.c_double,
        ctypes.c_double,
        ctypes.c_uint32,
        ctypes.POINTER(ctypes.POINTER(ctypes.c_void_p)),
        ctypes.POINTER(ctypes.c_uint64),
    ]
    rt.Index_TPIntersects_obj.restype = ctypes.c_int
    rt.Index_TPIntersects_obj.errcheck = check_return  # type: ignore

    rt.Index_TPIntersects_count.argtypes = [
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.c_double,
        ctypes.c_double,
        ctypes.c_uint32,
        ctypes.POINTER(ctypes.c_uint64),
    ]
    rt.Index_TPIntersects_count.restype = ctypes.c_int
    rt.Index_TPIntersects_count.errcheck = check_return  # type: ignore

    rt.Index_TPNearestNeighbors_id.argtypes = [
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.c_double,
        ctypes.c_double,
        ctypes.c_uint32,
        ctypes.POINTER(ctypes.POINTER(ctypes.c_int64)),
        ctypes.POINTER(ctypes.c_uint64),
    ]
    rt.Index_TPNearestNeighbors_id.restype = ctypes.c_int
    rt.Index_TPNearestNeighbors_id.errcheck = check_return  # type: ignore

    rt.Index_TPNearestNeighbors_obj.argtypes = [
        ctypes.c_void_p,
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.POINTER(ctypes.c_double),
        ctypes.c_double,
        ctypes.c_double,
        ctypes.c_uint32,
        ctypes.POINTER(ctypes.POINTER(ctypes.c_void_p)),
        ctypes.POINTER(ctypes.c_uint64),
    ]
    rt.Index_TPNearestNeighbors_obj.restype = ctypes.c_int
    rt.Index_TPNearestNeighbors_obj.errcheck = check_return  # type: ignore
except AttributeError:
    pass
--------------------------------------------------------------------------------
/rtree/exceptions.py:
--------------------------------------------------------------------------------
from __future__ import annotations


class RTreeError(Exception):
    # Base exception for all rtree-specific errors raised by this package.
    "RTree exception, indicates a RTree-related error."

    pass
--------------------------------------------------------------------------------
/rtree/finder.py:
--------------------------------------------------------------------------------
"""
Locate `libspatialindex` shared library and header files.
"""

from __future__ import annotations

import ctypes
import importlib.metadata
import os
import platform
import sys
from ctypes.util import find_library
from pathlib import Path

_cwd = Path(__file__).parent
_sys_prefix = Path(sys.prefix)

# generate a bunch of candidate locations where the
# libspatialindex shared library *might* be hanging out
_candidates = []
if "SPATIALINDEX_C_LIBRARY" in os.environ:
    _candidates.append(Path(os.environ["SPATIALINDEX_C_LIBRARY"]))
_candidates += [_cwd / "lib", _cwd, Path("")]


def load() -> ctypes.CDLL:
    """Load the `libspatialindex` shared library.

    :returns: Loaded shared library
    :raises OSError: if the library cannot be found or loaded
    """
    if os.name == "nt":
        # check the platform architecture
        if "64" in platform.architecture()[0]:
            arch = "64"
        else:
            arch = "32"
        lib_name = f"spatialindex_c-{arch}.dll"

        # add search paths for conda installs
        if (_sys_prefix / "conda-meta").exists() or "conda" in sys.version:
            _candidates.append(_sys_prefix / "Library" / "bin")

        # get the current PATH
        oldenv = os.environ.get("PATH", "").strip().rstrip(";")
        # run through our list of candidate locations
        for path in _candidates:
            if not path.exists():
                continue
            # temporarily add the path to the PATH environment variable
            # so Windows can find additional DLL dependencies.
            os.environ["PATH"] = ";".join([str(path), oldenv])
            try:
                rt = ctypes.cdll.LoadLibrary(str(path / lib_name))
                if rt is not None:
                    return rt
            except OSError:
                pass
            except BaseException as err:
                print(f"rtree.finder unexpected error: {err!s}", file=sys.stderr)
            finally:
                # always restore the original PATH, even on success
                os.environ["PATH"] = oldenv
        raise OSError(f"could not find or load {lib_name}")

    elif os.name == "posix":
        # posix includes both mac and linux
        # use the extension for the specific platform
        if platform.system() == "Darwin":
            # macos shared libraries are `.dylib`
            lib_name = "libspatialindex_c.dylib"
        else:
            # linux shared libraries are `.so`
            lib_name = "libspatialindex_c.so"

        # add path for binary wheel prepared with cibuildwheel/auditwheel
        try:
            pkg_files = importlib.metadata.files("rtree")
            if pkg_files is not None:
                for file in pkg_files:  # type: ignore
                    if (
                        file.parent.name == "rtree.libs"
                        and file.stem.startswith("libspatialindex")
                        and ".so" in file.suffixes
                    ):
                        _candidates.insert(1, Path(file.locate()))
                        break
        except importlib.metadata.PackageNotFoundError:
            pass

        # get the starting working directory
        cwd = os.getcwd()
        for cand in _candidates:
            if cand.is_dir():
                # if our candidate is a directory use best guess
                path = cand
                target = cand / lib_name
            elif cand.is_file():
                # if candidate is just a file use that
                path = cand.parent
                target = cand
            else:
                continue

            if not target.exists():
                continue

            try:
                # move to the location we're checking
                os.chdir(path)
                # try loading the target file candidate
                rt = ctypes.cdll.LoadLibrary(str(target))
                if rt is not None:
                    return rt
            except BaseException as err:
                print(
                    f"rtree.finder ({target}) unexpected error: {err!s}",
                    file=sys.stderr,
                )
            finally:
                # always restore the original working directory
                os.chdir(cwd)

        try:
            # try loading library using LD path search
            pth = find_library("spatialindex_c")
            if pth is not None:
                return ctypes.cdll.LoadLibrary(pth)

        except BaseException:
            pass

    raise OSError("Could not load libspatialindex_c library")


def get_include() -> str:
    """Return the directory that contains the spatialindex \\*.h files.

    :returns: Path to include directory or "" if not found.
    """
    # check if was bundled with a binary wheel
    try:
        pkg_files = importlib.metadata.files("rtree")
        if pkg_files is not None:
            for path in pkg_files:  # type: ignore
                if path.name == "SpatialIndex.h":
                    return str(Path(path.locate()).parent.parent)
    except importlib.metadata.PackageNotFoundError:
        pass

    # look for this header file in a few directories
    path_to_spatialindex_h = Path("include/spatialindex/SpatialIndex.h")

    # check sys.prefix, e.g. conda's libspatialindex package
    if os.name == "nt":
        file = _sys_prefix / "Library" / path_to_spatialindex_h
    else:
        file = _sys_prefix / path_to_spatialindex_h
    if file.is_file():
        return str(file.parent.parent)

    # check if relative to lib
    # NOTE(review): load()._name is the path the library was loaded from
    libdir = Path(load()._name).parent
    file = libdir.parent / path_to_spatialindex_h
    if file.is_file():
        return str(file.parent.parent)

    # check system install
    file = Path("/usr") / path_to_spatialindex_h
    if file.is_file():
        return str(file.parent.parent)

    # not found
    return ""
--------------------------------------------------------------------------------
/rtree/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Toblerity/rtree/76656fd252d0c85dbef87bbd940160c40a3f114c/rtree/py.typed
--------------------------------------------------------------------------------
/scripts/install_libspatialindex.bat:
--------------------------------------------------------------------------------
python -c "import sys; print(sys.version)"

set SIDX_VERSION=2.1.0

curl -LO --retry 5 --retry-max-time 120 "https://github.com/libspatialindex/libspatialindex/archive/%SIDX_VERSION%.zip"

tar xvf "%SIDX_VERSION%.zip"

cd libspatialindex-%SIDX_VERSION%

mkdir build
cd build

pip install ninja

set INSTALL_PREFIX=%~dp0\..\rtree

cmake -G Ninja ^
    -D CMAKE_BUILD_TYPE=Release ^
    -D BUILD_SHARED_LIBS="ON" ^
    -D CMAKE_INSTALL_PREFIX="%INSTALL_PREFIX%" ^
    -D CMAKE_INSTALL_BINDIR=lib ^
    -D CMAKE_INSTALL_LIBDIR=libdir ^
    ..

ninja install

:: remove unneeded libdir
rmdir %INSTALL_PREFIX%\libdir /s /q

dir %INSTALL_PREFIX%
dir %INSTALL_PREFIX%\lib
dir %INSTALL_PREFIX%\include /s
--------------------------------------------------------------------------------
/scripts/install_libspatialindex.sh:
--------------------------------------------------------------------------------
#!/bin/sh
set -xe

# A simple script to install libspatialindex from a Github Release
VERSION=2.1.0
SHA256=86aa0925dd151ff9501a5965c4f8d7fb3dcd8accdc386a650dbdd62660399926

# where to copy resulting files
# this has to be run before `cd`-ing anywhere
install_prefix() {
    OURPWD=$PWD
    cd "$(dirname "$0")"
    cd ../rtree
    arr=$(pwd)
    cd "$OURPWD"
    echo $arr
}

scriptloc() {
    OURPWD=$PWD
    cd "$(dirname "$0")"
    arr=$(pwd)
    cd "$OURPWD"
    echo $arr
}
# note that we're doing this convoluted thing to get
# an absolute path so mac doesn't yell at us
INSTALL_PREFIX=`install_prefix`
SL=`scriptloc`

rm -f $VERSION.zip
curl -LOs --retry 5 --retry-max-time 120 https://github.com/libspatialindex/libspatialindex/archive/${VERSION}.zip

# check the file hash
if [ "$(uname)" = "Darwin" ]
then
    echo "${SHA256}  ${VERSION}.zip" | shasum -a 256 -c -
else
    echo "${SHA256}  ${VERSION}.zip" | sha256sum -c -
fi

rm -rf "libspatialindex-${VERSION}"
unzip -q $VERSION
cd libspatialindex-${VERSION}

mkdir build
cd build

printenv

if [ "$(uname)" = "Darwin" ]; then
    CMAKE_ARGS="-D CMAKE_OSX_ARCHITECTURES=${ARCHFLAGS##* } \
        -D CMAKE_INSTALL_RPATH=@loader_path"
fi

cmake ${CMAKE_ARGS} \
    -D CMAKE_BUILD_TYPE=Release \
    -D BUILD_SHARED_LIBS=ON \
    -D CMAKE_INSTALL_PREFIX=${INSTALL_PREFIX} \
    -D CMAKE_INSTALL_LIBDIR=lib \
    -D CMAKE_PLATFORM_NO_VERSIONED_SONAME=ON \
    ..
make -j 4

# copy built libraries relative to path of this script
make install

# remove unneeded extras in lib
rm -rfv ${INSTALL_PREFIX}/lib/cmake
rm -rfv ${INSTALL_PREFIX}/lib/pkgconfig

ls -R ${INSTALL_PREFIX}/lib
ls -R ${INSTALL_PREFIX}/include
--------------------------------------------------------------------------------
/scripts/repair_wheel.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import argparse
import os
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path


def main():
    # Repair a built wheel so it is independent of the Python
    # implementation/ABI, using the platform-appropriate repair tool.
    if sys.platform.startswith("linux"):
        os_ = "linux"
    elif sys.platform.startswith("darwin"):
        os_ = "macos"
    elif sys.platform.startswith("win32"):
        os_ = "windows"
    else:
        raise NotImplementedError(
            f"sys.platform '{sys.platform}' is not supported yet."
        )

    p = argparse.ArgumentParser(
        description="Convert wheel to be independent of python implementation and ABI"
    )
    p.set_defaults(prog=Path(sys.argv[0]).name)
    p.add_argument("WHEEL_FILE", help="Path to wheel file.")
    p.add_argument(
        "-w",
        "--wheel-dir",
        dest="WHEEL_DIR",
        help=('Directory to store delocated wheels (default: "wheelhouse/")'),
        default="wheelhouse/",
    )

    args = p.parse_args()

    file = Path(args.WHEEL_FILE).resolve(strict=True)
    wheelhouse = Path(args.WHEEL_DIR).resolve()
    wheelhouse.mkdir(parents=True, exist_ok=True)

    with tempfile.TemporaryDirectory() as tmpdir_:
        tmpdir = Path(tmpdir_)
        # use the platform specific repair tool first
        if os_ == "linux":
            # use path from cibuildwheel which allows auditwheel to create
            # rtree.libs/libspatialindex-*.so.*
            cibw_lib_path = "/project/rtree/lib"
            if os.environ.get("LD_LIBRARY_PATH"):  # append path
                os.environ["LD_LIBRARY_PATH"] += f"{os.pathsep}{cibw_lib_path}"
            else:
                os.environ["LD_LIBRARY_PATH"] = cibw_lib_path
            subprocess.run(
                ["auditwheel", "repair", "-w", str(tmpdir), str(file)], check=True
            )
        elif os_ == "macos":
            subprocess.run(
                [
                    "delocate-wheel",
                    # "--require-archs",
                    # "arm64,x86_64",
                    "-w",
                    str(tmpdir),
                    str(file),
                ],
                check=True,
            )
        elif os_ == "windows":
            # no specific tool, just copy
            shutil.copyfile(file, tmpdir / file.name)
        (file,) = tmpdir.glob("*.whl")

        # make this a py3 wheel
        subprocess.run(
            [
                "wheel",
                "tags",
                "--python-tag",
                "py3",
                "--abi-tag",
                "none",
                "--remove",
                str(file),
            ],
            check=True,
        )
        (file,) = tmpdir.glob("*.whl")
        # unpack
        subprocess.run(["wheel", "unpack", file.name], cwd=tmpdir, check=True)
        for unpackdir in tmpdir.iterdir():
            if unpackdir.is_dir():
                break
        else:
            raise RuntimeError("subdirectory not found")

        if os_ == "linux":
            # This is auditwheel's libs, which needs post-processing
            libs_dir = unpackdir / "rtree.libs"
            lsidx_list = list(libs_dir.glob("libspatialindex*.so*"))
            assert len(lsidx_list) == 1, list(libs_dir.iterdir())
            lsidx = lsidx_list[0]
            subprocess.run(["patchelf", "--set-rpath", "$ORIGIN", lsidx], check=True)
            # remove duplicated dir
            lib_dir = unpackdir / "rtree" / "lib"
            shutil.rmtree(lib_dir)
        # re-pack
        subprocess.run(["wheel", "pack", str(unpackdir.name)], cwd=tmpdir, check=True)
        files = list(tmpdir.glob("*.whl"))
        assert len(files) == 1, files
        file = files[0]
        file.rename(wheelhouse / file.name)


if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/scripts/visualize.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

import sys

from liblas import file
from osgeo import ogr

from rtree import index


def quick_create_layer_def(lyr, field_list):
    # Each field is a tuple of (name, type, width, precision)
    # Any of type, width and precision can be skipped. Default type is string.
14 | 15 | for field in field_list: 16 | name = field[0] 17 | if len(field) > 1: 18 | type = field[1] 19 | else: 20 | type = ogr.OFTString 21 | 22 | field_defn = ogr.FieldDefn(name, type) 23 | 24 | if len(field) > 2: 25 | field_defn.SetWidth(int(field[2])) 26 | 27 | if len(field) > 3: 28 | field_defn.SetPrecision(int(field[3])) 29 | 30 | lyr.CreateField(field_defn) 31 | 32 | field_defn.Destroy() 33 | 34 | 35 | shape_drv = ogr.GetDriverByName("ESRI Shapefile") 36 | 37 | shapefile_name = sys.argv[1].split(".")[0] 38 | shape_ds = shape_drv.CreateDataSource(shapefile_name) 39 | leaf_block_lyr = shape_ds.CreateLayer("leaf", geom_type=ogr.wkbPolygon) 40 | point_block_lyr = shape_ds.CreateLayer("point", geom_type=ogr.wkbPolygon) 41 | point_lyr = shape_ds.CreateLayer("points", geom_type=ogr.wkbPoint) 42 | 43 | quick_create_layer_def( 44 | leaf_block_lyr, [("BLK_ID", ogr.OFTInteger), ("COUNT", ogr.OFTInteger)] 45 | ) 46 | 47 | quick_create_layer_def( 48 | point_block_lyr, [("BLK_ID", ogr.OFTInteger), ("COUNT", ogr.OFTInteger)] 49 | ) 50 | 51 | quick_create_layer_def(point_lyr, [("ID", ogr.OFTInteger), ("BLK_ID", ogr.OFTInteger)]) 52 | 53 | p = index.Property() 54 | p.filename = sys.argv[1] 55 | p.overwrite = False 56 | 57 | p.storage = index.RT_Disk 58 | idx = index.Index(sys.argv[1]) 59 | 60 | leaves = idx.leaves() 61 | # leaves[0] == (0L, [2L, 92L, 51L, 55L, 26L], [-132.41727847799999, 62 | # -96.717721818399994, -132.41727847799999, -96.717721818399994]) 63 | 64 | f = file.File(sys.argv[1]) 65 | 66 | 67 | def area(minx, miny, maxx, maxy): 68 | width = abs(maxx - minx) 69 | height = abs(maxy - miny) 70 | 71 | return width * height 72 | 73 | 74 | def get_bounds(leaf_ids, lasfile, block_id): 75 | # read the first point and set the bounds to that 76 | 77 | p = lasfile.read(leaf_ids[0]) 78 | minx, maxx = p.x, p.x 79 | miny, maxy = p.y, p.y 80 | 81 | print(len(leaf_ids)) 82 | print(leaf_ids[0:10]) 83 | 84 | for p_id in leaf_ids: 85 | p = lasfile.read(p_id) 86 | minx = min(minx, 
p.x) 87 | maxx = max(maxx, p.x) 88 | miny = min(miny, p.y) 89 | maxy = max(maxy, p.y) 90 | feature = ogr.Feature(feature_def=point_lyr.GetLayerDefn()) 91 | g = ogr.CreateGeometryFromWkt(f"POINT ({p.x:.8f} {p.y:.8f})") 92 | feature.SetGeometry(g) 93 | feature.SetField("ID", p_id) 94 | feature.SetField("BLK_ID", block_id) 95 | result = point_lyr.CreateFeature(feature) 96 | del result 97 | 98 | return (minx, miny, maxx, maxy) 99 | 100 | 101 | def make_poly(minx, miny, maxx, maxy): 102 | wkt = ( 103 | f"POLYGON (({minx:.8f} {miny:.8f}, {maxx:.8f} {miny:.8f}, {maxx:.8f} " 104 | f"{maxy:.8f}, {minx:.8f} {maxy:.8f}, {minx:.8f} {miny:.8f}))" 105 | ) 106 | shp = ogr.CreateGeometryFromWkt(wkt) 107 | return shp 108 | 109 | 110 | def make_feature(lyr, geom, id, count): 111 | feature = ogr.Feature(feature_def=lyr.GetLayerDefn()) 112 | feature.SetGeometry(geom) 113 | feature.SetField("BLK_ID", id) 114 | feature.SetField("COUNT", count) 115 | result = lyr.CreateFeature(feature) 116 | del result 117 | 118 | 119 | t = 0 120 | for leaf in leaves: 121 | id = leaf[0] 122 | ids = leaf[1] 123 | count = len(ids) 124 | # import pdb;pdb.set_trace() 125 | 126 | if len(leaf[2]) == 4: 127 | minx, miny, maxx, maxy = leaf[2] 128 | else: 129 | minx, miny, maxx, maxy, minz, maxz = leaf[2] 130 | 131 | if id == 186: 132 | print(leaf[2]) 133 | 134 | print(leaf[2]) 135 | leaf = make_poly(minx, miny, maxx, maxy) 136 | print("leaf: " + str([minx, miny, maxx, maxy])) 137 | 138 | pminx, pminy, pmaxx, pmaxy = get_bounds(ids, f, id) 139 | point = make_poly(pminx, pminy, pmaxx, pmaxy) 140 | 141 | print("point: " + str([pminx, pminy, pmaxx, pmaxy])) 142 | print("point bounds: " + str([point.GetArea(), area(pminx, pminy, pmaxx, pmaxy)])) 143 | print("leaf bounds: " + str([leaf.GetArea(), area(minx, miny, maxx, maxy)])) 144 | print("leaf - point: " + str([abs(point.GetArea() - leaf.GetArea())])) 145 | print([minx, miny, maxx, maxy]) 146 | # if shp2.GetArea() != shp.GetArea(): 147 | # import pdb;pdb.set_trace() 
148 | # sys.exit(1) 149 | 150 | make_feature(leaf_block_lyr, leaf, id, count) 151 | make_feature(point_block_lyr, point, id, count) 152 | 153 | t += 1 154 | # if t ==2: 155 | # break 156 | 157 | leaf_block_lyr.SyncToDisk() 158 | point_lyr.SyncToDisk() 159 | 160 | shape_ds.Destroy() 161 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from pathlib import Path 3 | 4 | from setuptools import setup 5 | from setuptools.command.install import install 6 | from setuptools.dist import Distribution 7 | from wheel.bdist_wheel import bdist_wheel as _bdist_wheel 8 | 9 | # current working directory of this setup.py file 10 | _cwd = Path(__file__).resolve().parent 11 | 12 | 13 | class bdist_wheel(_bdist_wheel): # type: ignore[misc] 14 | def finalize_options(self) -> None: 15 | _bdist_wheel.finalize_options(self) 16 | self.root_is_pure = False 17 | 18 | 19 | class BinaryDistribution(Distribution): # type: ignore[misc] 20 | """Distribution which always forces a binary package with platform name""" 21 | 22 | def has_ext_modules(foo) -> bool: 23 | return True 24 | 25 | 26 | class InstallPlatlib(install): # type: ignore[misc] 27 | def finalize_options(self) -> None: 28 | """ 29 | Copy the shared libraries and header files into the wheel. Note that 30 | this will *only* check in `rtree/lib` and `include` rather than 31 | anywhere on the system so if you are building a wheel you *must* copy 32 | or symlink the `.so`/`.dll`/`.dylib` files into `rtree/lib` and 33 | `.h` into `rtree/include`. 
34 | """ 35 | install.finalize_options(self) 36 | if self.distribution.has_ext_modules(): 37 | self.install_lib = self.install_platlib 38 | 39 | # source files to copy 40 | source_dir = _cwd / "rtree" 41 | 42 | # destination for the files in the build directory 43 | target_dir = Path(self.build_lib) / "rtree" 44 | 45 | # copy lib tree 46 | source_lib = source_dir / "lib" 47 | if source_lib.is_dir(): 48 | target_lib = target_dir / "lib" 49 | self.copy_tree(str(source_lib), str(target_lib)) 50 | 51 | # copy include tree 52 | source_include = source_dir / "include" 53 | if source_include.is_dir(): 54 | target_include = target_dir / "include" 55 | self.copy_tree(str(source_include), str(target_include)) 56 | 57 | 58 | # See pyproject.toml for other project metadata 59 | setup( 60 | name="rtree", 61 | distclass=BinaryDistribution, 62 | cmdclass={"bdist_wheel": bdist_wheel, "install": InstallPlatlib}, 63 | ) 64 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Toblerity/rtree/76656fd252d0c85dbef87bbd940160c40a3f114c/tests/__init__.py -------------------------------------------------------------------------------- /tests/boxes_15x15.data: -------------------------------------------------------------------------------- 1 | 34.3776829412 26.7375853734 49.3776829412 41.7375853734 2 | -51.7912278527 56.5716384064 -36.7912278527 71.5716384064 3 | -132.417278478 -96.7177218184 -117.417278478 -81.7177218184 4 | 19.9788779448 -53.1068061438 34.9788779448 -38.1068061438 5 | 50.9432853241 53.830194296 65.9432853241 68.830194296 6 | 114.777310066 -42.0534139041 129.777310066 -27.0534139041 7 | -80.5201136918 -60.5173650142 -65.5201136918 -45.5173650142 8 | -109.709042971 -88.8853631128 -94.7090429709 -73.8853631128 9 | 163.797701593 49.0535662325 178.797701593 64.0535662325 10 | 119.52474488 -47.8047995045 
134.52474488 -32.8047995045 11 | -49.6358346107 25.7591536504 -34.6358346107 40.7591536504 12 | 43.1951329802 -61.7003551556 58.1951329802 -46.7003551556 13 | 5.07182469992 -32.9621617938 20.0718246999 -17.9621617938 14 | 157.392784956 -59.9967638674 172.392784956 -44.9967638674 15 | 169.761387556 77.3118040104 184.761387556 92.3118040104 16 | -90.9030625259 23.7969275036 -75.9030625259 38.7969275036 17 | 13.3161023563 35.5651016032 28.3161023563 50.5651016032 18 | -71.4124633746 -27.8098115487 -56.4124633746 -12.8098115487 19 | -101.490578923 40.5161619529 -86.4905789231 55.5161619529 20 | -22.5493804457 -9.48190527182 -7.54938044566 5.51809472818 21 | 22.7819453953 81.6043699778 37.7819453953 96.6043699778 22 | 163.851232856 52.6576397095 178.851232856 67.6576397095 23 | 8.7520267341 -82.9532179134 23.7520267341 -67.9532179134 24 | -25.1295517688 -52.9753074372 -10.1295517688 -37.9753074372 25 | 125.380855923 53.093317371 140.380855923 68.093317371 26 | -79.9963004315 -8.58901526761 -64.9963004315 6.41098473239 27 | -3.49476632412 -93.5592177527 11.5052336759 -78.5592177527 28 | 5.12311663372 38.9766284779 20.1231166337 53.9766284779 29 | -126.802193031 72.7620993955 -111.802193031 87.7620993955 30 | 144.816733092 33.8296664631 159.816733092 48.8296664631 31 | -124.187243051 30.4856075292 -109.187243051 45.4856075292 32 | 63.8011147852 -64.8232471563 78.8011147852 -49.8232471563 33 | 125.091625278 10.0243913301 140.091625278 25.0243913301 34 | -79.6265618345 37.4238531184 -64.6265618345 52.4238531184 35 | 84.0917344559 -61.9889564492 99.0917344559 -46.9889564492 36 | 44.1303873224 36.9948838398 59.1303873224 51.9948838398 37 | 57.579189376 -44.3308895399 72.579189376 -29.3308895399 38 | -135.915887605 -68.4604833795 -120.915887605 -53.4604833795 39 | -52.5931165731 -83.132095062 -37.5931165731 -68.132095062 40 | -3.66134703734 -24.6160151663 11.3386529627 -9.61601516627 41 | 50.9138603775 6.66349450637 65.9138603775 21.6634945064 42 | -59.0308862561 
-28.7050068456 -44.0308862561 -13.7050068456 43 | 51.6601755093 -32.4794848001 66.6601755093 -17.4794848001 44 | -174.739939684 35.8453347176 -159.739939684 50.8453347176 45 | -107.905359545 -33.9905804035 -92.9053595447 -18.9905804035 46 | -43.8298865873 -38.8139629115 -28.8298865873 -23.8139629115 47 | -186.673789279 15.8707951216 -171.673789279 30.8707951216 48 | 13.0878151873 18.9267257542 28.0878151873 33.9267257542 49 | -19.7764534411 -15.1648038653 -4.7764534411 -0.16480386529 50 | -136.725385806 -62.3357813894 -121.725385806 -47.3357813894 51 | 56.3180682679 27.7748493606 71.3180682679 42.7748493606 52 | -117.234207271 -95.984091959 -102.234207271 -80.984091959 53 | -112.676334783 69.8614225716 -97.6763347829 84.8614225716 54 | 63.4481415226 49.5185084111 78.4481415226 64.5185084111 55 | -164.583933393 -24.3224792074 -149.583933393 -9.32247920738 56 | 29.8740632141 -94.4036564677 44.8740632141 -79.4036564677 57 | 111.222002785 27.3091348937 126.222002785 42.3091348937 58 | 153.388416036 -51.7982686059 168.388416036 -36.7982686059 59 | 101.187835391 -79.2096166175 116.187835391 -64.2096166175 60 | 88.5716895369 -0.592196575665 103.571689537 14.4078034243 61 | 121.697565289 -20.4740930579 136.697565289 -5.47409305786 62 | -57.6430699458 32.6596016791 -42.6430699458 47.6596016791 63 | -51.9988160106 -16.5263906642 -36.9988160106 -1.52639066423 64 | -128.45654531 40.0833021378 -113.45654531 55.0833021378 65 | 104.084274855 1.04302798395 119.084274855 16.0430279839 66 | -65.3078063084 52.8659272125 -50.3078063084 67.8659272125 67 | -185.575231871 0.603830128936 -170.575231871 15.6038301289 68 | -99.670852574 63.077063843 -84.670852574 78.077063843 69 | -97.5397037499 24.1544066414 -82.5397037499 39.1544066414 70 | 17.1213365558 80.8998469932 32.1213365558 95.8998469932 71 | -66.0514693697 -67.879371904 -51.0514693697 -52.879371904 72 | -165.624597131 -28.2121530482 -150.624597131 -13.2121530482 73 | -153.938620771 -22.5333324395 -138.938620771 -7.5333324395 74 | 
108.059653776 -30.1015722619 123.059653776 -15.1015722619 75 | 66.3357992327 33.4460170804 81.3357992327 48.4460170804 76 | 122.051245261 62.1986667929 137.051245261 77.1986667929 77 | -9.14331797752 -4.94220638202 5.85668202248 10.057793618 78 | -6.21767716831 -37.4474638489 8.78232283169 -22.4474638489 79 | -10.2422235441 -36.7771789022 4.75777645591 -21.7771789022 80 | 151.39952872 5.78259379576 166.39952872 20.7825937958 81 | 53.0412866301 27.1060539476 68.0412866301 42.1060539476 82 | -179.969415049 -86.9431323167 -164.969415049 -71.9431323167 83 | -122.143517094 52.4812451482 -107.143517094 67.4812451482 84 | 126.651232891 -71.3593917404 141.651232891 -56.3593917404 85 | 35.5628371672 -44.4833782826 50.5628371672 -29.4833782826 86 | 106.338230585 74.4980976394 121.338230585 89.4980976394 87 | 2.49246106376 64.4571886404 17.4924610638 79.4571886404 88 | 26.9239556956 74.8154250821 41.9239556956 89.8154250821 89 | -145.467051901 -23.3901235678 -130.467051901 -8.39012356782 90 | -31.1747618493 -78.3450857919 -16.1747618493 -63.3450857919 91 | -45.6363494594 41.8549865381 -30.6363494594 56.8549865381 92 | -139.598628861 -76.0620586165 -124.598628861 -61.0620586165 93 | 75.3893757582 -96.3227872859 90.3893757582 -81.3227872859 94 | 66.4127845964 -29.3758752649 81.4127845964 -14.3758752649 95 | 71.002709831 5.93248532466 86.002709831 20.9324853247 96 | -166.73585749 -91.958750292 -151.73585749 -76.958750292 97 | -122.966652056 -44.5184865975 -107.966652056 -29.5184865975 98 | -114.787601823 -21.1179486167 -99.7876018227 -6.11794861667 99 | -37.7449906403 -70.1494304858 -22.7449906403 -55.1494304858 100 | 70.2802523802 34.6578320934 85.2802523802 49.6578320934 101 | -------------------------------------------------------------------------------- /tests/common.py: -------------------------------------------------------------------------------- 1 | """Common test functions.""" 2 | 3 | import pytest 4 | 5 | from rtree.core import rt 6 | 7 | sidx_version_string = 
rt.SIDX_Version().decode() 8 | sidx_version = tuple(map(int, sidx_version_string.split(".", maxsplit=3)[:3])) 9 | 10 | skip_sidx_lt_210 = pytest.mark.skipif(sidx_version < (2, 1, 0), reason="SIDX < 2.1.0") 11 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import shutil 5 | from collections.abc import Iterator 6 | 7 | import numpy 8 | import py 9 | import pytest 10 | 11 | from .common import sidx_version_string 12 | 13 | data_files = ["boxes_15x15.data"] 14 | 15 | 16 | @pytest.fixture(autouse=True) 17 | def temporary_working_directory(tmpdir: py.path.local) -> Iterator[None]: 18 | for filename in data_files: 19 | filename = os.path.join(os.path.dirname(__file__), filename) 20 | shutil.copy(filename, str(tmpdir)) 21 | with tmpdir.as_cwd(): 22 | yield 23 | 24 | 25 | def pytest_report_header(config): 26 | """Header for pytest.""" 27 | vers = [ 28 | f"SIDX version: {sidx_version_string}", 29 | f"NumPy version: {numpy.__version__}", 30 | ] 31 | return "\n".join(vers) 32 | -------------------------------------------------------------------------------- /tests/rungrind.dist: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | valgrind --tool=memcheck --leak-check=yes --suppressions=/home/sean/Projects/valgrind-python.supp python test_doctests.py 3 | -------------------------------------------------------------------------------- /tests/test_finder.py: -------------------------------------------------------------------------------- 1 | from ctypes import CDLL 2 | from pathlib import Path 3 | 4 | from rtree import finder 5 | 6 | 7 | def test_load(): 8 | lib = finder.load() 9 | assert isinstance(lib, CDLL) 10 | 11 | 12 | def test_get_include(): 13 | incl = finder.get_include() 14 | assert isinstance(incl, str) 15 | if incl: 16 | path = 
Path(incl) 17 | assert path.is_dir() 18 | assert (path / "spatialindex").is_dir() 19 | assert (path / "spatialindex" / "SpatialIndex.h").is_file() 20 | -------------------------------------------------------------------------------- /tests/test_index.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import ctypes 4 | import pickle 5 | import sys 6 | import tempfile 7 | import unittest 8 | from collections.abc import Iterator 9 | 10 | import numpy as np 11 | import pytest 12 | 13 | import rtree 14 | from rtree import core, index 15 | from rtree.exceptions import RTreeError 16 | 17 | from .common import skip_sidx_lt_210 18 | 19 | 20 | class IndexTestCase(unittest.TestCase): 21 | def setUp(self) -> None: 22 | self.boxes15 = np.genfromtxt("boxes_15x15.data") 23 | self.idx = index.Index() 24 | for i, coords in enumerate(self.boxes15): 25 | self.idx.add(i, coords) 26 | 27 | def boxes15_stream( 28 | self, interleaved: bool = True 29 | ) -> Iterator[tuple[int, tuple[float, float, float, float], int]]: 30 | boxes15 = np.genfromtxt("boxes_15x15.data") 31 | for i, (minx, miny, maxx, maxy) in enumerate(boxes15): 32 | if interleaved: 33 | yield (i, (minx, miny, maxx, maxy), 42) 34 | else: 35 | yield (i, (minx, maxx, miny, maxy), 42) 36 | 37 | def stream_basic(self) -> None: 38 | # some versions of libspatialindex screw up indexes on stream loading 39 | # so do a very simple index check 40 | rtree_test = rtree.index.Index( 41 | [(1564, [0, 0, 0, 10, 10, 10], None)], 42 | properties=rtree.index.Property(dimension=3), 43 | ) 44 | assert next(rtree_test.intersection([1, 1, 1, 2, 2, 2])) == 1564 45 | 46 | 47 | class IndexCount(unittest.TestCase): 48 | def setUp(self) -> None: 49 | self.boxes15 = np.genfromtxt("boxes_15x15.data") 50 | self.idx = index.Index() 51 | for i, coords in enumerate(self.boxes15): 52 | self.idx.add(i, coords) 53 | 54 | def test_len(self) -> None: 55 | 
# tests/test_index.py (continued — completes IndexCount.test_len)
        self.assertEqual(len(self.idx), len(self.boxes15))

    def test_get_size(self) -> None:
        # get_size() is deprecated in favour of len(); must still agree.
        with pytest.deprecated_call():
            self.assertEqual(self.idx.get_size(), len(self.boxes15))


class IndexBounds(unittest.TestCase):
    # Bad coordinate tuples must raise rather than silently corrupt the index.
    def test_invalid_specifications(self) -> None:
        """Invalid specifications of bounds properly throw"""

        idx = index.Index()
        self.assertRaises(RTreeError, idx.add, None, (0.0, 0.0, -1.0, 1.0))
        self.assertRaises(RTreeError, idx.intersection, (0.0, 0.0, -1.0, 1.0))
        self.assertRaises(ctypes.ArgumentError, idx.add, None, (1, 1))


class IndexProperties(IndexTestCase):
    # Exercises getters/setters on rtree.index.Property and Rtree.
    @pytest.mark.skipif(
        not hasattr(core.rt, "Index_GetResultSetOffset"),
        reason="Index_GetResultsSetOffset required in libspatialindex",
    )
    def test_result_offset(self) -> None:
        idx = index.Rtree()
        idx.set_result_offset(3)
        self.assertEqual(idx.result_offset, 3)

    @pytest.mark.skipif(
        not hasattr(core.rt, "Index_GetResultSetLimit"),
        reason="Index_GetResultsSetOffset required in libspatialindex",
    )
    def test_result_limit(self) -> None:
        idx = index.Rtree()
        idx.set_result_limit(44)
        self.assertEqual(idx.result_limit, 44)

    def test_invalid_properties(self) -> None:
        """Invalid values are guarded"""
        p = index.Property()

        self.assertRaises(RTreeError, p.set_buffering_capacity, -4321)
        self.assertRaises(RTreeError, p.set_region_pool_capacity, -4321)
        self.assertRaises(RTreeError, p.set_point_pool_capacity, -4321)
        self.assertRaises(RTreeError, p.set_index_pool_capacity, -4321)
        self.assertRaises(RTreeError, p.set_pagesize, -4321)
        self.assertRaises(RTreeError, p.set_index_capacity, -4321)
        self.assertRaises(RTreeError, p.set_storage, -4321)
        self.assertRaises(RTreeError, p.set_variant, -4321)
        self.assertRaises(RTreeError, p.set_dimension, -2)
        self.assertRaises(RTreeError, p.set_index_type, 6)
        self.assertRaises(RTreeError, p.get_index_id)

    def test_index_properties(self) -> None:
        """Setting index properties returns expected values"""
        idx = index.Rtree()
        p = index.Property()

        p.leaf_capacity = 100
        p.fill_factor = 0.5
        p.index_capacity = 10
        p.near_minimum_overlap_factor = 7
        p.buffering_capacity = 10
        p.variant = 0
        p.dimension = 3
        p.storage = 0
        p.pagesize = 4096
        p.index_pool_capacity = 1500
        p.point_pool_capacity = 1600
        p.region_pool_capacity = 1700
        p.tight_mbr = True
        p.overwrite = True
        p.writethrough = True
        p.tpr_horizon = 20.0
        p.reinsert_factor = 0.3
        p.idx_extension = "index"
        p.dat_extension = "data"

        idx = index.Index(properties=p)

        # Every property set above must round-trip through the index.
        props = idx.properties
        self.assertEqual(props.leaf_capacity, 100)
        self.assertEqual(props.fill_factor, 0.5)
        self.assertEqual(props.index_capacity, 10)
        self.assertEqual(props.near_minimum_overlap_factor, 7)
        self.assertEqual(props.buffering_capacity, 10)
        self.assertEqual(props.variant, 0)
        self.assertEqual(props.dimension, 3)
        self.assertEqual(props.storage, 0)
        self.assertEqual(props.pagesize, 4096)
        self.assertEqual(props.index_pool_capacity, 1500)
        self.assertEqual(props.point_pool_capacity, 1600)
        self.assertEqual(props.region_pool_capacity, 1700)
        self.assertEqual(props.tight_mbr, True)
        self.assertEqual(props.overwrite, True)
        self.assertEqual(props.writethrough, True)
        self.assertEqual(props.tpr_horizon, 20.0)
        self.assertEqual(props.reinsert_factor, 0.3)
        self.assertEqual(props.idx_extension, "index")
        self.assertEqual(props.dat_extension, "data")


class TestPickling(unittest.TestCase):
    # https://github.com/Toblerity/rtree/issues/87
    @pytest.mark.xfail
    def test_index(self) -> None:
        idx = rtree.index.Index()
        idx.insert(0, [0, 1, 2, 3], 4)
        unpickled = pickle.loads(pickle.dumps(idx))
        self.assertNotEqual(idx.handle, unpickled.handle)
        self.assertEqual(idx.properties.as_dict(), unpickled.properties.as_dict())
        self.assertEqual(idx.interleaved, unpickled.interleaved)
        self.assertEqual(len(idx), len(unpickled))
        self.assertEqual(idx.bounds, unpickled.bounds)
        a = next(idx.intersection(idx.bounds, objects=True))
        b = next(unpickled.intersection(unpickled.bounds, objects=True))
        self.assertEqual(a.id, b.id)
        self.assertEqual(a.bounds, b.bounds)
        self.assertEqual(a.object, b.object)

    def test_property(self) -> None:
        p = rtree.index.Property()
        unpickled = pickle.loads(pickle.dumps(p))
        self.assertNotEqual(p.handle, unpickled.handle)
        self.assertEqual(p.as_dict(), unpickled.as_dict())


class IndexContainer(IndexTestCase):
    def test_container(self) -> None:
        """rtree.index.RtreeContainer works as expected"""

        container = rtree.index.RtreeContainer()
        objects = list()

        for coordinates in self.boxes15:
            objects.append(object())
            container.insert(objects[-1], coordinates)

        self.assertEqual(len(container), len(self.boxes15))
        assert all(obj in container for obj in objects)

        for obj, coordinates in zip(objects, self.boxes15[:5]):
            container.delete(obj, coordinates)

        assert all(obj in container for obj in objects[5:])
        assert all(obj not in container for obj in objects[:5])
        assert len(container) == len(self.boxes15) - 5

        with pytest.raises(IndexError):
            container.delete(objects[0], self.boxes15[0])

        # Insert duplicate object, at different location
        container.insert(objects[5], self.boxes15[0])
        assert objects[5] in container
        # And then delete it, but check object still present
        container.delete(objects[5], self.boxes15[0])
        assert objects[5] in container

        # Intersection
        obj = objects[10]
        results = container.intersection(self.boxes15[10])
        assert obj in results

        # Intersection with bbox
        obj = objects[10]
        results = container.intersection(self.boxes15[10], bbox=True)
        result = [result for result in results if result.object is obj][0]
        assert np.array_equal(result.bbox, self.boxes15[10])

        # Nearest
        obj = objects[8]
        results = container.intersection(self.boxes15[8])
        assert obj in results

        # Nearest with bbox
        obj = objects[8]
        results = container.nearest(self.boxes15[8], bbox=True)
        result = [result for result in results if result.object is obj][0]
        assert np.array_equal(result.bbox, self.boxes15[8])

        # Test iter method
        assert objects[12] in set(container)


class IndexIntersection(IndexTestCase):
    def test_intersection(self) -> None:
        """Test basic insertion and retrieval"""

        self.assertTrue(0 in self.idx.intersection((0, 0, 60, 60)))
        hits = list(self.idx.intersection((0, 0, 60, 60)))

        self.assertEqual(len(hits), 10)
        self.assertEqual(hits, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80])

    def test_objects(self) -> None:
        """Test insertion of objects"""

        idx = index.Index()
        for i, coords in enumerate(self.boxes15):
            idx.add(i, coords)
        idx.insert(
            4321, (34.3776829412, 26.7375853734, 49.3776829412, 41.7375853734), obj=42
        )
        hits = idx.intersection((0, 0, 60, 60), objects=True)
        hit = [h for h in hits if h.id == 4321][0]
        self.assertEqual(hit.id, 4321)
        self.assertEqual(hit.object, 42)
        box = [f"{t:.10f}" for t in hit.bbox]
        expected = ["34.3776829412", "26.7375853734", "49.3776829412", "41.7375853734"]
        self.assertEqual(box, expected)

    def test_double_insertion(self) -> None:
        """Inserting the same id twice does not overwrite data"""
        idx = index.Index()
        idx.add(1, (2, 2))
        idx.add(1, (3, 3))

        self.assertEqual([1, 1], list(idx.intersection((0, 0, 5, 5))))

    @skip_sidx_lt_210
    def test_intersection_v(self) -> None:
        # Vectorized intersection: two query boxes at once via (dim, nqueries).
        mins = np.array([[0, 1]] * 2).T
        maxs = np.array([[60, 50]] * 2).T
        ret = self.idx.intersection_v(mins, maxs)
        assert type(ret) is tuple
        ids, counts = ret
        assert ids.dtype == np.int64
        ids0 = [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]
        ids1 = [0, 16, 27, 35, 47, 76]
        assert ids.tolist() == ids0 + ids1
        assert counts.dtype == np.uint64
        assert counts.tolist() == [len(ids0), len(ids1)]

        # errors
        with pytest.raises(ValueError, match="must have 2 dimensions"):
            self.idx.intersection_v(np.ones((2, 3, 4)), 4)
        with pytest.raises(ValueError, match="shapes not equal"):
            self.idx.intersection_v([0], [10, 12])


class TestIndexIntersectionUnion:
    # `&` and `|` on indexes: fixtures a/b in both interleaved orders.
    @pytest.fixture(scope="class")
    def index_a_interleaved(self) -> index.Index:
        idx = index.Index(interleaved=True)
        idx.insert(1, (3, 3, 5, 5), "a_1")
        idx.insert(2, (4, 2, 6, 4), "a_2")
        return idx

    @pytest.fixture(scope="class")
    def index_a_uninterleaved(self) -> index.Index:
        idx = index.Index(interleaved=False)
        idx.insert(1, (3, 5, 3, 5), "a_1")
        idx.insert(2, (4, 6, 2, 4), "a_2")
        return idx

    @pytest.fixture(scope="class")
    def index_b_interleaved(self) -> index.Index:
        idx = index.Index(interleaved=True)
        idx.insert(3, (2, 1, 7, 6), "b_3")
        idx.insert(4, (8, 7, 9, 8), "b_4")
        return idx

    @pytest.fixture(scope="class")
    def index_b_uninterleaved(self) -> index.Index:
        idx = index.Index(interleaved=False)
        idx.insert(3, (2, 7, 1, 6), "b_3")
        idx.insert(4, (8, 9, 7, 8), "b_4")
        return idx

    def test_intersection_interleaved(
        self, index_a_interleaved: index.Index, index_b_interleaved: index.Index
    ) -> None:
        index_c_interleaved = index_a_interleaved & index_b_interleaved
        assert index_c_interleaved.interleaved
        assert len(index_c_interleaved) == 2
        for hit in index_c_interleaved.intersection(
            index_c_interleaved.bounds, objects=True
        ):
            if hit.bbox == [3.0, 3.0, 5.0, 5.0]:
                assert hit.object == ("a_1", "b_3")
            elif hit.bbox == [4.0, 2.0, 6.0, 4.0]:
                assert hit.object == ("a_2", "b_3")
            else:
                assert False

    @skip_sidx_lt_210
    def test_intersection_v_interleaved(
        self, index_a_interleaved: index.Index, index_b_interleaved: index.Index
    ) -> None:
        index_c_interleaved = index_a_interleaved & index_b_interleaved
        mins = index_c_interleaved.bounds[0:2]
        maxs = index_c_interleaved.bounds[2:4]
        idxs, counts = index_c_interleaved.intersection_v(mins, maxs)
        assert idxs.tolist() == [0, 1]
        assert counts.tolist() == [2]

    def test_intersection_uninterleaved(
        self, index_a_uninterleaved: index.Index, index_b_uninterleaved: index.Index
    ) -> None:
        index_c_uninterleaved = index_a_uninterleaved & index_b_uninterleaved
        assert not index_c_uninterleaved.interleaved
        assert len(index_c_uninterleaved) == 2
        for hit in index_c_uninterleaved.intersection(
            index_c_uninterleaved.bounds, objects=True
        ):
            if hit.bounds == [3.0, 5.0, 3.0, 5.0]:
                assert hit.object == ("a_1", "b_3")
            elif hit.bounds == [4.0, 6.0, 2.0, 4.0]:
                assert hit.object == ("a_2", "b_3")
            else:
                assert False

    @skip_sidx_lt_210
    def test_intersection_v_uninterleaved(
        self, index_a_uninterleaved: index.Index, index_b_uninterleaved: index.Index
    ) -> None:
        index_c_uninterleaved = index_a_uninterleaved & index_b_uninterleaved
        mins = index_c_uninterleaved.bounds[0::2]
        maxs = index_c_uninterleaved.bounds[1::2]
        idxs, counts = index_c_uninterleaved.intersection_v(mins, maxs)
        assert idxs.tolist() == [0, 1]
        assert counts.tolist() == [2]

    def test_intersection_mismatch(
        self, index_a_interleaved: index.Index, index_b_uninterleaved: index.Index
    ) -> None:
        # Mixing interleaved and uninterleaved indexes must fail.
        with pytest.raises(AssertionError):
            index_a_interleaved & index_b_uninterleaved

    def test_union_interleaved(
        self, index_a_interleaved: index.Index, index_b_interleaved: index.Index
    ) -> None:
        index_c_interleaved = index_a_interleaved | index_b_interleaved
        assert index_c_interleaved.interleaved
        assert len(index_c_interleaved) == 4
        for hit in index_c_interleaved.intersection(
            index_c_interleaved.bounds, objects=True
        ):
            if hit.bbox == [3.0, 3.0, 5.0, 5.0]:
                assert hit.object == "a_1"
            elif hit.bbox == [4.0, 2.0, 6.0, 4.0]:
                assert hit.object == "a_2"
            elif hit.bbox == [2.0, 1.0, 7.0, 6.0]:
                assert hit.object == "b_3"
            elif hit.bbox == [8.0, 7.0, 9.0, 8.0]:
                assert hit.object == "b_4"
            else:
                assert False

    def test_union_uninterleaved(
        self, index_a_uninterleaved: index.Index, index_b_uninterleaved: index.Index
    ) -> None:
        index_c_uninterleaved = index_a_uninterleaved | index_b_uninterleaved
        assert not index_c_uninterleaved.interleaved
        assert len(index_c_uninterleaved) == 4
        for hit in index_c_uninterleaved.intersection(
            index_c_uninterleaved.bounds, objects=True
        ):
            if hit.bounds == [3.0, 5.0, 3.0, 5.0]:
                assert hit.object == "a_1"
            elif hit.bounds == [4.0, 6.0, 2.0, 4.0]:
                assert hit.object == "a_2"
            elif hit.bounds == [2.0, 7.0, 1.0, 6.0]:
                assert hit.object == "b_3"
            elif hit.bounds == [8.0, 9.0, 7.0, 8.0]:
                assert hit.object == "b_4"
            else:
                assert False

    def test_union_mismatch(
        self, index_a_interleaved: index.Index, index_b_uninterleaved: index.Index
    ) -> None:
        with pytest.raises(AssertionError):
            index_a_interleaved | index_b_uninterleaved


class IndexSerialization(unittest.TestCase):
    # Persistence to disk, pickling of the payload, custom file extensions.
    def setUp(self) -> None:
        self.boxes15 = np.genfromtxt("boxes_15x15.data")

    def boxes15_stream(
        self, interleaved: bool = True
    ) -> Iterator[tuple[int, tuple[float, float, float, float], int]]:
        for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15):
            if interleaved:
                yield (i, (minx, miny, maxx, maxy), 42)
            else:
                yield (i, (minx, maxx, miny, maxy), 42)

    def test_unicode_filenames(self) -> None:
        """Unicode filenames work as expected"""
        tname = tempfile.mktemp()
        filename = tname + "gilename\u4500abc"
        idx = index.Index(filename)
        idx.insert(
            4321, (34.3776829412, 26.7375853734, 49.3776829412, 41.7375853734), obj=42
        )

    def test_pickling(self) -> None:
        """Pickling works as expected"""

        idx = index.Index()
        import json

        some_data = {"a": 22, "b": [1, "ccc"]}

        # https://github.com/python/mypy/issues/2427
        idx.dumps = lambda obj: json.dumps(obj).encode(  # type: ignore[assignment]
            "utf-8"
        )
        idx.loads = lambda string: json.loads(  # type: ignore[assignment]
            string.decode("utf-8")
        )

        idx.add(0, (0, 0, 1, 1), some_data)

        self.assertEqual(list(idx.nearest((0, 0), 1, objects="raw"))[0], some_data)

    def test_custom_filenames(self) -> None:
        """Test using custom filenames for index serialization"""
        p = index.Property()
        p.dat_extension = "data"
        p.idx_extension = "index"
        tname = tempfile.mktemp()
        idx = index.Index(tname, properties=p)
        for i, coords in enumerate(self.boxes15):
            idx.add(i, coords)

        hits = 
list(idx.intersection((0, 0, 60, 60))) 483 | self.assertEqual(len(hits), 10) 484 | self.assertEqual(hits, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]) 485 | del idx 486 | 487 | # Check we can reopen the index and get the same results 488 | idx2 = index.Index(tname, properties=p) 489 | hits = list(idx2.intersection((0, 0, 60, 60))) 490 | self.assertEqual(len(hits), 10) 491 | self.assertEqual(hits, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]) 492 | 493 | @pytest.mark.skipif(not sys.maxsize > 2**32, reason="Fails on 32bit systems") 494 | def test_interleaving(self) -> None: 495 | """Streaming against a persisted index without interleaving""" 496 | 497 | def data_gen( 498 | interleaved: bool = True, 499 | ) -> Iterator[tuple[int, tuple[float, float, float, float], int]]: 500 | for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15): 501 | if interleaved: 502 | yield (i, (minx, miny, maxx, maxy), 42) 503 | else: 504 | yield (i, (minx, maxx, miny, maxy), 42) 505 | 506 | p = index.Property() 507 | tname = tempfile.mktemp() 508 | idx = index.Index( 509 | tname, data_gen(interleaved=False), properties=p, interleaved=False 510 | ) 511 | hits1 = sorted(list(idx.intersection((0, 60, 0, 60)))) 512 | self.assertEqual(len(hits1), 10) 513 | self.assertEqual(hits1, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]) 514 | 515 | leaves = idx.leaves() 516 | expected = [ 517 | ( 518 | 0, 519 | [ 520 | 2, 521 | 92, 522 | 51, 523 | 55, 524 | 26, 525 | 95, 526 | 7, 527 | 81, 528 | 38, 529 | 22, 530 | 58, 531 | 89, 532 | 91, 533 | 83, 534 | 98, 535 | 37, 536 | 70, 537 | 31, 538 | 49, 539 | 34, 540 | 11, 541 | 6, 542 | 13, 543 | 3, 544 | 23, 545 | 57, 546 | 9, 547 | 96, 548 | 84, 549 | 36, 550 | 5, 551 | 45, 552 | 77, 553 | 78, 554 | 44, 555 | 12, 556 | 42, 557 | 73, 558 | 93, 559 | 41, 560 | 71, 561 | 17, 562 | 39, 563 | 54, 564 | 88, 565 | 72, 566 | 97, 567 | 60, 568 | 62, 569 | 48, 570 | 19, 571 | 25, 572 | 76, 573 | 59, 574 | 66, 575 | 64, 576 | 79, 577 | 94, 578 | 40, 579 | 32, 580 | 46, 581 | 47, 582 | 
15, 583 | 68, 584 | 10, 585 | 0, 586 | 80, 587 | 56, 588 | 50, 589 | 30, 590 | ], 591 | [-186.673789279, -96.7177218184, 172.392784956, 45.4856075292], 592 | ), 593 | ( 594 | 2, 595 | [ 596 | 61, 597 | 74, 598 | 29, 599 | 99, 600 | 16, 601 | 43, 602 | 35, 603 | 33, 604 | 27, 605 | 63, 606 | 18, 607 | 90, 608 | 8, 609 | 53, 610 | 82, 611 | 21, 612 | 65, 613 | 24, 614 | 4, 615 | 1, 616 | 75, 617 | 67, 618 | 86, 619 | 52, 620 | 28, 621 | 85, 622 | 87, 623 | 14, 624 | 69, 625 | 20, 626 | ], 627 | [-174.739939684, 32.6596016791, 184.761387556, 96.6043699778], 628 | ), 629 | ] 630 | 631 | # go through the traversal and see if everything is close 632 | assert all( 633 | all(np.allclose(a, b) for a, b in zip(L, E)) # type: ignore 634 | for L, E in zip(leaves, expected) 635 | ) 636 | 637 | hits2 = sorted(list(idx.intersection((0, 60, 0, 60), objects=True))) 638 | self.assertEqual(len(hits2), 10) 639 | self.assertEqual(hits2[0].object, 42) 640 | 641 | def test_overwrite(self) -> None: 642 | """Index overwrite works as expected""" 643 | tname = tempfile.mktemp() 644 | 645 | idx = index.Index(tname) 646 | del idx 647 | idx = index.Index(tname, overwrite=True) 648 | assert isinstance(idx, index.Index) 649 | 650 | 651 | class IndexNearest(IndexTestCase): 652 | def test_nearest_basic(self) -> None: 653 | """Test nearest basic selection of records""" 654 | hits = list(self.idx.nearest((0, 0, 10, 10), 3)) 655 | self.assertEqual(hits, [76, 48, 19]) 656 | 657 | idx = index.Index() 658 | locs = [(2, 4), (6, 8), (10, 12), (11, 13), (15, 17), (13, 20)] 659 | for i, (start, stop) in enumerate(locs): 660 | idx.add(i, (start, 1, stop, 1)) 661 | hits = sorted(idx.nearest((13, 0, 20, 2), 3)) 662 | self.assertEqual(hits, [3, 4, 5]) 663 | 664 | @skip_sidx_lt_210 665 | def test_nearest_v_basic(self) -> None: 666 | mins = np.array([[0, 5]] * 2).T 667 | maxs = np.array([[10, 15]] * 2).T 668 | ret = self.idx.nearest_v(mins, maxs, num_results=3) 669 | assert type(ret) is tuple 670 | ids, counts = 
ret 671 | assert ids.dtype == np.int64 672 | ids0 = [76, 48, 19] 673 | ids1 = [76, 47, 48] 674 | assert ids.tolist() == ids0 + ids1 675 | assert counts.dtype == np.uint64 676 | assert counts.tolist() == [3, 3] 677 | 678 | ret = self.idx.nearest_v(mins, maxs, num_results=3, return_max_dists=True) 679 | assert type(ret) is tuple 680 | ids, counts, max_dists = ret 681 | assert ids.tolist() == ids0 + ids1 682 | assert counts.tolist() == [3, 3] 683 | assert max_dists.dtype == np.float64 684 | np.testing.assert_allclose(max_dists, [7.54938045, 11.05686397]) 685 | 686 | ret = self.idx.nearest_v( 687 | mins, maxs, num_results=3, max_dists=[10, 10], return_max_dists=True 688 | ) 689 | ids, counts, max_dists = ret 690 | assert ids.tolist() == ids0 + ids1[:2] 691 | assert counts.tolist() == [3, 2] 692 | np.testing.assert_allclose(max_dists, [7.54938045, 3.92672575]) 693 | 694 | # errors 695 | with pytest.raises(ValueError, match="must have 2 dimensions"): 696 | self.idx.nearest_v(np.ones((2, 3, 4)), 4) 697 | with pytest.raises(ValueError, match="shapes not equal"): 698 | self.idx.nearest_v([0], [10, 12]) 699 | with pytest.raises(ValueError, match="max_dists must have 1 dimension"): 700 | self.idx.nearest_v(maxs, mins, max_dists=[[10]]) 701 | with pytest.raises(ValueError, match="max_dists must have length 2"): 702 | self.idx.nearest_v(maxs, mins, max_dists=[10]) 703 | 704 | def test_nearest_equidistant(self) -> None: 705 | """Test that if records are equidistant, both are returned.""" 706 | point = (0, 0) 707 | small_box = (-10, -10, 10, 10) 708 | large_box = (-50, -50, 50, 50) 709 | 710 | idx = index.Index() 711 | idx.insert(0, small_box) 712 | idx.insert(1, large_box) 713 | self.assertEqual(list(idx.nearest(point, 2)), [0, 1]) 714 | self.assertEqual(list(idx.nearest(point, 1)), [0, 1]) 715 | 716 | idx.insert(2, (0, 0)) 717 | self.assertEqual(list(idx.nearest(point, 2)), [0, 1, 2]) 718 | self.assertEqual(list(idx.nearest(point, 1)), [0, 1, 2]) 719 | 720 | idx = index.Index() 
721 | idx.insert(0, small_box) 722 | idx.insert(1, large_box) 723 | idx.insert(2, (50, 50)) # point on top right vertex of large_box 724 | point = (51, 51) # right outside of large_box 725 | self.assertEqual(list(idx.nearest(point, 2)), [1, 2]) 726 | self.assertEqual(list(idx.nearest(point, 1)), [1, 2]) 727 | 728 | idx = index.Index() 729 | idx.insert(0, small_box) 730 | idx.insert(1, large_box) 731 | # point right outside on top right vertex of large_box 732 | idx.insert(2, (51, 51)) 733 | point = (51, 52) # shifted 1 unit up from the point above 734 | self.assertEqual(list(idx.nearest(point, 2)), [2, 1]) 735 | self.assertEqual(list(idx.nearest(point, 1)), [2]) 736 | 737 | def test_nearest_object(self) -> None: 738 | """Test nearest object selection of records""" 739 | idx = index.Index() 740 | locs = [(14, 10, 14, 10), (16, 10, 16, 10)] 741 | for i, (minx, miny, maxx, maxy) in enumerate(locs): 742 | idx.add(i, (minx, miny, maxx, maxy), obj={"a": 42}) 743 | 744 | hits = sorted( 745 | (i.id, i.object) for i in idx.nearest((15, 10, 15, 10), 1, objects=True) 746 | ) 747 | self.assertEqual(hits, [(0, {"a": 42}), (1, {"a": 42})]) 748 | 749 | 750 | class IndexDelete(IndexTestCase): 751 | def test_deletion(self) -> None: 752 | """Test we can delete data from the index""" 753 | idx = index.Index() 754 | for i, coords in enumerate(self.boxes15): 755 | idx.add(i, coords) 756 | 757 | for i, coords in enumerate(self.boxes15): 758 | idx.delete(i, coords) 759 | 760 | hits = list(idx.intersection((0, 0, 60, 60))) 761 | self.assertEqual(hits, []) 762 | 763 | 764 | class Index3d(IndexTestCase): 765 | """Test we make and query a 3D index""" 766 | 767 | def setUp(self) -> None: 768 | p = index.Property() 769 | p.dimension = 3 770 | self.idx = index.Index(properties=p, interleaved=False) 771 | self.idx.insert(1, (0, 0, 60, 60, 22, 22.0)) 772 | self.coords = (-1, 1, 58, 62, 22, 24) 773 | 774 | def test_intersection(self) -> None: 775 | hits = self.idx.intersection(self.coords) 776 | 
self.assertEqual(list(hits), [1]) 777 | 778 | @skip_sidx_lt_210 779 | def test_intersection_v(self) -> None: 780 | idxs, counts = self.idx.intersection_v(self.coords[0::2], self.coords[1::2]) 781 | assert idxs.tolist() == [1] 782 | assert counts.tolist() == [1] 783 | 784 | 785 | class Index4d(IndexTestCase): 786 | """Test we make and query a 4D index""" 787 | 788 | def setUp(self) -> None: 789 | p = index.Property() 790 | p.dimension = 4 791 | self.idx = index.Index(properties=p, interleaved=False) 792 | self.idx.insert(1, (0, 0, 60, 60, 22, 22.0, 128, 142)) 793 | self.coords = (-1, 1, 58, 62, 22, 24, 120, 150) 794 | 795 | def test_intersection(self) -> None: 796 | hits = self.idx.intersection(self.coords) 797 | self.assertEqual(list(hits), [1]) 798 | 799 | @skip_sidx_lt_210 800 | def test_intersection_v(self) -> None: 801 | idxs, counts = self.idx.intersection_v(self.coords[0::2], self.coords[1::2]) 802 | assert idxs.tolist() == [1] 803 | assert counts.tolist() == [1] 804 | 805 | 806 | class IndexStream(IndexTestCase): 807 | def test_stream_input(self) -> None: 808 | p = index.Property() 809 | sindex = index.Index(self.boxes15_stream(), properties=p) 810 | bounds = (0, 0, 60, 60) 811 | hits = sindex.intersection(bounds) 812 | self.assertEqual(sorted(hits), [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]) 813 | objects = list(sindex.intersection((0, 0, 60, 60), objects=True)) 814 | 815 | self.assertEqual(len(objects), 10) 816 | self.assertEqual(objects[0].object, 42) 817 | 818 | def test_empty_stream(self) -> None: 819 | """Assert empty stream raises exception""" 820 | self.assertRaises(RTreeError, index.Index, iter(())) 821 | 822 | def test_exception_in_generator(self) -> None: 823 | """Assert exceptions raised in callbacks are raised in main thread""" 824 | 825 | class TestException(Exception): 826 | pass 827 | 828 | def create_index() -> index.Index: 829 | def gen() -> Iterator[tuple[int, tuple[int, int, int, int], None]]: 830 | # insert at least 6 or so before the 
exception 831 | for i in range(10): 832 | yield (i, (1, 2, 3, 4), None) 833 | raise TestException("raising here") 834 | 835 | return index.Index(gen()) 836 | 837 | self.assertRaises(TestException, create_index) 838 | 839 | def test_exception_at_beginning_of_generator(self) -> None: 840 | """ 841 | Assert exceptions raised in callbacks before generator 842 | function are raised in main thread. 843 | """ 844 | 845 | class TestException(Exception): 846 | pass 847 | 848 | def create_index() -> index.Index: 849 | def gen() -> None: 850 | raise TestException("raising here") 851 | 852 | return index.Index(gen()) # type: ignore[func-returns-value] 853 | 854 | self.assertRaises(TestException, create_index) 855 | 856 | 857 | class DictStorage(index.CustomStorage): 858 | """A simple storage which saves the pages in a python dictionary""" 859 | 860 | def __init__(self) -> None: 861 | index.CustomStorage.__init__(self) 862 | self.clear() 863 | 864 | def create(self, returnError): 865 | """Called when the storage is created on the C side""" 866 | 867 | def destroy(self, returnError): 868 | """Called when the storage is destroyed on the C side""" 869 | 870 | def clear(self) -> None: 871 | """Clear all our data""" 872 | self.dict: dict = {} 873 | 874 | def loadByteArray(self, page, returnError): 875 | """Returns the data for page or returns an error""" 876 | try: 877 | return self.dict[page] 878 | except KeyError: 879 | returnError.contents.value = self.InvalidPageError 880 | 881 | def storeByteArray(self, page, data, returnError): 882 | """Stores the data for page""" 883 | if page == self.NewPage: 884 | newPageId = len(self.dict) 885 | self.dict[newPageId] = data 886 | return newPageId 887 | else: 888 | if page not in self.dict: 889 | returnError.value = self.InvalidPageError 890 | return 0 891 | self.dict[page] = data 892 | return page 893 | 894 | def deleteByteArray(self, page, returnError): 895 | """Deletes a page""" 896 | try: 897 | del self.dict[page] 898 | except KeyError: 
899 | returnError.contents.value = self.InvalidPageError 900 | 901 | hasData = property(lambda self: bool(self.dict)) 902 | """ Returns true if we contains some data """ 903 | 904 | 905 | class IndexCustomStorage(unittest.TestCase): 906 | def test_custom_storage(self) -> None: 907 | """Custom index storage works as expected""" 908 | settings = index.Property() 909 | settings.writethrough = True 910 | settings.buffering_capacity = 1 911 | 912 | # Notice that there is a small in-memory buffer by default. 913 | # We effectively disable it here so our storage directly receives 914 | # any load/store/delete calls. 915 | # This is not necessary in general and can hamper performance; 916 | # we just use it here for illustrative and testing purposes. 917 | 918 | storage = DictStorage() 919 | r = index.Index(storage, properties=settings) 920 | 921 | # Interestingly enough, if we take a look at the contents of our 922 | # storage now, we can see the Rtree has already written two pages 923 | # to it. This is for header and index. 
924 | 925 | state1 = storage.dict.copy() 926 | self.assertEqual(list(state1.keys()), [0, 1]) 927 | 928 | r.add(123, (0, 0, 1, 1)) 929 | 930 | state2 = storage.dict.copy() 931 | self.assertNotEqual(state1, state2) 932 | 933 | item = list(r.nearest((0, 0), 1, objects=True))[0] 934 | self.assertEqual(item.id, 123) 935 | self.assertTrue(r.valid()) 936 | self.assertTrue(isinstance(list(storage.dict.values())[0], bytes)) 937 | 938 | r.delete(123, (0, 0, 1, 1)) 939 | self.assertTrue(r.valid()) 940 | 941 | r.clearBuffer() 942 | self.assertTrue(r.valid()) 943 | 944 | del r 945 | 946 | storage.clear() 947 | self.assertFalse(storage.hasData) 948 | 949 | del storage 950 | 951 | def test_custom_storage_reopening(self) -> None: 952 | """Reopening custom index storage works as expected""" 953 | 954 | storage = DictStorage() 955 | r1 = index.Index(storage, overwrite=True) 956 | r1.add(555, (2, 2)) 957 | del r1 958 | self.assertTrue(storage.hasData) 959 | 960 | r2 = index.Index(storage, overwrite=False) 961 | count = r2.count((0, 0, 10, 10)) 962 | self.assertEqual(count, 1) 963 | -------------------------------------------------------------------------------- /tests/test_tpr.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import unittest 5 | from collections import defaultdict, namedtuple 6 | from collections.abc import Iterator 7 | from math import ceil 8 | from typing import Any 9 | 10 | import numpy as np 11 | from numpy.random import default_rng 12 | 13 | from rtree.index import Index, Property, RT_TPRTree 14 | 15 | 16 | class Cartesian( 17 | namedtuple( 18 | "Cartesian", 19 | ("id", "time", "x", "y", "x_vel", "y_vel", "update_time", "out_of_bounds"), 20 | ) 21 | ): 22 | __slots__ = () 23 | 24 | def getX(self, t: float) -> float: 25 | return self.x + self.x_vel * (t - self.time) 26 | 27 | def getY(self, t: float) -> float: 28 | return self.y + self.y_vel * (t - self.time) 29 | 30 | 
def getXY(self, t: float) -> tuple[float, float]: 31 | return self.getX(t), self.getY(t) 32 | 33 | def get_coordinates( 34 | self, t_now: float | None = None 35 | ) -> tuple[ 36 | tuple[float, float, float, float], 37 | tuple[float, float, float, float], 38 | float | tuple[float, float], 39 | ]: 40 | return ( 41 | (self.x, self.y, self.x, self.y), 42 | (self.x_vel, self.y_vel, self.x_vel, self.y_vel), 43 | self.time if t_now is None else (self.time, t_now), 44 | ) 45 | 46 | 47 | class QueryCartesian( 48 | namedtuple("QueryCartesian", ("start_time", "end_time", "x", "y", "dx", "dy")) 49 | ): 50 | __slots__ = () 51 | 52 | def get_coordinates( 53 | self, 54 | ) -> tuple[ 55 | tuple[float, float, float, float], 56 | tuple[float, float, float, float], 57 | tuple[float, float], 58 | ]: 59 | return ( 60 | (self.x - self.dx, self.y - self.dy, self.x + self.dx, self.y + self.dy), 61 | (0, 0, 0, 0), 62 | (self.start_time, self.end_time), 63 | ) 64 | 65 | 66 | def data_generator( 67 | dataset_size: int = 100, 68 | simulation_length: int = 10, 69 | max_update_interval: int = 20, 70 | queries_per_time_step: int = 5, 71 | min_query_extent: float = 0.05, 72 | max_query_extent: float = 0.1, 73 | horizon: int = 20, 74 | min_query_interval: int = 2, 75 | max_query_interval: int = 10, 76 | agility: float = 0.01, 77 | min_speed: float = 0.0025, 78 | max_speed: float = 0.0166, 79 | min_x: int = 0, 80 | min_y: int = 0, 81 | max_x: int = 1, 82 | max_y: int = 1, 83 | ) -> Iterator[tuple[str, int, Any]]: 84 | rng = default_rng() 85 | 86 | def create_object( 87 | id_: float, time: float, x: float | None = None, y: float | None = None 88 | ) -> Cartesian: 89 | # Create object with random or defined x, y and random velocity 90 | if x is None: 91 | x = rng.uniform(min_x, max_x) 92 | if y is None: 93 | y = rng.uniform(min_y, max_y) 94 | speed = rng.uniform(min_speed, max_speed) 95 | angle = rng.uniform(-np.pi, np.pi) 96 | x_vel, y_vel = speed * np.cos(angle), speed * np.sin(angle) 97 | 98 | # 
Set update time for when out of bounds, or max interval 99 | for dt in range(1, max_update_interval + 1): 100 | if not (0 < x + x_vel * dt < max_x and 0 < y + y_vel * dt < max_y): 101 | out_of_bounds = True 102 | update_time = time + dt 103 | break 104 | else: 105 | out_of_bounds = False 106 | update_time = time + max_update_interval 107 | 108 | return Cartesian(id_, time, x, y, x_vel, y_vel, update_time, out_of_bounds) 109 | 110 | objects = list() 111 | objects_to_update = defaultdict(set) 112 | for id_ in range(dataset_size): 113 | object_ = create_object(id_, 0) 114 | objects.append(object_) 115 | objects_to_update[object_.update_time].add(object_) 116 | yield "INSERT", 0, object_ 117 | 118 | for t_now in range(1, simulation_length): 119 | need_to_update = ceil(dataset_size * agility) 120 | updated_ids = set() 121 | 122 | while need_to_update > 0 or objects_to_update[t_now]: 123 | kill = False 124 | if objects_to_update[t_now]: 125 | object_ = objects_to_update[t_now].pop() 126 | if object_ not in objects: 127 | continue 128 | kill = object_.out_of_bounds 129 | else: 130 | id_ = rng.integers(0, dataset_size) 131 | while id_ in updated_ids: 132 | id_ = rng.integers(0, dataset_size) 133 | object_ = objects[id_] 134 | 135 | updated_ids.add(object_.id) 136 | need_to_update -= 1 137 | 138 | yield "DELETE", t_now, object_ 139 | 140 | if kill: 141 | x = y = None 142 | else: 143 | x, y = object_.getXY(t_now) 144 | object_ = create_object(object_.id, t_now, x, y) 145 | objects[object_.id] = object_ 146 | objects_to_update[object_.update_time].add(object_) 147 | 148 | yield "INSERT", t_now, object_ 149 | 150 | for _ in range(queries_per_time_step): 151 | x = rng.uniform(min_x, max_x) 152 | y = rng.uniform(min_y, max_y) 153 | dx = rng.uniform(min_query_extent, max_query_extent) 154 | dy = rng.uniform(min_query_extent, max_query_extent) 155 | dt = rng.integers(min_query_interval, max_query_interval + 1) 156 | t = rng.integers(t_now, t_now + horizon - dt) 157 | 158 | yield 
"QUERY", t_now, QueryCartesian(t, t + dt, x, y, dx, dy) 159 | 160 | 161 | def intersects( 162 | x1: float, y1: float, x2: float, y2: float, x: float, y: float, dx: float, dy: float 163 | ) -> bool: 164 | # Checks if line from x1, y1 to x2, y2 intersects with rectangle with 165 | # bottom left at x-dx, y-dy and top right at x+dx, y+dy. 166 | # Implementation of https://stackoverflow.com/a/293052 167 | 168 | # Check if line points not both more/less than max/min for each axis 169 | if ( 170 | (x1 > x + dx and x2 > x + dx) 171 | or (x1 < x - dx and x2 < x - dx) 172 | or (y1 > y + dy and y2 > y + dy) 173 | or (y1 < y - dy and y2 < y - dy) 174 | ): 175 | return False 176 | 177 | # Check on which side (+ve, -ve) of the line the rectangle corners are, 178 | # returning True if any corner is on a different side. 179 | calcs = ( 180 | (y2 - y1) * rect_x + (x1 - x2) * rect_y + (x2 * y1 - x1 * y2) 181 | for rect_x, rect_y in ( 182 | (x - dx, y - dy), 183 | (x + dx, y - dy), 184 | (x - dx, y + dy), 185 | (x + dx, y + dy), 186 | ) 187 | ) 188 | sign = np.sign(next(calcs)) # First corner (bottom left) 189 | return any(np.sign(calc) != sign for calc in calcs) # Check remaining 3 190 | 191 | 192 | class TPRTests(unittest.TestCase): 193 | def test_tpr(self) -> None: 194 | # TODO : this freezes forever on some windows cloud builds 195 | if os.name == "nt": 196 | return 197 | 198 | # Cartesians list for brute force 199 | objects = dict() 200 | tpr_tree = Index(properties=Property(type=RT_TPRTree)) 201 | 202 | for operation, t_now, object_ in data_generator(): 203 | if operation == "INSERT": 204 | tpr_tree.insert(object_.id, object_.get_coordinates()) 205 | objects[object_.id] = object_ 206 | elif operation == "DELETE": 207 | tpr_tree.delete(object_.id, object_.get_coordinates(t_now)) 208 | del objects[object_.id] 209 | elif operation == "QUERY": 210 | tree_intersect = set(tpr_tree.intersection(object_.get_coordinates())) 211 | 212 | # Brute intersect 213 | brute_intersect = set() 214 
| for tree_object in objects.values(): 215 | x_low, y_low = tree_object.getXY(object_.start_time) 216 | x_high, y_high = tree_object.getXY(object_.end_time) 217 | 218 | if intersects( 219 | x_low, 220 | y_low, 221 | x_high, 222 | y_high, # Line 223 | object_.x, 224 | object_.y, 225 | object_.dx, 226 | object_.dy, 227 | ): # Rect 228 | brute_intersect.add(tree_object.id) 229 | 230 | # Tree should match brute force approach 231 | assert tree_intersect == brute_intersect 232 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = 3 | tox>=4 4 | env_list = py{39,310,311,312,313} 5 | 6 | [testenv] 7 | description = run unit tests 8 | deps = 9 | pytest>=6 10 | numpy 11 | install_command = 12 | python -I -m pip install --only-binary=:all: {opts} {packages} 13 | ignore_errors = True 14 | ignore_outcome = True 15 | commands = pytest 16 | --------------------------------------------------------------------------------