├── .github ├── SECURITY.md ├── dependabot.yml ├── release.yml ├── workflows │ ├── CI.yml │ └── zizmor.yml └── zizmor.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yml ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.rst ├── docs ├── api.rst ├── conf.py ├── index.rst ├── requirements.in ├── requirements.txt └── spelling-wordlist.txt ├── noxfile.py ├── pyproject.toml ├── rpds.pyi ├── src └── lib.rs └── tests ├── __init__.py ├── requirements.in ├── requirements.txt ├── test_hash_trie_map.py ├── test_hash_trie_set.py ├── test_list.py └── test_queue.py /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | In general, only the latest released `rpds-py` version is supported and will receive updates. 6 | 7 | ## Reporting a Vulnerability 8 | 9 | To report a security vulnerability, please send an email to `Julian+Security` at `GrayVines.com` with subject line `SECURITY (rpds-py)`. 10 | 11 | I will do my best to respond within 48 hours to acknowledge the message and discuss further steps. 12 | 13 | If the vulnerability is accepted, an advisory will be sent out via GitHub's security advisory functionality. 14 | 15 | For non-sensitive discussion related to this policy itself, feel free to open an issue on the issue tracker. 16 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | 8 | - package-ecosystem: "cargo" 9 | directory: "/" 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | authors: 4 | - dependabot 5 | - pre-commit-ci 6 | -------------------------------------------------------------------------------- /.github/workflows/CI.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches-ignore: 6 | - "wip*" 7 | tags: 8 | - "v[0-9].*" 9 | pull_request: 10 | schedule: 11 | # Daily at 5:33 12 | - cron: "33 5 * * *" 13 | workflow_dispatch: 14 | 15 | permissions: {} 16 | 17 | jobs: 18 | list: 19 | runs-on: ubuntu-latest 20 | outputs: 21 | noxenvs: ${{ steps.noxenvs-matrix.outputs.noxenvs }} 22 | steps: 23 | - uses: actions/checkout@v4 24 | with: 25 | persist-credentials: false 26 | - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb 27 | with: 28 | enable-cache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning] 29 | - id: noxenvs-matrix 30 | run: | 31 | echo >>$GITHUB_OUTPUT noxenvs=$( 32 | uvx nox --list-sessions --json | jq '[.[].session]' 33 | ) 34 | 35 | test: 36 | needs: list 37 | runs-on: ubuntu-latest 38 | 39 | strategy: 40 | fail-fast: false 41 | matrix: 42 | noxenv: ${{ fromJson(needs.list.outputs.noxenvs) }} 43 | 44 | steps: 45 | - uses: actions/checkout@v4 46 | with: 47 | persist-credentials: false 48 | - name: Install dependencies 49 | run: sudo apt-get update && sudo apt-get install -y libenchant-2-dev 50 | if: runner.os == 'Linux' && startsWith(matrix.noxenv, 'docs') 51 | - name: Install dependencies 52 | run: brew install enchant 53 | if: runner.os == 'macOS' && startsWith(matrix.noxenv, 
'docs') 54 | - name: Set up Python 55 | uses: actions/setup-python@v5 56 | with: 57 | python-version: | 58 | 3.9 59 | 3.10 60 | 3.11 61 | 3.12 62 | 3.13 63 | 3.13t 64 | pypy3.9 65 | pypy3.10 66 | pypy3.11 67 | allow-prereleases: true 68 | 69 | - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb 70 | with: 71 | enable-cache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning] 72 | - name: Run nox 73 | run: uvx nox -s "${{ matrix.noxenv }}" -- ${{ matrix.posargs }} # zizmor: ignore[template-injection] 74 | 75 | manylinux: 76 | needs: test 77 | runs-on: ubuntu-latest 78 | 79 | strategy: 80 | fail-fast: false 81 | matrix: 82 | target: [x86_64, x86, aarch64, armv7, s390x, ppc64le] 83 | 84 | steps: 85 | - uses: actions/checkout@v4 86 | with: 87 | persist-credentials: false 88 | - uses: actions/setup-python@v5 89 | with: 90 | python-version: | 91 | 3.9 92 | 3.10 93 | 3.11 94 | 3.12 95 | 3.13 96 | 3.13t 97 | pypy3.9 98 | pypy3.10 99 | pypy3.11 100 | allow-prereleases: true 101 | - name: Build wheels 102 | uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 103 | with: 104 | target: ${{ matrix.target }} 105 | args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t pypy3.9 pypy3.10 pypy3.11' 106 | sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning] 107 | manylinux: auto 108 | - name: Upload wheels 109 | uses: actions/upload-artifact@v4 110 | with: 111 | name: dist-${{ github.job }}-${{ matrix.target }} 112 | path: dist 113 | 114 | musllinux: 115 | needs: test 116 | runs-on: ubuntu-latest 117 | 118 | strategy: 119 | fail-fast: false 120 | matrix: 121 | target: 122 | - aarch64-unknown-linux-musl 123 | - i686-unknown-linux-musl 124 | - x86_64-unknown-linux-musl 125 | 126 | steps: 127 | - uses: actions/checkout@v4 128 | with: 129 | persist-credentials: false 130 | - uses: actions/setup-python@v5 131 | with: 132 | python-version: | 133 | 3.9 134 | 3.10 135 | 3.11 136 | 3.12 137 | 3.13 138 | 3.13t 139 | pypy3.9 140 | pypy3.10 141 | pypy3.11 142 | allow-prereleases: true 143 | - name: Build wheels 144 | uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 145 | with: 146 | target: ${{ matrix.target }} 147 | args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t pypy3.9 pypy3.10 pypy3.11' 148 | manylinux: musllinux_1_2 149 | sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning] 150 | - name: Upload wheels 151 | uses: actions/upload-artifact@v4 152 | with: 153 | name: dist-${{ github.job }}-${{ matrix.target }} 154 | path: dist 155 | 156 | windows: 157 | needs: test 158 | runs-on: windows-latest 159 | 160 | strategy: 161 | fail-fast: false 162 | matrix: 163 | target: [x64, x86] # x86 is not supported by pypy 164 | 165 | steps: 166 | - uses: actions/checkout@v4 167 | with: 168 | persist-credentials: false 169 | - uses: actions/setup-python@v5 170 | with: 171 | python-version: | 172 | 3.9 173 | 3.10 174 | 3.11 175 | 3.12 176 | 3.13 177 | ${{ matrix.target == 'x64' && 'pypy3.9' || '' }} 178 | ${{ matrix.target == 'x64' && 'pypy3.10' || '' }} 179 | allow-prereleases: true 180 | architecture: ${{ matrix.target }} 181 | - name: Build wheels 182 | uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 183 | with: 184 | target: ${{ matrix.target }} 185 | args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13' --interpreter ${{ matrix.target == 'x64' && 'pypy3.9 pypy3.10' || '' }} 186 | sccache: ${{ github.ref_type != 'tag' }} # 
zizmor: ignore[cache-poisoning] 187 | - name: Upload wheels 188 | uses: actions/upload-artifact@v4 189 | with: 190 | name: dist-${{ github.job }}-${{ matrix.target }} 191 | path: dist 192 | 193 | windows-arm: 194 | needs: test 195 | runs-on: windows-11-arm 196 | 197 | strategy: 198 | fail-fast: false 199 | matrix: 200 | target: 201 | - aarch64-pc-windows-msvc 202 | 203 | steps: 204 | - uses: actions/checkout@v4 205 | with: 206 | persist-credentials: false 207 | # Install each python version seperatly so that the paths can be passed to maturin. (otherwise finds pre-installed x64 versions) 208 | - uses: actions/setup-python@v5 209 | id: cp311 210 | with: 211 | python-version: 3.11 212 | allow-prereleases: true 213 | architecture: arm64 214 | - uses: actions/setup-python@v5 215 | id: cp312 216 | with: 217 | python-version: 3.12 218 | allow-prereleases: true 219 | architecture: arm64 220 | - uses: actions/setup-python@v5 221 | id: cp313 222 | with: 223 | python-version: 3.13 224 | allow-prereleases: true 225 | architecture: arm64 226 | # rust toolchain is not currently installed on windopws arm64 images: https://github.com/actions/partner-runner-images/issues/77 227 | - name: Setup rust 228 | id: setup-rust 229 | run: | 230 | Invoke-WebRequest https://static.rust-lang.org/rustup/dist/aarch64-pc-windows-msvc/rustup-init.exe -OutFile .\rustup-init.exe 231 | .\rustup-init.exe -y 232 | Add-Content $env:GITHUB_PATH "$env:USERPROFILE\.cargo\bin" 233 | - name: Build wheels 234 | uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 235 | with: 236 | target: ${{ matrix.target }} 237 | args: --release --out dist --interpreter ${{ steps.cp311.outputs.python-path }} ${{ steps.cp312.outputs.python-path }} ${{ steps.cp313.outputs.python-path }} 238 | sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning] 239 | - name: Upload wheels 240 | uses: actions/upload-artifact@v4 241 | with: 242 | name: dist-${{ github.job }}-${{ matrix.target }} 243 | path: dist 244 | 245 | # free-threaded and normal builds share a site-packages folder on Windows so 246 | # we must build free-threaded separately 247 | windows-free-threaded: 248 | needs: test 249 | runs-on: windows-latest 250 | 251 | strategy: 252 | fail-fast: false 253 | matrix: 254 | target: [x64, x86] # x86 is not supported by pypy 255 | 256 | steps: 257 | - uses: actions/checkout@v4 258 | with: 259 | persist-credentials: false 260 | - uses: actions/setup-python@v5 261 | with: 262 | python-version: 3.13t 263 | allow-prereleases: true 264 | architecture: ${{ matrix.target }} 265 | - name: Build wheels 266 | uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 267 | with: 268 | target: ${{ matrix.target }} 269 | args: --release --out dist --interpreter '3.13t' 270 | sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning] 271 | - name: Upload wheels 272 | uses: actions/upload-artifact@v4 273 | with: 274 | name: dist-${{ github.job }}-${{ matrix.target }}-free-threaded 275 | path: dist 276 | 277 | macos: 278 | needs: test 279 | runs-on: macos-latest 280 | 281 | strategy: 282 | fail-fast: false 283 | matrix: 284 | target: [x86_64, aarch64] 285 | 286 | steps: 287 | - uses: actions/checkout@v4 288 | with: 289 | persist-credentials: false 290 | - uses: actions/setup-python@v5 291 | with: 292 | python-version: | 293 | 3.9 294 | 3.10 295 | 3.11 296 | 3.12 297 | 3.13 298 | 3.13t 299 | pypy3.9 300 | pypy3.10 301 | pypy3.11 302 | allow-prereleases: true 303 | - name: Build wheels 304 | uses: 
PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 305 | with: 306 | target: ${{ matrix.target }} 307 | args: --release --out dist --interpreter '3.9 3.10 3.11 3.12 3.13 3.13t pypy3.9 pypy3.10 pypy3.11' 308 | sccache: ${{ github.ref_type != 'tag' }} # zizmor: ignore[cache-poisoning] 309 | - name: Upload wheels 310 | uses: actions/upload-artifact@v4 311 | with: 312 | name: dist-${{ github.job }}-${{ matrix.target }} 313 | path: dist 314 | 315 | sdist: 316 | needs: test 317 | runs-on: ubuntu-latest 318 | steps: 319 | - uses: actions/checkout@v4 320 | with: 321 | persist-credentials: false 322 | - uses: actions/setup-python@v5 323 | with: 324 | python-version: 3.13 325 | - name: Build an sdist 326 | uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 327 | with: 328 | command: sdist 329 | args: --out dist 330 | - name: Upload sdist 331 | uses: actions/upload-artifact@v4 332 | with: 333 | name: dist-${{ github.job }} 334 | path: dist 335 | 336 | release: 337 | needs: 338 | [manylinux, musllinux, windows, windows-arm, windows-free-threaded, macos] 339 | runs-on: ubuntu-latest 340 | if: "startsWith(github.ref, 'refs/tags/')" 341 | environment: 342 | name: PyPI 343 | url: https://pypi.org/p/rpds-py 344 | permissions: 345 | contents: write 346 | id-token: write 347 | 348 | steps: 349 | - uses: actions/download-artifact@v4 350 | with: 351 | pattern: dist-* 352 | merge-multiple: true 353 | - name: Publish to PyPI 354 | uses: PyO3/maturin-action@aef21716ff3dcae8a1c301d23ec3e4446972a6e3 # v1.49.1 355 | with: 356 | command: upload 357 | args: --non-interactive --skip-existing * 358 | - name: Create a GitHub Release 359 | if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') 360 | uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 361 | with: 362 | files: | 363 | * 364 | generate_release_notes: true 365 | -------------------------------------------------------------------------------- /.github/workflows/zizmor.yml: -------------------------------------------------------------------------------- 1 | name: GitHub Actions Security Analysis with zizmor 🌈 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["**"] 8 | 9 | permissions: {} 10 | 11 | jobs: 12 | zizmor: 13 | runs-on: ubuntu-latest 14 | 15 | permissions: 16 | security-events: write 17 | 18 | steps: 19 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 20 | with: 21 | persist-credentials: false 22 | 23 | - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 24 | - name: Run zizmor 🌈 25 | run: uvx zizmor --format=sarif . 
> results.sarif 26 | 27 | env: 28 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | 30 | - name: Upload SARIF file 31 | uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 32 | with: 33 | sarif_file: results.sarif 34 | category: zizmor 35 | -------------------------------------------------------------------------------- /.github/zizmor.yml: -------------------------------------------------------------------------------- 1 | rules: 2 | template-injection: 3 | ignore: 4 | # our matrix is dynamically generated via `nox -l` but with no user input 5 | - CI.yml:71:9 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | .pytest_cache/ 6 | *.py[cod] 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | .venv/ 14 | env/ 15 | bin/ 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | include/ 26 | man/ 27 | venv/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | pip-selfcheck.json 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .cache 42 | nosetests.xml 43 | coverage.xml 44 | 45 | # Translations 46 | *.mo 47 | 48 | # Mr Developer 49 | .mr.developer.cfg 50 | .project 51 | .pydevproject 52 | 53 | # Rope 54 | .ropeproject 55 | 56 | # Django stuff: 57 | *.log 58 | *.pot 59 | 60 | .DS_Store 61 | 62 | # Sphinx documentation 63 | docs/_build/ 64 | 65 | # PyCharm 66 | .idea/ 67 | 68 | # VSCode 69 | .vscode/ 70 | 71 | # Pyenv 72 | .python-version 73 | 74 | # User defined 75 | /dirhtml 76 | _cache 77 | 78 | TODO 79 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | skip: 3 | # pre-commit.ci doesn't have Rust installed 4 | - fmt 5 | - clippy 6 | - zizmor 7 | 8 | repos: 9 | - repo: https://github.com/pre-commit/pre-commit-hooks 10 | rev: v5.0.0 11 | hooks: 12 | - id: check-ast 13 | - id: check-docstring-first 14 | - id: check-toml 15 | - id: check-vcs-permalinks 16 | - id: check-yaml 17 | - id: debug-statements 18 | - id: end-of-file-fixer 19 | - id: mixed-line-ending 20 | args: [--fix, lf] 21 | - id: trailing-whitespace 22 | - repo: https://github.com/doublify/pre-commit-rust 23 | rev: "v1.0" 24 | hooks: 25 | - id: fmt 26 | - id: clippy 27 | - repo: https://github.com/psf/black 28 | rev: 25.1.0 29 | hooks: 30 | - id: black 31 | - repo: https://github.com/pre-commit/mirrors-prettier 32 | rev: "v4.0.0-alpha.8" 33 | hooks: 34 | - id: prettier 35 | - repo: https://github.com/woodruffw/zizmor 36 | rev: v0.8.0 37 | hooks: 38 | - id: zizmor 39 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3.11" 7 | rust: "1.70" 8 | 9 | sphinx: 10 | builder: dirhtml 11 | configuration: docs/conf.py 12 | fail_on_warning: true 13 | 14 | formats: all 15 | 16 | python: 17 | install: 18 | - requirements: docs/requirements.txt 19 | -------------------------------------------------------------------------------- /Cargo.lock: 
-------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 4 4 | 5 | [[package]] 6 | name = "archery" 7 | version = "1.2.1" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "eae2ed21cd55021f05707a807a5fc85695dafb98832921f6cfa06db67ca5b869" 10 | dependencies = [ 11 | "triomphe", 12 | ] 13 | 14 | [[package]] 15 | name = "autocfg" 16 | version = "1.4.0" 17 | source = "registry+https://github.com/rust-lang/crates.io-index" 18 | checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" 19 | 20 | [[package]] 21 | name = "cc" 22 | version = "1.2.22" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | checksum = "32db95edf998450acc7881c932f94cd9b05c87b4b2599e8bab064753da4acfd1" 25 | dependencies = [ 26 | "shlex", 27 | ] 28 | 29 | [[package]] 30 | name = "heck" 31 | version = "0.5.0" 32 | source = "registry+https://github.com/rust-lang/crates.io-index" 33 | checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" 34 | 35 | [[package]] 36 | name = "indoc" 37 | version = "2.0.6" 38 | source = "registry+https://github.com/rust-lang/crates.io-index" 39 | checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" 40 | 41 | [[package]] 42 | name = "libc" 43 | version = "0.2.172" 44 | source = "registry+https://github.com/rust-lang/crates.io-index" 45 | checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" 46 | 47 | [[package]] 48 | name = "memoffset" 49 | version = "0.9.1" 50 | source = "registry+https://github.com/rust-lang/crates.io-index" 51 | checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" 52 | dependencies = [ 53 | "autocfg", 54 | ] 55 | 56 | [[package]] 57 | name = "once_cell" 58 | version = "1.21.3" 59 | source = "registry+https://github.com/rust-lang/crates.io-index" 60 | checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" 61 | 62 | [[package]] 63 | name = "portable-atomic" 64 | version = "1.11.0" 65 | source = "registry+https://github.com/rust-lang/crates.io-index" 66 | checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" 67 | 68 | [[package]] 69 | name = "proc-macro2" 70 | version = "1.0.95" 71 | source = "registry+https://github.com/rust-lang/crates.io-index" 72 | checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" 73 | dependencies = [ 74 | "unicode-ident", 75 | ] 76 | 77 | [[package]] 78 | name = "pyo3" 79 | version = "0.25.0" 80 | source = "registry+https://github.com/rust-lang/crates.io-index" 81 | checksum = "f239d656363bcee73afef85277f1b281e8ac6212a1d42aa90e55b90ed43c47a4" 82 | dependencies = [ 83 | "indoc", 84 | "libc", 85 | "memoffset", 86 | "once_cell", 87 | "portable-atomic", 88 | "pyo3-build-config", 89 | "pyo3-ffi", 90 | "pyo3-macros", 91 | "unindent", 92 | ] 93 | 94 | [[package]] 95 | name = "pyo3-build-config" 96 | version = "0.25.0" 97 | source = "registry+https://github.com/rust-lang/crates.io-index" 98 | checksum = "755ea671a1c34044fa165247aaf6f419ca39caa6003aee791a0df2713d8f1b6d" 99 | dependencies = [ 100 | "once_cell", 101 | "python3-dll-a", 102 | "target-lexicon", 103 | ] 104 | 105 | [[package]] 106 | name = "pyo3-ffi" 107 | version = "0.25.0" 108 | source = "registry+https://github.com/rust-lang/crates.io-index" 109 | checksum = "fc95a2e67091e44791d4ea300ff744be5293f394f1bafd9f78c080814d35956e" 
110 | dependencies = [ 111 | "libc", 112 | "pyo3-build-config", 113 | ] 114 | 115 | [[package]] 116 | name = "pyo3-macros" 117 | version = "0.25.0" 118 | source = "registry+https://github.com/rust-lang/crates.io-index" 119 | checksum = "a179641d1b93920829a62f15e87c0ed791b6c8db2271ba0fd7c2686090510214" 120 | dependencies = [ 121 | "proc-macro2", 122 | "pyo3-macros-backend", 123 | "quote", 124 | "syn", 125 | ] 126 | 127 | [[package]] 128 | name = "pyo3-macros-backend" 129 | version = "0.25.0" 130 | source = "registry+https://github.com/rust-lang/crates.io-index" 131 | checksum = "9dff85ebcaab8c441b0e3f7ae40a6963ecea8a9f5e74f647e33fcf5ec9a1e89e" 132 | dependencies = [ 133 | "heck", 134 | "proc-macro2", 135 | "pyo3-build-config", 136 | "quote", 137 | "syn", 138 | ] 139 | 140 | [[package]] 141 | name = "python3-dll-a" 142 | version = "0.2.13" 143 | source = "registry+https://github.com/rust-lang/crates.io-index" 144 | checksum = "49fe4227a288cf9493942ad0220ea3f185f4d1f2a14f197f7344d6d02f4ed4ed" 145 | dependencies = [ 146 | "cc", 147 | ] 148 | 149 | [[package]] 150 | name = "quote" 151 | version = "1.0.40" 152 | source = "registry+https://github.com/rust-lang/crates.io-index" 153 | checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" 154 | dependencies = [ 155 | "proc-macro2", 156 | ] 157 | 158 | [[package]] 159 | name = "rpds" 160 | version = "1.1.1" 161 | source = "registry+https://github.com/rust-lang/crates.io-index" 162 | checksum = "a7f89f654d51fffdd6026289d07d1fd523244d46ae0a8bc22caa6dd7f9e8cb0b" 163 | dependencies = [ 164 | "archery", 165 | ] 166 | 167 | [[package]] 168 | name = "rpds-py" 169 | version = "0.25.1" 170 | dependencies = [ 171 | "archery", 172 | "pyo3", 173 | "rpds", 174 | ] 175 | 176 | [[package]] 177 | name = "shlex" 178 | version = "1.3.0" 179 | source = "registry+https://github.com/rust-lang/crates.io-index" 180 | checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" 181 | 182 | [[package]] 183 | name = "syn" 184 | version = "2.0.101" 185 | source = "registry+https://github.com/rust-lang/crates.io-index" 186 | checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" 187 | dependencies = [ 188 | "proc-macro2", 189 | "quote", 190 | "unicode-ident", 191 | ] 192 | 193 | [[package]] 194 | name = "target-lexicon" 195 | version = "0.13.2" 196 | source = "registry+https://github.com/rust-lang/crates.io-index" 197 | checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" 198 | 199 | [[package]] 200 | name = "triomphe" 201 | version = "0.1.14" 202 | source = "registry+https://github.com/rust-lang/crates.io-index" 203 | checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" 204 | 205 | [[package]] 206 | name = "unicode-ident" 207 | version = "1.0.18" 208 | source = "registry+https://github.com/rust-lang/crates.io-index" 209 | checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" 210 | 211 | [[package]] 212 | name = "unindent" 213 | version = "0.2.4" 214 | source = "registry+https://github.com/rust-lang/crates.io-index" 215 | checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" 216 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rpds-py" 3 | version = "0.25.1" 4 | edition = "2021" 5 | 6 | [lib] 7 | name = "rpds" 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | 
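# `rpds` provides the persistent data structures themselves; `archery` supplies the
# reference-counted pointer abstraction (Rc vs. Arc) that the thread-safe `*Sync`
# variants used in src/lib.rs are parameterized over.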
rpds = "1.1.1" 12 | archery = "1.2.1" 13 | 14 | [dependencies.pyo3] 15 | version = "0.25.0" 16 | # To build extension for PyPy on Windows, "generate-import-lib" is needed: 17 | # https://github.com/PyO3/maturin-action/issues/267#issuecomment-2106844429 18 | features = ["extension-module", "generate-import-lib"] 19 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2023 Julian Berman 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | ``rpds.py`` 3 | =========== 4 | 5 | |PyPI| |Pythons| |CI| 6 | 7 | .. |PyPI| image:: https://img.shields.io/pypi/v/rpds-py.svg 8 | :alt: PyPI version 9 | :target: https://pypi.org/project/rpds-py/ 10 | 11 | .. |Pythons| image:: https://img.shields.io/pypi/pyversions/rpds-py.svg 12 | :alt: Supported Python versions 13 | :target: https://pypi.org/project/rpds-py/ 14 | 15 | .. |CI| image:: https://github.com/crate-py/rpds/workflows/CI/badge.svg 16 | :alt: Build status 17 | :target: https://github.com/crate-py/rpds/actions?query=workflow%3ACI 18 | 19 | .. |ReadTheDocs| image:: https://readthedocs.org/projects/referencing/badge/?version=stable&style=flat 20 | :alt: ReadTheDocs status 21 | :target: https://referencing.readthedocs.io/en/stable/ 22 | 23 | 24 | Python bindings to the `Rust rpds crate `_ for persistent data structures. 25 | 26 | What's here is quite minimal (in transparency, it was written initially to support replacing ``pyrsistent`` in the `referencing library `_). 27 | If you see something missing (which is very likely), a PR is definitely welcome to add it. 28 | 29 | Installation 30 | ------------ 31 | 32 | The distribution on PyPI is named ``rpds.py`` (equivalently ``rpds-py``), and thus can be installed via e.g.: 33 | 34 | .. code:: sh 35 | 36 | $ pip install rpds-py 37 | 38 | Note that if you install ``rpds-py`` from source, you will need a Rust toolchain installed, as it is a build-time dependency. 39 | An example of how to do so in a ``Dockerfile`` can be found `here `_. 40 | 41 | If you believe you are on a common platform which should have wheels built (i.e. 
and not need to compile from source), feel free to file an issue or pull request modifying the GitHub action used here to build wheels via ``maturin``. 42 | 43 | Usage 44 | ----- 45 | 46 | Methods in general are named similarly to their ``rpds`` counterparts (rather than ``pyrsistent``\ 's conventions, though probably a full drop-in ``pyrsistent``\ -compatible wrapper module is a good addition at some point). 47 | 48 | .. code:: python 49 | 50 | >>> from rpds import HashTrieMap, HashTrieSet, List 51 | 52 | >>> m = HashTrieMap({"foo": "bar", "baz": "quux"}) 53 | >>> m.insert("spam", 37) == HashTrieMap({"foo": "bar", "baz": "quux", "spam": 37}) 54 | True 55 | >>> m.remove("foo") == HashTrieMap({"baz": "quux"}) 56 | True 57 | 58 | >>> s = HashTrieSet({"foo", "bar", "baz", "quux"}) 59 | >>> s.insert("spam") == HashTrieSet({"foo", "bar", "baz", "quux", "spam"}) 60 | True 61 | >>> s.remove("foo") == HashTrieSet({"bar", "baz", "quux"}) 62 | True 63 | 64 | >>> L = List([1, 3, 5]) 65 | >>> L.push_front(-1) == List([-1, 1, 3, 5]) 66 | True 67 | >>> L.rest == List([3, 5]) 68 | True 69 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | .. automodule:: rpds 5 | :members: 6 | :undoc-members: 7 | :imported-members: 8 | :special-members: __iter__, __getitem__, __len__, __rmatmul__ 9 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import importlib.metadata 2 | import re 3 | 4 | from url import URL 5 | 6 | GITHUB = URL.parse("https://github.com/") 7 | HOMEPAGE = GITHUB / "crate-py/rpds" 8 | 9 | project = "rpds.py" 10 | author = "Julian Berman" 11 | copyright = f"2023, {author}" 12 | 13 | release = importlib.metadata.version("rpds.py") 14 | version = release.partition("-")[0] 15 | 16 | language = "en" 17 | default_role = "any" 18 | 19 | extensions = [ 20 | "sphinx.ext.autodoc", 21 | "sphinx.ext.autosectionlabel", 22 | "sphinx.ext.coverage", 23 | "sphinx.ext.doctest", 24 | "sphinx.ext.extlinks", 25 | "sphinx.ext.intersphinx", 26 | "sphinx.ext.napoleon", 27 | "sphinx.ext.todo", 28 | "sphinx.ext.viewcode", 29 | "sphinx_copybutton", 30 | "sphinxcontrib.spelling", 31 | "sphinxext.opengraph", 32 | ] 33 | 34 | pygments_style = "lovelace" 35 | pygments_dark_style = "one-dark" 36 | 37 | html_theme = "furo" 38 | 39 | 40 | def entire_domain(host): 41 | return r"http.?://" + re.escape(host) + r"($|/.*)" 42 | 43 | 44 | linkcheck_ignore = [ 45 | entire_domain("img.shields.io"), 46 | f"{GITHUB}.*#.*", 47 | str(HOMEPAGE / "actions"), 48 | str(HOMEPAGE / "workflows/CI/badge.svg"), 49 | ] 50 | 51 | # = Extensions = 52 | 53 | # -- autodoc -- 54 | 55 | autodoc_default_options = { 56 | "members": True, 57 | "member-order": "bysource", 58 | } 59 | 60 | # -- autosectionlabel -- 61 | 62 | autosectionlabel_prefix_document = True 63 | 64 | # -- intersphinx -- 65 | 66 | intersphinx_mapping = { 67 | "python": ("https://docs.python.org/", None), 68 | } 69 | 70 | # -- extlinks -- 71 | 72 | extlinks = { 73 | "gh": (str(HOMEPAGE) + "/%s", None), 74 | "github": (str(GITHUB) + "/%s", None), 75 | } 76 | extlinks_detect_hardcoded_links = True 77 | 78 | # -- sphinx-copybutton -- 79 | 80 | copybutton_prompt_text = r">>> |\.\.\. 
|\$" 81 | copybutton_prompt_is_regexp = True 82 | 83 | # -- sphinxcontrib-spelling -- 84 | 85 | spelling_word_list_filename = "spelling-wordlist.txt" 86 | spelling_show_suggestions = True 87 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Python bindings to the `Rust rpds crate `_ for persistent data structures. 2 | 3 | What's here is quite minimal (in transparency, it was written initially to support replacing ``pyrsistent`` in the `referencing library `_). 4 | If you see something missing (which is very likely), a PR is definitely welcome to add it. 5 | 6 | Installation 7 | ------------ 8 | 9 | The distribution on PyPI is named ``rpds.py`` (equivalently ``rpds-py``), and thus can be installed via e.g.: 10 | 11 | .. code:: sh 12 | 13 | $ pip install rpds-py 14 | 15 | Note that if you install ``rpds-py`` from source, you will need a Rust toolchain installed, as it is a build-time dependency. 16 | An example of how to do so in a ``Dockerfile`` can be found `here `_. 17 | 18 | If you believe you are on a common platform which should have wheels built (i.e. and not need to compile from source), feel free to file an issue or pull request modifying the GitHub action used here to build wheels via ``maturin``. 19 | 20 | Usage 21 | ----- 22 | 23 | Methods in general are named similarly to their ``rpds`` counterparts (rather than ``pyrsistent``\ 's conventions, though probably a full drop-in ``pyrsistent``\ -compatible wrapper module is a good addition at some point). 24 | 25 | .. code:: python 26 | 27 | >>> from rpds import HashTrieMap, HashTrieSet, List 28 | 29 | >>> m = HashTrieMap({"foo": "bar", "baz": "quux"}) 30 | >>> m.insert("spam", 37) == HashTrieMap({"foo": "bar", "baz": "quux", "spam": 37}) 31 | True 32 | >>> m.remove("foo") == HashTrieMap({"baz": "quux"}) 33 | True 34 | 35 | >>> s = HashTrieSet({"foo", "bar", "baz", "quux"}) 36 | >>> s.insert("spam") == HashTrieSet({"foo", "bar", "baz", "quux", "spam"}) 37 | True 38 | >>> s.remove("foo") == HashTrieSet({"bar", "baz", "quux"}) 39 | True 40 | 41 | >>> L = List([1, 3, 5]) 42 | >>> L.push_front(-1) == List([-1, 1, 3, 5]) 43 | True 44 | >>> L.rest == List([3, 5]) 45 | True 46 | 47 | 48 | .. 
toctree:: 49 | :glob: 50 | :hidden: 51 | 52 | api 53 | -------------------------------------------------------------------------------- /docs/requirements.in: -------------------------------------------------------------------------------- 1 | file:.#egg=rpds-py 2 | furo 3 | pygments-github-lexers 4 | sphinx-copybutton 5 | sphinx>5 6 | sphinxcontrib-spelling>5 7 | sphinxext-opengraph 8 | url.py 9 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by uv via the following command: 2 | # uv pip compile --output-file /Users/julian/Development/rpds.py/docs/requirements.txt docs/requirements.in 3 | alabaster==1.0.0 4 | # via sphinx 5 | babel==2.17.0 6 | # via sphinx 7 | beautifulsoup4==4.13.4 8 | # via furo 9 | certifi==2025.4.26 10 | # via requests 11 | charset-normalizer==3.4.2 12 | # via requests 13 | docutils==0.21.2 14 | # via sphinx 15 | furo==2024.8.6 16 | # via -r docs/requirements.in 17 | idna==3.10 18 | # via requests 19 | imagesize==1.4.1 20 | # via sphinx 21 | jinja2==3.1.6 22 | # via sphinx 23 | markupsafe==3.0.2 24 | # via jinja2 25 | packaging==25.0 26 | # via sphinx 27 | pyenchant==3.2.2 28 | # via sphinxcontrib-spelling 29 | pygments==2.19.1 30 | # via 31 | # furo 32 | # pygments-github-lexers 33 | # sphinx 34 | pygments-github-lexers==0.0.5 35 | # via -r docs/requirements.in 36 | requests==2.32.3 37 | # via 38 | # sphinx 39 | # sphinxcontrib-spelling 40 | roman-numerals-py==3.1.0 41 | # via sphinx 42 | rpds-py @ file:.#egg=rpds-py 43 | # via -r docs/requirements.in 44 | snowballstemmer==3.0.1 45 | # via sphinx 46 | soupsieve==2.7 47 | # via beautifulsoup4 48 | sphinx==8.2.3 49 | # via 50 | # -r docs/requirements.in 51 | # furo 52 | # sphinx-basic-ng 53 | # sphinx-copybutton 54 | # sphinxcontrib-spelling 55 | # sphinxext-opengraph 56 | sphinx-basic-ng==1.0.0b2 57 | # via furo 58 | sphinx-copybutton==0.5.2 59 | # via -r docs/requirements.in 60 | sphinxcontrib-applehelp==2.0.0 61 | # via sphinx 62 | sphinxcontrib-devhelp==2.0.0 63 | # via sphinx 64 | sphinxcontrib-htmlhelp==2.1.0 65 | # via sphinx 66 | sphinxcontrib-jsmath==1.0.1 67 | # via sphinx 68 | sphinxcontrib-qthelp==2.0.0 69 | # via sphinx 70 | sphinxcontrib-serializinghtml==2.0.0 71 | # via sphinx 72 | sphinxcontrib-spelling==8.0.1 73 | # via -r docs/requirements.in 74 | sphinxext-opengraph==0.10.0 75 | # via -r docs/requirements.in 76 | typing-extensions==4.13.2 77 | # via beautifulsoup4 78 | url-py==0.14.1 79 | # via -r docs/requirements.in 80 | urllib3==2.4.0 81 | # via requests 82 | -------------------------------------------------------------------------------- /docs/spelling-wordlist.txt: -------------------------------------------------------------------------------- 1 | iter 2 | len 3 | toolchain 4 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from tempfile import TemporaryDirectory 3 | import os 4 | 5 | import nox 6 | 7 | ROOT = Path(__file__).parent 8 | PYPROJECT = ROOT / "pyproject.toml" 9 | DOCS = ROOT / "docs" 10 | TESTS = ROOT / "tests" 11 | 12 | REQUIREMENTS = dict( 13 | docs=DOCS / "requirements.txt", 14 | tests=TESTS / "requirements.txt", 15 | ) 16 | REQUIREMENTS_IN = [ # this is actually ordered, as files depend on each other 17 | (path.parent / f"{path.stem}.in", path) for 
path in REQUIREMENTS.values() 18 | ] 19 | 20 | SUPPORTED = [ 21 | "3.9", 22 | "3.10", 23 | "pypy3.10", 24 | "3.11", 25 | "pypy3.11", 26 | "3.12", 27 | "3.13", 28 | "3.13t", 29 | ] 30 | LATEST = "3.13" 31 | 32 | nox.options.default_venv_backend = "uv|virtualenv" 33 | nox.options.sessions = [] 34 | 35 | 36 | def session(default=True, python=LATEST, **kwargs): # noqa: D103 37 | def _session(fn): 38 | if default: 39 | nox.options.sessions.append(kwargs.get("name", fn.__name__)) 40 | return nox.session(python=python, **kwargs)(fn) 41 | 42 | return _session 43 | 44 | 45 | @session(python=SUPPORTED) 46 | def tests(session): 47 | """ 48 | Run the test suite with a corresponding Python version. 49 | """ 50 | # Really we want --profile=test here (for 51 | # https://github.com/crate-py/rpds/pull/87#issuecomment-2291409297) 52 | # but it produces strange symbol errors saying: 53 | # dynamic module does not define module export function (PyInit_rpds) 54 | # so OK, dev it is. 55 | session.install( 56 | "--config-settings", 57 | "build-args=--profile=dev", 58 | "--no-cache", 59 | "-r", 60 | REQUIREMENTS["tests"], 61 | ) 62 | 63 | if session.posargs and session.posargs[0] == "coverage": 64 | if len(session.posargs) > 1 and session.posargs[1] == "github": 65 | github = Path(os.environ["GITHUB_STEP_SUMMARY"]) 66 | else: 67 | github = None 68 | 69 | session.install("coverage[toml]") 70 | session.run("coverage", "run", "-m", "pytest", TESTS) 71 | if github is None: 72 | session.run("coverage", "report") 73 | else: 74 | with github.open("a") as summary: 75 | summary.write("### Coverage\n\n") 76 | summary.flush() # without a flush, output seems out of order. 77 | session.run( 78 | "coverage", 79 | "report", 80 | "--format=markdown", 81 | stdout=summary, 82 | ) 83 | else: 84 | session.run("pytest", "--parallel-threads=10", *session.posargs, TESTS) 85 | 86 | 87 | @session() 88 | def audit(session): 89 | """ 90 | Audit dependencies for vulnerabilities. 91 | """ 92 | session.install("pip-audit", ROOT) 93 | session.run("python", "-m", "pip_audit") 94 | 95 | 96 | @session(tags=["build"]) 97 | def build(session): 98 | """ 99 | Build a distribution suitable for PyPI and check its validity. 100 | """ 101 | session.install("build", "twine") 102 | with TemporaryDirectory() as tmpdir: 103 | session.run("python", "-m", "build", ROOT, "--outdir", tmpdir) 104 | session.run("twine", "check", "--strict", tmpdir + "/*") 105 | 106 | 107 | @session(tags=["style"]) 108 | def style(session): 109 | """ 110 | Check Python code style. 111 | """ 112 | session.install("ruff") 113 | session.run("ruff", "check", TESTS, __file__) 114 | 115 | 116 | @session() 117 | def typing(session): 118 | """ 119 | Check the codebase using pyright by type checking the test suite. 120 | """ 121 | session.install("pyright", ROOT, "-r", REQUIREMENTS["tests"]) 122 | session.run("pyright", TESTS) 123 | 124 | 125 | @session(tags=["docs"]) 126 | @nox.parametrize( 127 | "builder", 128 | [ 129 | nox.param(name, id=name) 130 | for name in [ 131 | "dirhtml", 132 | "doctest", 133 | "linkcheck", 134 | "man", 135 | "spelling", 136 | ] 137 | ], 138 | ) 139 | def docs(session, builder): 140 | """ 141 | Build the documentation using a specific Sphinx builder. 
142 | """ 143 | session.install("-r", REQUIREMENTS["docs"]) 144 | with TemporaryDirectory() as tmpdir_str: 145 | tmpdir = Path(tmpdir_str) 146 | argv = ["-n", "-T", "-W"] 147 | if builder != "spelling": 148 | argv += ["-q"] 149 | posargs = session.posargs or [tmpdir / builder] 150 | session.run( 151 | "python", 152 | "-m", 153 | "sphinx", 154 | "-b", 155 | builder, 156 | DOCS, 157 | *argv, 158 | *posargs, 159 | ) 160 | 161 | 162 | @session(tags=["docs", "style"], name="docs(style)") 163 | def docs_style(session): 164 | """ 165 | Check the documentation style. 166 | """ 167 | session.install( 168 | "doc8", 169 | "pygments", 170 | "pygments-github-lexers", 171 | ) 172 | session.run("python", "-m", "doc8", "--config", PYPROJECT, DOCS) 173 | 174 | 175 | @session(default=False) 176 | def requirements(session): 177 | """ 178 | Update the project's pinned requirements. 179 | 180 | You should commit the result afterwards. 181 | """ 182 | if session.venv_backend == "uv": 183 | cmd = ["uv", "pip", "compile"] 184 | else: 185 | session.install("pip-tools") 186 | cmd = ["pip-compile", "--resolver", "backtracking", "--strip-extras"] 187 | 188 | for each, out in REQUIREMENTS_IN: 189 | # otherwise output files end up with silly absolute path comments... 190 | relative = each.relative_to(ROOT) 191 | session.run(*cmd, "--upgrade", "--output-file", out, relative) 192 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["maturin>=1.2,<2.0"] 3 | build-backend = "maturin" 4 | 5 | [project] 6 | name = "rpds-py" 7 | description = "Python bindings to Rust's persistent data structures (rpds)" 8 | requires-python = ">=3.9" 9 | readme = "README.rst" 10 | license = "MIT" 11 | license-files = ["LICENSE"] 12 | keywords = ["data structures", "rust", "persistent"] 13 | authors = [ 14 | { name = "Julian Berman", email = "Julian+rpds@GrayVines.com" }, 15 | ] 16 | classifiers = [ 17 | "Development Status :: 3 - Alpha", 18 | "Intended Audience :: Developers", 19 | "Operating System :: OS Independent", 20 | "Programming Language :: Rust", 21 | "Programming Language :: Python :: 3.9", 22 | "Programming Language :: Python :: 3.10", 23 | "Programming Language :: Python :: 3.11", 24 | "Programming Language :: Python :: 3.12", 25 | "Programming Language :: Python :: 3.13", 26 | "Programming Language :: Python :: 3", 27 | "Programming Language :: Python :: Implementation :: CPython", 28 | "Programming Language :: Python :: Implementation :: PyPy", 29 | ] 30 | dynamic = ["version"] 31 | 32 | [project.urls] 33 | Documentation = "https://rpds.readthedocs.io/" 34 | Homepage = "https://github.com/crate-py/rpds" 35 | Issues = "https://github.com/crate-py/rpds/issues/" 36 | Funding = "https://github.com/sponsors/Julian" 37 | Tidelift = "https://tidelift.com/subscription/pkg/pypi-rpds-py?utm_source=pypi-rpds-py&utm_medium=referral&utm_campaign=pypi-link" 38 | Source = "https://github.com/crate-py/rpds" 39 | Upstream = "https://github.com/orium/rpds" 40 | 41 | [tool.black] 42 | line-length = 79 43 | 44 | [tool.coverage.html] 45 | show_contexts = true 46 | skip_covered = false 47 | 48 | [tool.coverage.run] 49 | branch = true 50 | dynamic_context = "test_function" 51 | 52 | [tool.coverage.report] 53 | exclude_also = [ 54 | "if TYPE_CHECKING:", 55 | "\\s*\\.\\.\\.\\s*", 56 | ] 57 | fail_under = 100 58 | show_missing = true 59 | skip_covered = true 60 | 61 | [tool.doc8] 62 | ignore = [ 
63 | "D000", # see PyCQA/doc8#125 64 | "D001", # one sentence per line, so max length doesn't make sense 65 | ] 66 | 67 | [tool.maturin] 68 | features = ["pyo3/extension-module"] 69 | 70 | [tool.pyright] 71 | reportUnnecessaryTypeIgnoreComment = true 72 | strict = ["**/*"] 73 | exclude = [ 74 | "**/tests/__init__.py", 75 | "**/tests/test_*.py", 76 | ] 77 | 78 | [tool.ruff] 79 | line-length = 79 80 | 81 | [tool.ruff.lint] 82 | select = ["ALL"] 83 | ignore = [ 84 | "A001", # It's fine to shadow builtins 85 | "A002", 86 | "A003", 87 | "A005", 88 | "ARG", # This is all wrong whenever an interface is involved 89 | "ANN", # Just let the type checker do this 90 | "B006", # Mutable arguments require care but are OK if you don't abuse them 91 | "B008", # It's totally OK to call functions for default arguments. 92 | "B904", # raise SomeException(...) is fine. 93 | "B905", # No need for explicit strict, this is simply zip's default behavior 94 | "C408", # Calling dict is fine when it saves quoting the keys 95 | "C901", # Not really something to focus on 96 | "D105", # It's fine to not have docstrings for magic methods. 97 | "D107", # __init__ especially doesn't need a docstring 98 | "D200", # This rule makes diffs uglier when expanding docstrings 99 | "D203", # No blank lines before docstrings. 100 | "D212", # Start docstrings on the second line. 101 | "D400", # This rule misses sassy docstrings ending with ! or ? 102 | "D401", # This rule is too flaky. 103 | "D406", # Section headers should end with a colon not a newline 104 | "D407", # Underlines aren't needed 105 | "D412", # Plz spaces after section headers 106 | "EM101", # These don't bother me, it's fine there's some duplication. 107 | "EM102", 108 | "FBT", # It's worth avoiding boolean args but I don't care to enforce it 109 | "FIX", # Yes thanks, if I could it wouldn't be there 110 | "N", # These naming rules are silly 111 | "PLR0912", # These metrics are fine to be aware of but not to enforce 112 | "PLR0913", 113 | "PLR0915", 114 | "PLW2901", # Shadowing for loop variables is occasionally fine. 115 | "PT006", # pytest parametrize takes strings as well 116 | "PYI025", # wat, I'm not confused, thanks. 117 | "RET502", # Returning None implicitly is fine 118 | "RET503", 119 | "RET505", # These push you to use `if` instead of `elif`, but for no reason 120 | "RET506", 121 | "RSE102", # Ha, what, who even knew you could leave the parens off. But no. 
122 | "SIM300", # Not sure what heuristic this uses, but it's easily incorrect 123 | "SLF001", # Private usage within this package itself is fine 124 | "TD", # These TODO style rules are also silly 125 | "UP007", # We support 3.9 126 | ] 127 | 128 | [tool.ruff.lint.flake8-pytest-style] 129 | mark-parentheses = false 130 | 131 | [tool.ruff.lint.flake8-quotes] 132 | docstring-quotes = "double" 133 | 134 | [tool.ruff.lint.isort] 135 | combine-as-imports = true 136 | from-first = true 137 | known-first-party = ["rpds"] 138 | 139 | [tool.ruff.lint.per-file-ignores] 140 | "noxfile.py" = ["ANN", "D100", "S101", "T201"] 141 | "docs/*" = ["ANN", "D", "INP001"] 142 | "tests/*" = ["ANN", "B018", "D", "PLR", "RUF012", "S", "SIM", "TRY"] 143 | -------------------------------------------------------------------------------- /rpds.pyi: -------------------------------------------------------------------------------- 1 | from typing import ( 2 | ItemsView, 3 | Iterable, 4 | Iterator, 5 | KeysView, 6 | Mapping, 7 | TypeVar, 8 | ValuesView, 9 | ) 10 | 11 | _T = TypeVar("_T") 12 | _KT_co = TypeVar("_KT_co", covariant=True) 13 | _VT_co = TypeVar("_VT_co", covariant=True) 14 | _KU_co = TypeVar("_KU_co", covariant=True) 15 | _VU_co = TypeVar("_VU_co", covariant=True) 16 | 17 | class HashTrieMap(Mapping[_KT_co, _VT_co]): 18 | def __init__( 19 | self, 20 | value: Mapping[_KT_co, _VT_co] | Iterable[tuple[_KT_co, _VT_co]] = {}, 21 | **kwds: Mapping[_KT_co, _VT_co], 22 | ): ... 23 | def __getitem__(self, key: _KT_co) -> _VT_co: ... 24 | def __iter__(self) -> Iterator[_KT_co]: ... 25 | def __len__(self) -> int: ... 26 | def discard(self, key: _KT_co) -> HashTrieMap[_KT_co, _VT_co]: ... 27 | def items(self) -> ItemsView[_KT_co, _VT_co]: ... 28 | def keys(self) -> KeysView[_KT_co]: ... 29 | def values(self) -> ValuesView[_VT_co]: ... 30 | def remove(self, key: _KT_co) -> HashTrieMap[_KT_co, _VT_co]: ... 31 | def insert( 32 | self, 33 | key: _KT_co, 34 | val: _VT_co, 35 | ) -> HashTrieMap[_KT_co, _VT_co]: ... 36 | def update( 37 | self, 38 | *args: Mapping[_KU_co, _VU_co] | Iterable[tuple[_KU_co, _VU_co]], 39 | ) -> HashTrieMap[_KT_co | _KU_co, _VT_co | _VU_co]: ... 40 | @classmethod 41 | def convert( 42 | cls, 43 | value: Mapping[_KT_co, _VT_co] | Iterable[tuple[_KT_co, _VT_co]], 44 | ) -> HashTrieMap[_KT_co, _VT_co]: ... 45 | @classmethod 46 | def fromkeys( 47 | cls, 48 | keys: Iterable[_KT_co], 49 | value: _VT_co = None, 50 | ) -> HashTrieMap[_KT_co, _VT_co]: ... 51 | 52 | class HashTrieSet(frozenset[_T]): 53 | def __init__(self, value: Iterable[_T] = ()): ... 54 | def __iter__(self) -> Iterator[_T]: ... 55 | def __len__(self) -> int: ... 56 | def discard(self, value: _T) -> HashTrieSet[_T]: ... 57 | def remove(self, value: _T) -> HashTrieSet[_T]: ... 58 | def insert(self, value: _T) -> HashTrieSet[_T]: ... 59 | def update(self, *args: Iterable[_T]) -> HashTrieSet[_T]: ... 60 | 61 | class List(Iterable[_T]): 62 | def __init__(self, value: Iterable[_T] = (), *more: _T): ... 63 | def __iter__(self) -> Iterator[_T]: ... 64 | def __len__(self) -> int: ... 65 | def push_front(self, value: _T) -> List[_T]: ... 66 | def drop_first(self) -> List[_T]: ... 67 | 68 | class Queue(Iterable[_T]): 69 | def __init__(self, value: Iterable[_T] = (), *more: _T): ... 70 | def __iter__(self) -> Iterator[_T]: ... 71 | def __len__(self) -> int: ... 72 | def enqueue(self, value: _T) -> Queue[_T]: ... 73 | def dequeue(self, value: _T) -> Queue[_T]: ... 74 | @property 75 | def is_empty(self) -> _T: ... 
76 | @property 77 | def peek(self) -> _T: ... 78 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | use pyo3::exceptions::{PyIndexError, PyTypeError}; 2 | use pyo3::pyclass::CompareOp; 3 | use pyo3::types::{PyDict, PyIterator, PyTuple, PyType}; 4 | use pyo3::{exceptions::PyKeyError, types::PyMapping, types::PyTupleMethods}; 5 | use pyo3::{prelude::*, BoundObject, PyTypeInfo}; 6 | use rpds::{ 7 | HashTrieMap, HashTrieMapSync, HashTrieSet, HashTrieSetSync, List, ListSync, Queue, QueueSync, 8 | }; 9 | use std::collections::hash_map::DefaultHasher; 10 | use std::hash::{Hash, Hasher}; 11 | 12 | fn hash_shuffle_bits(h: usize) -> usize { 13 | ((h ^ 89869747) ^ (h << 16)).wrapping_mul(3644798167) 14 | } 15 | 16 | #[derive(Debug)] 17 | struct Key { 18 | hash: isize, 19 | inner: PyObject, 20 | } 21 | 22 | impl<'py> IntoPyObject<'py> for Key { 23 | type Target = PyAny; 24 | type Output = Bound<'py, Self::Target>; 25 | type Error = std::convert::Infallible; 26 | 27 | fn into_pyobject(self, py: Python<'py>) -> Result { 28 | Ok(self.inner.into_bound(py)) 29 | } 30 | } 31 | 32 | impl<'a, 'py> IntoPyObject<'py> for &'a Key { 33 | type Target = PyAny; 34 | type Output = Borrowed<'a, 'py, Self::Target>; 35 | type Error = std::convert::Infallible; 36 | 37 | fn into_pyobject(self, py: Python<'py>) -> Result { 38 | Ok(self.inner.bind_borrowed(py)) 39 | } 40 | } 41 | 42 | impl Hash for Key { 43 | fn hash(&self, state: &mut H) { 44 | state.write_isize(self.hash); 45 | } 46 | } 47 | 48 | impl Eq for Key {} 49 | 50 | impl PartialEq for Key { 51 | fn eq(&self, other: &Self) -> bool { 52 | Python::with_gil(|py| { 53 | self.inner 54 | .call_method1(py, "__eq__", (&other.inner,)) 55 | .and_then(|value| value.extract(py)) 56 | .expect("__eq__ failed!") 57 | }) 58 | } 59 | } 60 | 61 | impl Key { 62 | fn clone_ref(&self, py: Python<'_>) -> Self { 63 | Key { 64 | hash: self.hash, 65 | inner: self.inner.clone_ref(py), 66 | } 67 | } 68 | } 69 | 70 | impl<'source> FromPyObject<'source> for Key { 71 | fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult { 72 | Ok(Key { 73 | hash: ob.hash()?, 74 | inner: ob.clone().unbind(), 75 | }) 76 | } 77 | } 78 | 79 | #[repr(transparent)] 80 | #[pyclass(name = "HashTrieMap", module = "rpds", frozen, mapping)] 81 | struct HashTrieMapPy { 82 | inner: HashTrieMapSync, 83 | } 84 | 85 | impl From> for HashTrieMapPy { 86 | fn from(map: HashTrieMapSync) -> Self { 87 | HashTrieMapPy { inner: map } 88 | } 89 | } 90 | 91 | impl<'source> FromPyObject<'source> for HashTrieMapPy { 92 | fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult { 93 | let mut ret = HashTrieMap::new_sync(); 94 | if let Ok(mapping) = ob.downcast::() { 95 | for each in mapping.items()?.iter() { 96 | let (k, v): (Key, PyObject) = each.extract()?; 97 | ret.insert_mut(k, v); 98 | } 99 | } else { 100 | for each in ob.try_iter()? 
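// each item yielded by the iterator is extracted as a (key, value) pair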
{ 101 | let (k, v) = each?.extract()?; 102 | ret.insert_mut(k, v); 103 | } 104 | } 105 | Ok(HashTrieMapPy { inner: ret }) 106 | } 107 | } 108 | 109 | #[pymethods] 110 | impl HashTrieMapPy { 111 | #[new] 112 | #[pyo3(signature = (value=None, ** kwds))] 113 | fn init(value: Option, kwds: Option<&Bound<'_, PyDict>>) -> PyResult { 114 | let mut map: HashTrieMapPy; 115 | if let Some(value) = value { 116 | map = value; 117 | } else { 118 | map = HashTrieMapPy { 119 | inner: HashTrieMap::new_sync(), 120 | }; 121 | } 122 | if let Some(kwds) = kwds { 123 | for (k, v) in kwds { 124 | map.inner.insert_mut(Key::extract_bound(&k)?, v.into()); 125 | } 126 | } 127 | Ok(map) 128 | } 129 | 130 | fn __contains__(&self, key: Key) -> bool { 131 | self.inner.contains_key(&key) 132 | } 133 | 134 | fn __iter__(slf: PyRef<'_, Self>) -> KeysIterator { 135 | KeysIterator { 136 | inner: slf.inner.clone(), 137 | } 138 | } 139 | 140 | fn __getitem__(&self, key: Key, py: Python) -> PyResult { 141 | match self.inner.get(&key) { 142 | Some(value) => Ok(value.clone_ref(py)), 143 | None => Err(PyKeyError::new_err(key)), 144 | } 145 | } 146 | 147 | fn __len__(&self) -> usize { 148 | self.inner.size() 149 | } 150 | 151 | fn __repr__(&self, py: Python) -> String { 152 | let contents = self.inner.into_iter().map(|(k, v)| { 153 | format!( 154 | "{}: {}", 155 | k.inner 156 | .call_method0(py, "__repr__") 157 | .and_then(|r| r.extract(py)) 158 | .unwrap_or("".to_owned()), 159 | v.call_method0(py, "__repr__") 160 | .and_then(|r| r.extract(py)) 161 | .unwrap_or("".to_owned()) 162 | ) 163 | }); 164 | format!( 165 | "HashTrieMap({{{}}})", 166 | contents.collect::>().join(", ") 167 | ) 168 | } 169 | 170 | fn __richcmp__<'py>(&self, other: &Self, op: CompareOp, py: Python<'py>) -> PyResult { 171 | match op { 172 | CompareOp::Eq => (self.inner.size() == other.inner.size() 173 | && self 174 | .inner 175 | .iter() 176 | .map(|(k1, v1)| (v1, other.inner.get(k1))) 177 | .map(|(v1, v2)| v1.bind(py).eq(v2)) 178 | .all(|r| r.unwrap_or(false))) 179 | .into_pyobject(py) 180 | .map_err(Into::into) 181 | .map(BoundObject::into_any) 182 | .map(BoundObject::unbind), 183 | CompareOp::Ne => (self.inner.size() != other.inner.size() 184 | || self 185 | .inner 186 | .iter() 187 | .map(|(k1, v1)| (v1, other.inner.get(k1))) 188 | .map(|(v1, v2)| v1.bind(py).ne(v2)) 189 | .all(|r| r.unwrap_or(true))) 190 | .into_pyobject(py) 191 | .map_err(Into::into) 192 | .map(BoundObject::into_any) 193 | .map(BoundObject::unbind), 194 | _ => Ok(py.NotImplemented()), 195 | } 196 | } 197 | 198 | fn __hash__(&self, py: Python) -> PyResult { 199 | // modified from https://github.com/python/cpython/blob/d69529d31ccd1510843cfac1ab53bb8cb027541f/Objects/setobject.c#L715 200 | 201 | let mut hash_val = self 202 | .inner 203 | .iter() 204 | .map(|(key, val)| { 205 | let mut hasher = DefaultHasher::new(); 206 | let val_bound = val.bind(py); 207 | 208 | let key_hash = key.hash; 209 | let val_hash = val_bound.hash().map_err(|_| { 210 | PyTypeError::new_err(format!( 211 | "Unhashable type in HashTrieMap of key {}: {}", 212 | key.inner 213 | .bind(py) 214 | .repr() 215 | .and_then(|r| r.extract()) 216 | .unwrap_or(" error".to_string()), 217 | val_bound 218 | .repr() 219 | .and_then(|r| r.extract()) 220 | .unwrap_or(" error".to_string()) 221 | )) 222 | })?; 223 | 224 | hasher.write_isize(key_hash); 225 | hasher.write_isize(val_hash); 226 | 227 | Ok(hasher.finish() as usize) 228 | }) 229 | .try_fold(0, |acc: usize, x: PyResult| { 230 | PyResult::::Ok(acc ^ hash_shuffle_bits(x?)) 231 | 
})?; 232 | 233 | // factor in the number of entries in the collection 234 | hash_val ^= self.inner.size().wrapping_add(1).wrapping_mul(1927868237); 235 | 236 | // dispense patterns in the hash value 237 | hash_val ^= (hash_val >> 11) ^ (hash_val >> 25); 238 | hash_val = hash_val.wrapping_mul(69069).wrapping_add(907133923); 239 | 240 | Ok(hash_val as isize) 241 | } 242 | 243 | fn __reduce__(slf: PyRef) -> (Bound<'_, PyType>, (Vec<(Key, PyObject)>,)) { 244 | ( 245 | HashTrieMapPy::type_object(slf.py()), 246 | (slf.inner 247 | .iter() 248 | .map(|(k, v)| (k.clone_ref(slf.py()), v.clone_ref(slf.py()))) 249 | .collect(),), 250 | ) 251 | } 252 | 253 | #[classmethod] 254 | fn convert( 255 | _cls: &Bound<'_, PyType>, 256 | value: Bound<'_, PyAny>, 257 | py: Python, 258 | ) -> PyResult { 259 | if value.is_instance_of::() { 260 | Ok(value.unbind()) 261 | } else { 262 | HashTrieMapPy::extract_bound(&value)? 263 | .into_pyobject(py) 264 | .map(BoundObject::into_any) 265 | .map(BoundObject::unbind) 266 | } 267 | } 268 | 269 | #[classmethod] 270 | #[pyo3(signature = (keys, val=None))] 271 | fn fromkeys( 272 | _cls: &Bound<'_, PyType>, 273 | keys: &Bound<'_, PyAny>, 274 | val: Option<&Bound<'_, PyAny>>, 275 | py: Python, 276 | ) -> PyResult { 277 | let mut inner = HashTrieMap::new_sync(); 278 | let none = py.None().into_bound(py); 279 | let value = val.unwrap_or(&none); 280 | for each in keys.try_iter()? { 281 | let key = Key::extract_bound(&each?)?; 282 | inner.insert_mut(key, value.clone().unbind()); 283 | } 284 | Ok(HashTrieMapPy { inner }) 285 | } 286 | 287 | #[pyo3(signature = (key, default=None))] 288 | fn get(&self, key: Key, default: Option, py: Python) -> Option { 289 | if let Some(value) = self.inner.get(&key) { 290 | Some(value.clone_ref(py)) 291 | } else { 292 | default 293 | } 294 | } 295 | 296 | fn keys(&self) -> KeysView { 297 | KeysView { 298 | inner: self.inner.clone(), 299 | } 300 | } 301 | 302 | fn values(&self) -> ValuesView { 303 | ValuesView { 304 | inner: self.inner.clone(), 305 | } 306 | } 307 | 308 | fn items(&self) -> ItemsView { 309 | ItemsView { 310 | inner: self.inner.clone(), 311 | } 312 | } 313 | 314 | fn discard(&self, key: Key) -> PyResult { 315 | match self.inner.contains_key(&key) { 316 | true => Ok(HashTrieMapPy { 317 | inner: self.inner.remove(&key), 318 | }), 319 | false => Ok(HashTrieMapPy { 320 | inner: self.inner.clone(), 321 | }), 322 | } 323 | } 324 | 325 | fn insert(&self, key: Key, value: Bound<'_, PyAny>) -> HashTrieMapPy { 326 | HashTrieMapPy { 327 | inner: self.inner.insert(key, value.unbind()), 328 | } 329 | } 330 | 331 | fn remove(&self, key: Key) -> PyResult { 332 | match self.inner.contains_key(&key) { 333 | true => Ok(HashTrieMapPy { 334 | inner: self.inner.remove(&key), 335 | }), 336 | false => Err(PyKeyError::new_err(key)), 337 | } 338 | } 339 | 340 | #[pyo3(signature = (*maps, **kwds))] 341 | fn update( 342 | &self, 343 | maps: &Bound<'_, PyTuple>, 344 | kwds: Option<&Bound<'_, PyDict>>, 345 | ) -> PyResult { 346 | let mut inner = self.inner.clone(); 347 | for value in maps { 348 | let map = HashTrieMapPy::extract_bound(&value)?; 349 | for (k, v) in &map.inner { 350 | inner.insert_mut(k.clone_ref(value.py()), v.clone_ref(value.py())); 351 | } 352 | } 353 | if let Some(kwds) = kwds { 354 | for (k, v) in kwds { 355 | inner.insert_mut(Key::extract_bound(&k)?, v.extract()?); 356 | } 357 | } 358 | Ok(HashTrieMapPy { inner }) 359 | } 360 | } 361 | 362 | #[pyclass(module = "rpds")] 363 | struct KeysIterator { 364 | inner: HashTrieMapSync, 365 | } 366 | 367 
| #[pymethods] 368 | impl KeysIterator { 369 | fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { 370 | slf 371 | } 372 | 373 | fn __next__(mut slf: PyRefMut<'_, Self>) -> Option { 374 | let first = slf.inner.keys().next()?.clone_ref(slf.py()); 375 | slf.inner = slf.inner.remove(&first); 376 | Some(first) 377 | } 378 | } 379 | 380 | #[pyclass(module = "rpds")] 381 | struct ValuesIterator { 382 | inner: HashTrieMapSync, 383 | } 384 | 385 | #[pymethods] 386 | impl ValuesIterator { 387 | fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { 388 | slf 389 | } 390 | 391 | fn __next__(mut slf: PyRefMut<'_, Self>) -> Option { 392 | let kv = slf.inner.iter().next()?; 393 | let value = kv.1.clone_ref(slf.py()); 394 | slf.inner = slf.inner.remove(kv.0); 395 | Some(value) 396 | } 397 | } 398 | 399 | #[pyclass(module = "rpds")] 400 | struct ItemsIterator { 401 | inner: HashTrieMapSync, 402 | } 403 | 404 | #[pymethods] 405 | impl ItemsIterator { 406 | fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { 407 | slf 408 | } 409 | 410 | fn __next__(mut slf: PyRefMut<'_, Self>) -> Option<(Key, PyObject)> { 411 | let kv = slf.inner.iter().next()?; 412 | let key = kv.0.clone_ref(slf.py()); 413 | let value = kv.1.clone_ref(slf.py()); 414 | 415 | slf.inner = slf.inner.remove(kv.0); 416 | 417 | Some((key, value)) 418 | } 419 | } 420 | 421 | #[pyclass(module = "rpds")] 422 | struct KeysView { 423 | inner: HashTrieMapSync, 424 | } 425 | 426 | #[pymethods] 427 | impl KeysView { 428 | fn __contains__(&self, key: Key) -> bool { 429 | self.inner.contains_key(&key) 430 | } 431 | 432 | fn __eq__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 433 | let abc = PyModule::import(py, "collections.abc")?; 434 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() { 435 | return Ok(false); 436 | } 437 | for each in other.try_iter()? { 438 | if !slf.inner.contains_key(&Key::extract_bound(&each?)?) { 439 | return Ok(false); 440 | } 441 | } 442 | Ok(true) 443 | } 444 | 445 | fn __lt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 446 | let abc = PyModule::import(py, "collections.abc")?; 447 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() { 448 | return Ok(false); 449 | } 450 | 451 | for each in slf.inner.keys() { 452 | if !other.contains(each.inner.clone_ref(slf.py()))? { 453 | return Ok(false); 454 | } 455 | } 456 | Ok(true) 457 | } 458 | 459 | fn __le__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 460 | let abc = PyModule::import(py, "collections.abc")?; 461 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() { 462 | return Ok(false); 463 | } 464 | 465 | for each in slf.inner.keys() { 466 | if !other.contains(each.inner.clone_ref(slf.py()))? { 467 | return Ok(false); 468 | } 469 | } 470 | Ok(true) 471 | } 472 | 473 | fn __gt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 474 | let abc = PyModule::import(py, "collections.abc")?; 475 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() { 476 | return Ok(false); 477 | } 478 | for each in other.try_iter()? { 479 | if !slf.inner.contains_key(&Key::extract_bound(&each?)?) { 480 | return Ok(false); 481 | } 482 | } 483 | Ok(true) 484 | } 485 | 486 | fn __ge__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 487 | let abc = PyModule::import(py, "collections.abc")?; 488 | if !other.is_instance(&abc.getattr("Set")?)? 
|| other.len()? > slf.inner.size() { 489 | return Ok(false); 490 | } 491 | for each in other.try_iter()? { 492 | if !slf.inner.contains_key(&Key::extract_bound(&each?)?) { 493 | return Ok(false); 494 | } 495 | } 496 | Ok(true) 497 | } 498 | 499 | fn __iter__(slf: PyRef<'_, Self>) -> KeysIterator { 500 | KeysIterator { 501 | inner: slf.inner.clone(), 502 | } 503 | } 504 | 505 | fn __len__(slf: PyRef<'_, Self>) -> usize { 506 | slf.inner.size() 507 | } 508 | 509 | fn __and__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>) -> PyResult { 510 | KeysView::intersection(slf, other) 511 | } 512 | 513 | fn __or__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 514 | KeysView::union(slf, other, py) 515 | } 516 | 517 | fn __repr__(&self, py: Python) -> PyResult { 518 | let contents = self.inner.into_iter().map(|(k, _)| { 519 | Ok(k.clone_ref(py) 520 | .inner 521 | .into_pyobject(py)? 522 | .call_method0("__repr__") 523 | .and_then(|r| r.extract()) 524 | .unwrap_or("".to_owned())) 525 | }); 526 | let contents = contents.collect::, PyErr>>()?; 527 | Ok(format!("keys_view({{{}}})", contents.join(", "))) 528 | } 529 | 530 | fn intersection(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>) -> PyResult { 531 | // TODO: iterate over the shorter one if it's got a length 532 | let mut inner = HashTrieSet::new_sync(); 533 | for each in other.try_iter()? { 534 | let key = Key::extract_bound(&each?)?; 535 | if slf.inner.contains_key(&key) { 536 | inner.insert_mut(key); 537 | } 538 | } 539 | Ok(HashTrieSetPy { inner }) 540 | } 541 | 542 | fn union(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 543 | // There doesn't seem to be a low-effort way to get a HashTrieSet out of a map, 544 | // so we just keep our map and add values we'll ignore. 545 | let mut inner = slf.inner.clone(); 546 | for each in other.try_iter()? { 547 | inner.insert_mut(Key::extract_bound(&each?)?, py.None()); 548 | } 549 | Ok(KeysView { inner }) 550 | } 551 | } 552 | 553 | #[pyclass(module = "rpds")] 554 | struct ValuesView { 555 | inner: HashTrieMapSync, 556 | } 557 | 558 | #[pymethods] 559 | impl ValuesView { 560 | fn __iter__(slf: PyRef<'_, Self>) -> ValuesIterator { 561 | ValuesIterator { 562 | inner: slf.inner.clone(), 563 | } 564 | } 565 | 566 | fn __len__(slf: PyRef<'_, Self>) -> usize { 567 | slf.inner.size() 568 | } 569 | 570 | fn __repr__(&self, py: Python) -> PyResult { 571 | let contents = self.inner.into_iter().map(|(_, v)| { 572 | Ok(v.into_pyobject(py)? 
573 | .call_method0("__repr__") 574 | .and_then(|r| r.extract()) 575 | .unwrap_or("".to_owned())) 576 | }); 577 | let contents = contents.collect::, PyErr>>()?; 578 | Ok(format!("values_view([{}])", contents.join(", "))) 579 | } 580 | } 581 | 582 | #[pyclass(module = "rpds")] 583 | struct ItemsView { 584 | inner: HashTrieMapSync, 585 | } 586 | 587 | #[derive(FromPyObject)] 588 | struct ItemViewQuery(Key, PyObject); 589 | 590 | #[pymethods] 591 | impl ItemsView { 592 | fn __contains__(slf: PyRef<'_, Self>, item: ItemViewQuery) -> PyResult { 593 | if let Some(value) = slf.inner.get(&item.0) { 594 | return item.1.bind(slf.py()).eq(value); 595 | } 596 | 597 | Ok(false) 598 | } 599 | 600 | fn __iter__(slf: PyRef<'_, Self>) -> ItemsIterator { 601 | ItemsIterator { 602 | inner: slf.inner.clone(), 603 | } 604 | } 605 | 606 | fn __len__(slf: PyRef<'_, Self>) -> usize { 607 | slf.inner.size() 608 | } 609 | 610 | fn __eq__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 611 | let abc = PyModule::import(py, "collections.abc")?; 612 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() { 613 | return Ok(false); 614 | } 615 | for (k, v) in slf.inner.iter() { 616 | if !other.contains((k.inner.clone_ref(slf.py()), v))? { 617 | return Ok(false); 618 | } 619 | } 620 | Ok(true) 621 | } 622 | 623 | fn __repr__(&self, py: Python) -> PyResult { 624 | let contents = self.inner.into_iter().map(|(k, v)| { 625 | let tuple = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?; 626 | Ok(format!("{:?}", tuple)) 627 | }); 628 | let contents = contents.collect::, PyErr>>()?; 629 | Ok(format!("items_view([{}])", contents.join(", "))) 630 | } 631 | 632 | fn __lt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 633 | let abc = PyModule::import(py, "collections.abc")?; 634 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() { 635 | return Ok(false); 636 | } 637 | for (k, v) in slf.inner.iter() { 638 | let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?; 639 | // FIXME: needs to compare 640 | if !other.contains(pair)? { 641 | return Ok(false); 642 | } 643 | } 644 | Ok(true) 645 | } 646 | 647 | fn __le__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 648 | let abc = PyModule::import(py, "collections.abc")?; 649 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() { 650 | return Ok(false); 651 | } 652 | for (k, v) in slf.inner.iter() { 653 | let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?; 654 | // FIXME: needs to compare 655 | if !other.contains(pair)? { 656 | return Ok(false); 657 | } 658 | } 659 | Ok(true) 660 | } 661 | 662 | fn __gt__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 663 | let abc = PyModule::import(py, "collections.abc")?; 664 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() { 665 | return Ok(false); 666 | } 667 | for each in other.try_iter()? { 668 | let kv = each?; 669 | let k = kv.get_item(0)?; 670 | match slf.inner.get(&Key::extract_bound(&k)?) { 671 | Some(value) => { 672 | let pair = PyTuple::new(py, [k, value.bind(py).clone()])?; 673 | if !pair.eq(kv)? 
{ 674 | return Ok(false); 675 | } 676 | } 677 | None => return Ok(false), 678 | } 679 | } 680 | Ok(true) 681 | } 682 | 683 | fn __ge__(slf: PyRef<'_, Self>, other: &Bound<'_, PyAny>, py: Python) -> PyResult { 684 | let abc = PyModule::import(py, "collections.abc")?; 685 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() { 686 | return Ok(false); 687 | } 688 | for each in other.try_iter()? { 689 | let kv = each?; 690 | let k = kv.get_item(0)?; 691 | match slf.inner.get(&Key::extract_bound(&k)?) { 692 | Some(value) => { 693 | let pair = PyTuple::new(py, [k, value.bind(py).clone()])?; 694 | if !pair.eq(kv)? { 695 | return Ok(false); 696 | } 697 | } 698 | None => return Ok(false), 699 | } 700 | } 701 | Ok(true) 702 | } 703 | 704 | fn __and__( 705 | slf: PyRef<'_, Self>, 706 | other: &Bound<'_, PyAny>, 707 | py: Python, 708 | ) -> PyResult { 709 | ItemsView::intersection(slf, other, py) 710 | } 711 | 712 | fn __or__( 713 | slf: PyRef<'_, Self>, 714 | other: &Bound<'_, PyAny>, 715 | py: Python, 716 | ) -> PyResult { 717 | ItemsView::union(slf, other, py) 718 | } 719 | 720 | fn intersection( 721 | slf: PyRef<'_, Self>, 722 | other: &Bound<'_, PyAny>, 723 | py: Python, 724 | ) -> PyResult { 725 | // TODO: iterate over the shorter one if it's got a length 726 | let mut inner = HashTrieSet::new_sync(); 727 | for each in other.try_iter()? { 728 | let kv = each?; 729 | let k = kv.get_item(0)?; 730 | if let Some(value) = slf.inner.get(&Key::extract_bound(&k)?) { 731 | let pair = PyTuple::new(py, [k, value.bind(py).clone()])?; 732 | if pair.eq(kv)? { 733 | inner.insert_mut(Key::extract_bound(&pair)?); 734 | } 735 | } 736 | } 737 | Ok(HashTrieSetPy { inner }) 738 | } 739 | 740 | fn union( 741 | slf: PyRef<'_, Self>, 742 | other: &Bound<'_, PyAny>, 743 | py: Python, 744 | ) -> PyResult { 745 | // TODO: this is very inefficient, but again can't seem to get a HashTrieSet out of ourself 746 | let mut inner = HashTrieSet::new_sync(); 747 | for (k, v) in slf.inner.iter() { 748 | let pair = PyTuple::new(py, [k.inner.clone_ref(py), v.clone_ref(py)])?; 749 | inner.insert_mut(Key::extract_bound(&pair)?); 750 | } 751 | for each in other.try_iter()? { 752 | inner.insert_mut(Key::extract_bound(&each?)?); 753 | } 754 | Ok(HashTrieSetPy { inner }) 755 | } 756 | } 757 | 758 | #[repr(transparent)] 759 | #[pyclass(name = "HashTrieSet", module = "rpds", frozen)] 760 | struct HashTrieSetPy { 761 | inner: HashTrieSetSync, 762 | } 763 | 764 | impl<'source> FromPyObject<'source> for HashTrieSetPy { 765 | fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult { 766 | let mut ret = HashTrieSet::new_sync(); 767 | for each in ob.try_iter()? 
{ 768 | let k: Key = each?.extract()?; 769 | ret.insert_mut(k); 770 | } 771 | Ok(HashTrieSetPy { inner: ret }) 772 | } 773 | } 774 | 775 | #[pymethods] 776 | impl HashTrieSetPy { 777 | #[new] 778 | #[pyo3(signature = (value=None))] 779 | fn init(value: Option) -> Self { 780 | if let Some(value) = value { 781 | value 782 | } else { 783 | HashTrieSetPy { 784 | inner: HashTrieSet::new_sync(), 785 | } 786 | } 787 | } 788 | 789 | fn __contains__(&self, key: Key) -> bool { 790 | self.inner.contains(&key) 791 | } 792 | 793 | fn __and__(&self, other: &Self, py: Python) -> Self { 794 | self.intersection(other, py) 795 | } 796 | 797 | fn __or__(&self, other: &Self, py: Python) -> Self { 798 | self.union(other, py) 799 | } 800 | 801 | fn __sub__(&self, other: &Self) -> Self { 802 | self.difference(other) 803 | } 804 | 805 | fn __xor__(&self, other: &Self, py: Python) -> Self { 806 | self.symmetric_difference(other, py) 807 | } 808 | 809 | fn __iter__(slf: PyRef<'_, Self>) -> SetIterator { 810 | SetIterator { 811 | inner: slf.inner.clone(), 812 | } 813 | } 814 | 815 | fn __len__(&self) -> usize { 816 | self.inner.size() 817 | } 818 | 819 | fn __repr__(&self, py: Python) -> PyResult { 820 | let contents = self.inner.into_iter().map(|k| { 821 | Ok(k.clone_ref(py) 822 | .into_pyobject(py)? 823 | .call_method0("__repr__") 824 | .and_then(|r| r.extract()) 825 | .unwrap_or("".to_owned())) 826 | }); 827 | let contents = contents.collect::, PyErr>>()?; 828 | Ok(format!("HashTrieSet({{{}}})", contents.join(", "))) 829 | } 830 | 831 | fn __eq__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult { 832 | let abc = PyModule::import(py, "collections.abc")?; 833 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? != slf.inner.size() { 834 | return Ok(false); 835 | } 836 | for each in other.try_iter()? { 837 | if !slf.inner.contains(&Key::extract_bound(&each?)?) { 838 | return Ok(false); 839 | } 840 | } 841 | Ok(true) 842 | } 843 | 844 | fn __hash__(&self) -> PyResult { 845 | // modified from https://github.com/python/cpython/blob/d69529d31ccd1510843cfac1ab53bb8cb027541f/Objects/setobject.c#L715 846 | 847 | let mut hash_val = self 848 | .inner 849 | .iter() 850 | .map(|k| k.hash as usize) 851 | .fold(0, |acc: usize, x: usize| acc ^ hash_shuffle_bits(x)); 852 | 853 | // factor in the number of entries in the collection 854 | hash_val ^= self.inner.size().wrapping_add(1).wrapping_mul(1927868237); 855 | 856 | // dispense patterns in the hash value 857 | hash_val ^= (hash_val >> 11) ^ (hash_val >> 25); 858 | hash_val = hash_val.wrapping_mul(69069).wrapping_add(907133923); 859 | 860 | Ok(hash_val as isize) 861 | } 862 | 863 | fn __lt__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult { 864 | let abc = PyModule::import(py, "collections.abc")?; 865 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? <= slf.inner.size() { 866 | return Ok(false); 867 | } 868 | for each in slf.inner.iter() { 869 | if !other.contains(each.inner.clone_ref(py))? { 870 | return Ok(false); 871 | } 872 | } 873 | Ok(true) 874 | } 875 | 876 | fn __le__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult { 877 | let abc = PyModule::import(py, "collections.abc")?; 878 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? < slf.inner.size() { 879 | return Ok(false); 880 | } 881 | for each in slf.inner.iter() { 882 | if !other.contains(each.inner.clone_ref(slf.py()))? 
{ 883 | return Ok(false); 884 | } 885 | } 886 | Ok(true) 887 | } 888 | 889 | fn __gt__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult { 890 | let abc = PyModule::import(py, "collections.abc")?; 891 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? >= slf.inner.size() { 892 | return Ok(false); 893 | } 894 | for each in other.try_iter()? { 895 | if !slf.inner.contains(&Key::extract_bound(&each?)?) { 896 | return Ok(false); 897 | } 898 | } 899 | Ok(true) 900 | } 901 | 902 | fn __ge__(slf: PyRef<'_, Self>, other: Bound<'_, PyAny>, py: Python) -> PyResult { 903 | let abc = PyModule::import(py, "collections.abc")?; 904 | if !other.is_instance(&abc.getattr("Set")?)? || other.len()? > slf.inner.size() { 905 | return Ok(false); 906 | } 907 | for each in other.try_iter()? { 908 | if !slf.inner.contains(&Key::extract_bound(&each?)?) { 909 | return Ok(false); 910 | } 911 | } 912 | Ok(true) 913 | } 914 | 915 | fn __reduce__(slf: PyRef) -> (Bound<'_, PyType>, (Vec,)) { 916 | ( 917 | HashTrieSetPy::type_object(slf.py()), 918 | (slf.inner.iter().map(|e| e.clone_ref(slf.py())).collect(),), 919 | ) 920 | } 921 | 922 | fn insert(&self, value: Key) -> HashTrieSetPy { 923 | HashTrieSetPy { 924 | inner: self.inner.insert(value), 925 | } 926 | } 927 | 928 | fn discard(&self, value: Key) -> PyResult { 929 | match self.inner.contains(&value) { 930 | true => Ok(HashTrieSetPy { 931 | inner: self.inner.remove(&value), 932 | }), 933 | false => Ok(HashTrieSetPy { 934 | inner: self.inner.clone(), 935 | }), 936 | } 937 | } 938 | 939 | fn remove(&self, value: Key) -> PyResult { 940 | match self.inner.contains(&value) { 941 | true => Ok(HashTrieSetPy { 942 | inner: self.inner.remove(&value), 943 | }), 944 | false => Err(PyKeyError::new_err(value)), 945 | } 946 | } 947 | 948 | fn difference(&self, other: &Self) -> HashTrieSetPy { 949 | let mut inner = self.inner.clone(); 950 | for value in other.inner.iter() { 951 | inner.remove_mut(value); 952 | } 953 | HashTrieSetPy { inner } 954 | } 955 | 956 | fn intersection(&self, other: &Self, py: Python) -> HashTrieSetPy { 957 | let mut inner: HashTrieSetSync = HashTrieSet::new_sync(); 958 | let larger: &HashTrieSetSync; 959 | let iter; 960 | if self.inner.size() > other.inner.size() { 961 | larger = &self.inner; 962 | iter = other.inner.iter(); 963 | } else { 964 | larger = &other.inner; 965 | iter = self.inner.iter(); 966 | } 967 | for value in iter { 968 | if larger.contains(value) { 969 | inner.insert_mut(value.clone_ref(py)); 970 | } 971 | } 972 | HashTrieSetPy { inner } 973 | } 974 | 975 | fn symmetric_difference(&self, other: &Self, py: Python) -> HashTrieSetPy { 976 | let mut inner: HashTrieSetSync; 977 | let iter; 978 | if self.inner.size() > other.inner.size() { 979 | inner = self.inner.clone(); 980 | iter = other.inner.iter(); 981 | } else { 982 | inner = other.inner.clone(); 983 | iter = self.inner.iter(); 984 | } 985 | for value in iter { 986 | if inner.contains(value) { 987 | inner.remove_mut(value); 988 | } else { 989 | inner.insert_mut(value.clone_ref(py)); 990 | } 991 | } 992 | HashTrieSetPy { inner } 993 | } 994 | 995 | fn union(&self, other: &Self, py: Python) -> HashTrieSetPy { 996 | let mut inner: HashTrieSetSync; 997 | let iter; 998 | if self.inner.size() > other.inner.size() { 999 | inner = self.inner.clone(); 1000 | iter = other.inner.iter(); 1001 | } else { 1002 | inner = other.inner.clone(); 1003 | iter = self.inner.iter(); 1004 | } 1005 | for value in iter { 1006 | inner.insert_mut(value.clone_ref(py)); 1007 | } 1008 | 
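// union keeps a clone of whichever operand is larger and folds the smaller
// one into it, so the number of insert_mut calls is bounded by the size of
// the smaller set.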
HashTrieSetPy { inner } 1009 | } 1010 | 1011 | #[pyo3(signature = (*iterables))] 1012 | fn update(&self, iterables: Bound<'_, PyTuple>) -> PyResult { 1013 | let mut inner = self.inner.clone(); 1014 | for each in iterables { 1015 | let iter = each.try_iter()?; 1016 | for value in iter { 1017 | inner.insert_mut(Key::extract_bound(&value?)?); 1018 | } 1019 | } 1020 | Ok(HashTrieSetPy { inner }) 1021 | } 1022 | } 1023 | 1024 | #[pyclass(module = "rpds")] 1025 | struct SetIterator { 1026 | inner: HashTrieSetSync, 1027 | } 1028 | 1029 | #[pymethods] 1030 | impl SetIterator { 1031 | fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { 1032 | slf 1033 | } 1034 | 1035 | fn __next__(mut slf: PyRefMut<'_, Self>) -> Option { 1036 | let first = slf.inner.iter().next()?.clone_ref(slf.py()); 1037 | slf.inner = slf.inner.remove(&first); 1038 | Some(first) 1039 | } 1040 | } 1041 | 1042 | #[repr(transparent)] 1043 | #[pyclass(name = "List", module = "rpds", frozen, sequence)] 1044 | struct ListPy { 1045 | inner: ListSync, 1046 | } 1047 | 1048 | impl From> for ListPy { 1049 | fn from(elements: ListSync) -> Self { 1050 | ListPy { inner: elements } 1051 | } 1052 | } 1053 | 1054 | impl<'source> FromPyObject<'source> for ListPy { 1055 | fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult { 1056 | let mut ret = List::new_sync(); 1057 | let reversed = PyModule::import(ob.py(), "builtins")?.getattr("reversed")?; 1058 | let rob: Bound<'_, PyIterator> = reversed.call1((ob,))?.try_iter()?; 1059 | for each in rob { 1060 | ret.push_front_mut(each?.extract()?); 1061 | } 1062 | Ok(ListPy { inner: ret }) 1063 | } 1064 | } 1065 | 1066 | #[pymethods] 1067 | impl ListPy { 1068 | #[new] 1069 | #[pyo3(signature = (*elements))] 1070 | fn init(elements: &Bound<'_, PyTuple>) -> PyResult { 1071 | let mut ret: ListPy; 1072 | if elements.len() == 1 { 1073 | ret = elements.get_item(0)?.extract()?; 1074 | } else { 1075 | ret = ListPy { 1076 | inner: List::new_sync(), 1077 | }; 1078 | if elements.len() > 1 { 1079 | for each in (0..elements.len()).rev() { 1080 | ret.inner 1081 | .push_front_mut(elements.get_item(each)?.extract()?); 1082 | } 1083 | } 1084 | } 1085 | Ok(ret) 1086 | } 1087 | 1088 | fn __len__(&self) -> usize { 1089 | self.inner.len() 1090 | } 1091 | 1092 | fn __repr__(&self, py: Python) -> PyResult { 1093 | let contents = self.inner.into_iter().map(|k| { 1094 | Ok(k.into_pyobject(py)? 
1095 | .call_method0("__repr__") 1096 | .and_then(|r| r.extract()) 1097 | .unwrap_or("".to_owned())) 1098 | }); 1099 | let contents = contents.collect::, PyErr>>()?; 1100 | Ok(format!("List([{}])", contents.join(", "))) 1101 | } 1102 | 1103 | fn __richcmp__(&self, other: &Self, op: CompareOp, py: Python<'_>) -> PyResult { 1104 | match op { 1105 | CompareOp::Eq => (self.inner.len() == other.inner.len() 1106 | && self 1107 | .inner 1108 | .iter() 1109 | .zip(other.inner.iter()) 1110 | .map(|(e1, e2)| e1.bind(py).eq(e2)) 1111 | .all(|r| r.unwrap_or(false))) 1112 | .into_pyobject(py) 1113 | .map_err(Into::into) 1114 | .map(BoundObject::into_any) 1115 | .map(BoundObject::unbind), 1116 | CompareOp::Ne => (self.inner.len() != other.inner.len() 1117 | || self 1118 | .inner 1119 | .iter() 1120 | .zip(other.inner.iter()) 1121 | .map(|(e1, e2)| e1.bind(py).ne(e2)) 1122 | .any(|r| r.unwrap_or(true))) 1123 | .into_pyobject(py) 1124 | .map_err(Into::into) 1125 | .map(BoundObject::into_any) 1126 | .map(BoundObject::unbind), 1127 | _ => Ok(py.NotImplemented()), 1128 | } 1129 | } 1130 | 1131 | fn __hash__(&self, py: Python) -> PyResult { 1132 | let mut hasher = DefaultHasher::new(); 1133 | 1134 | self.inner 1135 | .iter() 1136 | .enumerate() 1137 | .try_for_each(|(index, each)| { 1138 | each.bind(py) 1139 | .hash() 1140 | .map_err(|_| { 1141 | PyTypeError::new_err(format!( 1142 | "Unhashable type at {} element in List: {}", 1143 | index, 1144 | each.bind(py) 1145 | .repr() 1146 | .and_then(|r| r.extract()) 1147 | .unwrap_or(" error".to_string()) 1148 | )) 1149 | }) 1150 | .map(|x| hasher.write_isize(x)) 1151 | })?; 1152 | 1153 | Ok(hasher.finish()) 1154 | } 1155 | 1156 | fn __iter__(slf: PyRef<'_, Self>) -> ListIterator { 1157 | ListIterator { 1158 | inner: slf.inner.clone(), 1159 | } 1160 | } 1161 | 1162 | fn __reversed__(&self) -> ListPy { 1163 | ListPy { 1164 | inner: self.inner.reverse(), 1165 | } 1166 | } 1167 | 1168 | fn __reduce__(slf: PyRef) -> (Bound<'_, PyType>, (Vec,)) { 1169 | ( 1170 | ListPy::type_object(slf.py()), 1171 | (slf.inner.iter().map(|e| e.clone_ref(slf.py())).collect(),), 1172 | ) 1173 | } 1174 | 1175 | #[getter] 1176 | fn first(&self) -> PyResult<&PyObject> { 1177 | self.inner 1178 | .first() 1179 | .ok_or_else(|| PyIndexError::new_err("empty list has no first element")) 1180 | } 1181 | 1182 | #[getter] 1183 | fn rest(&self) -> ListPy { 1184 | let mut inner = self.inner.clone(); 1185 | inner.drop_first_mut(); 1186 | ListPy { inner } 1187 | } 1188 | 1189 | fn push_front(&self, other: PyObject) -> ListPy { 1190 | ListPy { 1191 | inner: self.inner.push_front(other), 1192 | } 1193 | } 1194 | 1195 | fn drop_first(&self) -> PyResult { 1196 | if let Some(inner) = self.inner.drop_first() { 1197 | Ok(ListPy { inner }) 1198 | } else { 1199 | Err(PyIndexError::new_err("empty list has no first element")) 1200 | } 1201 | } 1202 | } 1203 | 1204 | #[pyclass(module = "rpds")] 1205 | struct ListIterator { 1206 | inner: ListSync, 1207 | } 1208 | 1209 | #[pymethods] 1210 | impl ListIterator { 1211 | fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { 1212 | slf 1213 | } 1214 | 1215 | fn __next__(mut slf: PyRefMut<'_, Self>) -> Option { 1216 | let first_op = slf.inner.first()?; 1217 | let first = first_op.clone_ref(slf.py()); 1218 | 1219 | slf.inner = slf.inner.drop_first()?; 1220 | 1221 | Some(first) 1222 | } 1223 | } 1224 | 1225 | #[pyclass(module = "rpds")] 1226 | struct QueueIterator { 1227 | inner: QueueSync, 1228 | } 1229 | 1230 | #[pymethods] 1231 | impl QueueIterator { 1232 | fn 
__iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> { 1233 | slf 1234 | } 1235 | 1236 | fn __next__(mut slf: PyRefMut<'_, Self>) -> Option { 1237 | let first_op = slf.inner.peek()?; 1238 | let first = first_op.clone_ref(slf.py()); 1239 | slf.inner = slf.inner.dequeue()?; 1240 | Some(first) 1241 | } 1242 | } 1243 | 1244 | #[repr(transparent)] 1245 | #[pyclass(name = "Queue", module = "rpds", frozen, sequence)] 1246 | struct QueuePy { 1247 | inner: QueueSync, 1248 | } 1249 | 1250 | impl From> for QueuePy { 1251 | fn from(elements: QueueSync) -> Self { 1252 | QueuePy { inner: elements } 1253 | } 1254 | } 1255 | 1256 | impl<'source> FromPyObject<'source> for QueuePy { 1257 | fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult { 1258 | let mut ret = Queue::new_sync(); 1259 | for each in ob.try_iter()? { 1260 | ret.enqueue_mut(each?.extract()?); 1261 | } 1262 | Ok(QueuePy { inner: ret }) 1263 | } 1264 | } 1265 | 1266 | #[pymethods] 1267 | impl QueuePy { 1268 | #[new] 1269 | #[pyo3(signature = (*elements))] 1270 | fn init(elements: &Bound<'_, PyTuple>, py: Python<'_>) -> PyResult { 1271 | let mut ret: QueuePy; 1272 | if elements.len() == 1 { 1273 | ret = elements.get_item(0)?.extract()?; 1274 | } else { 1275 | ret = QueuePy { 1276 | inner: Queue::new_sync(), 1277 | }; 1278 | if elements.len() > 1 { 1279 | for each in elements { 1280 | ret.inner.enqueue_mut(each.into_pyobject(py)?.unbind()); 1281 | } 1282 | } 1283 | } 1284 | Ok(ret) 1285 | } 1286 | 1287 | fn __eq__(&self, other: &Self, py: Python<'_>) -> bool { 1288 | (self.inner.len() == other.inner.len()) 1289 | && self 1290 | .inner 1291 | .iter() 1292 | .zip(other.inner.iter()) 1293 | .map(|(e1, e2)| e1.bind(py).eq(e2)) 1294 | .all(|r| r.unwrap_or(false)) 1295 | } 1296 | 1297 | fn __hash__(&self, py: Python<'_>) -> PyResult { 1298 | let mut hasher = DefaultHasher::new(); 1299 | 1300 | self.inner 1301 | .iter() 1302 | .enumerate() 1303 | .try_for_each(|(index, each)| { 1304 | each.bind(py) 1305 | .hash() 1306 | .map_err(|_| { 1307 | PyTypeError::new_err(format!( 1308 | "Unhashable type at {} element in Queue: {}", 1309 | index, 1310 | each.bind(py) 1311 | .repr() 1312 | .and_then(|r| r.extract()) 1313 | .unwrap_or(" error".to_string()) 1314 | )) 1315 | }) 1316 | .map(|x| hasher.write_isize(x)) 1317 | })?; 1318 | 1319 | Ok(hasher.finish()) 1320 | } 1321 | 1322 | fn __ne__(&self, other: &Self, py: Python<'_>) -> bool { 1323 | (self.inner.len() != other.inner.len()) 1324 | || self 1325 | .inner 1326 | .iter() 1327 | .zip(other.inner.iter()) 1328 | .map(|(e1, e2)| e1.bind(py).ne(e2)) 1329 | .any(|r| r.unwrap_or(true)) 1330 | } 1331 | 1332 | fn __iter__(slf: PyRef<'_, Self>) -> QueueIterator { 1333 | QueueIterator { 1334 | inner: slf.inner.clone(), 1335 | } 1336 | } 1337 | 1338 | fn __len__(&self) -> usize { 1339 | self.inner.len() 1340 | } 1341 | 1342 | fn __repr__(&self, py: Python) -> PyResult { 1343 | let contents = self.inner.into_iter().map(|k| { 1344 | Ok(k.into_pyobject(py)? 
1345 | .call_method0("__repr__") 1346 | .and_then(|r| r.extract()) 1347 | .unwrap_or("".to_owned())) 1348 | }); 1349 | let contents = contents.collect::, PyErr>>()?; 1350 | Ok(format!("Queue([{}])", contents.join(", "))) 1351 | } 1352 | 1353 | #[getter] 1354 | fn peek(&self, py: Python) -> PyResult { 1355 | if let Some(peeked) = self.inner.peek() { 1356 | Ok(peeked.clone_ref(py)) 1357 | } else { 1358 | Err(PyIndexError::new_err("peeked an empty queue")) 1359 | } 1360 | } 1361 | 1362 | #[getter] 1363 | fn is_empty(&self) -> bool { 1364 | self.inner.is_empty() 1365 | } 1366 | 1367 | fn enqueue(&self, value: Bound<'_, PyAny>) -> Self { 1368 | QueuePy { 1369 | inner: self.inner.enqueue(value.into()), 1370 | } 1371 | } 1372 | 1373 | fn dequeue(&self) -> PyResult { 1374 | if let Some(inner) = self.inner.dequeue() { 1375 | Ok(QueuePy { inner }) 1376 | } else { 1377 | Err(PyIndexError::new_err("dequeued an empty queue")) 1378 | } 1379 | } 1380 | } 1381 | 1382 | #[pymodule(gil_used = false)] 1383 | #[pyo3(name = "rpds")] 1384 | fn rpds_py(py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> { 1385 | m.add_class::()?; 1386 | m.add_class::()?; 1387 | m.add_class::()?; 1388 | m.add_class::()?; 1389 | 1390 | PyMapping::register::(py)?; 1391 | 1392 | let abc = PyModule::import(py, "collections.abc")?; 1393 | 1394 | abc.getattr("Set")? 1395 | .call_method1("register", (HashTrieSetPy::type_object(py),))?; 1396 | 1397 | abc.getattr("MappingView")? 1398 | .call_method1("register", (KeysView::type_object(py),))?; 1399 | abc.getattr("MappingView")? 1400 | .call_method1("register", (ValuesView::type_object(py),))?; 1401 | abc.getattr("MappingView")? 1402 | .call_method1("register", (ItemsView::type_object(py),))?; 1403 | 1404 | abc.getattr("KeysView")? 1405 | .call_method1("register", (KeysView::type_object(py),))?; 1406 | abc.getattr("ValuesView")? 1407 | .call_method1("register", (ValuesView::type_object(py),))?; 1408 | abc.getattr("ItemsView")? 1409 | .call_method1("register", (ItemsView::type_object(py),))?; 1410 | 1411 | Ok(()) 1412 | } 1413 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/crate-py/rpds/ce68a75c3426b88a5050f6f19e32a7db3c80468f/tests/__init__.py -------------------------------------------------------------------------------- /tests/requirements.in: -------------------------------------------------------------------------------- 1 | file:.#egg=rpds-py 2 | pytest 3 | pytest-run-parallel 4 | -------------------------------------------------------------------------------- /tests/requirements.txt: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by uv via the following command: 2 | # uv pip compile --output-file /Users/julian/Development/rpds.py/tests/requirements.txt tests/requirements.in 3 | iniconfig==2.1.0 4 | # via pytest 5 | packaging==25.0 6 | # via pytest 7 | pluggy==1.6.0 8 | # via pytest 9 | pytest==8.3.5 10 | # via 11 | # -r tests/requirements.in 12 | # pytest-run-parallel 13 | pytest-run-parallel==0.4.2 14 | # via -r tests/requirements.in 15 | rpds-py @ file:.#egg=rpds-py 16 | # via -r tests/requirements.in 17 | -------------------------------------------------------------------------------- /tests/test_hash_trie_map.py: -------------------------------------------------------------------------------- 1 | """ 2 | Modified from the pyrsistent test suite. 
3 | 4 | Pre-modification, these were MIT licensed, and are copyright: 5 | 6 | Copyright (c) 2022 Tobias Gustafsson 7 | 8 | Permission is hereby granted, free of charge, to any person 9 | obtaining a copy of this software and associated documentation 10 | files (the "Software"), to deal in the Software without 11 | restriction, including without limitation the rights to use, 12 | copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the 14 | Software is furnished to do so, subject to the following 15 | conditions: 16 | 17 | The above copyright notice and this permission notice shall be 18 | included in all copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 21 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 22 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 23 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 24 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 25 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 26 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 27 | OTHER DEALINGS IN THE SOFTWARE. 28 | """ 29 | 30 | from collections import abc 31 | from operator import methodcaller 32 | import pickle 33 | import sysconfig 34 | 35 | import pytest 36 | 37 | from rpds import HashTrieMap 38 | 39 | # see https://github.com/python/cpython/issues/127065, 40 | # remove this when the CPython bug is fixed in a released version 41 | if bool(sysconfig.get_config_var("Py_GIL_DISABLED")): 42 | 43 | def methodcaller(name, /, *args, **kwargs): 44 | def caller(obj): 45 | return getattr(obj, name)(*args, **kwargs) 46 | 47 | return caller 48 | 49 | 50 | def test_instance_of_hashable(): 51 | assert isinstance(HashTrieMap(), abc.Hashable) 52 | 53 | 54 | def test_instance_of_map(): 55 | assert isinstance(HashTrieMap(), abc.Mapping) 56 | 57 | 58 | def test_literalish_works(): 59 | assert HashTrieMap() == HashTrieMap() 60 | assert HashTrieMap(a=1, b=2) == HashTrieMap({"a": 1, "b": 2}) 61 | 62 | 63 | def test_empty_initialization(): 64 | a_map = HashTrieMap() 65 | assert len(a_map) == 0 66 | 67 | 68 | def test_initialization_with_one_element(): 69 | the_map = HashTrieMap({"a": 2}) 70 | assert len(the_map) == 1 71 | assert the_map["a"] == 2 72 | assert "a" in the_map 73 | 74 | empty_map = the_map.remove("a") 75 | assert len(empty_map) == 0 76 | assert "a" not in empty_map 77 | 78 | 79 | def test_index_non_existing_raises_key_error(): 80 | m1 = HashTrieMap() 81 | with pytest.raises(KeyError) as error: 82 | m1["foo"] 83 | 84 | assert str(error.value) == "'foo'" 85 | 86 | 87 | def test_remove_non_existing_element_raises_key_error(): 88 | m1 = HashTrieMap(a=1) 89 | 90 | with pytest.raises(KeyError) as error: 91 | m1.remove("b") 92 | 93 | assert str(error.value) == "'b'" 94 | 95 | 96 | def test_various_iterations(): 97 | assert {"a", "b"} == set(HashTrieMap(a=1, b=2)) 98 | assert ["a", "b"] == sorted(HashTrieMap(a=1, b=2).keys()) 99 | assert [1, 2] == sorted(HashTrieMap(a=1, b=2).values()) 100 | assert {("a", 1), ("b", 2)} == set(HashTrieMap(a=1, b=2).items()) 101 | 102 | pm = HashTrieMap({k: k for k in range(100)}) 103 | assert len(pm) == len(pm.keys()) 104 | assert len(pm) == len(pm.values()) 105 | assert len(pm) == len(pm.items()) 106 | ks = pm.keys() 107 | assert all(k in pm for k in ks) 108 | assert all(k in ks for k in ks) 109 | us = pm.items() 110 | assert all(pm[k] == v for (k, v) in us) 111 
| vs = pm.values() 112 | assert all(v in vs for v in vs) 113 | 114 | 115 | def test_initialization_with_two_elements(): 116 | map1 = HashTrieMap({"a": 2, "b": 3}) 117 | assert len(map1) == 2 118 | assert map1["a"] == 2 119 | assert map1["b"] == 3 120 | 121 | map2 = map1.remove("a") 122 | assert "a" not in map2 123 | assert map2["b"] == 3 124 | 125 | 126 | def test_initialization_with_many_elements(): 127 | init_dict = {str(x): x for x in range(1700)} 128 | the_map = HashTrieMap(init_dict) 129 | 130 | assert len(the_map) == 1700 131 | assert the_map["16"] == 16 132 | assert the_map["1699"] == 1699 133 | assert the_map.insert("256", 256) == the_map 134 | 135 | new_map = the_map.remove("1600") 136 | assert len(new_map) == 1699 137 | assert "1600" not in new_map 138 | assert new_map["1601"] == 1601 139 | 140 | # Some NOP properties 141 | assert new_map.discard("18888") == new_map 142 | assert "19999" not in new_map 143 | assert new_map["1500"] == 1500 144 | assert new_map.insert("1500", new_map["1500"]) == new_map 145 | 146 | 147 | def test_access_non_existing_element(): 148 | map1 = HashTrieMap() 149 | assert len(map1) == 0 150 | 151 | map2 = map1.insert("1", 1) 152 | assert "1" not in map1 153 | assert map2["1"] == 1 154 | assert "2" not in map2 155 | 156 | 157 | def test_overwrite_existing_element(): 158 | map1 = HashTrieMap({"a": 2}) 159 | map2 = map1.insert("a", 3) 160 | 161 | assert len(map2) == 1 162 | assert map2["a"] == 3 163 | 164 | 165 | def test_hashing(): 166 | o = object() 167 | 168 | assert hash(HashTrieMap([(o, o), (1, o)])) == hash( 169 | HashTrieMap([(o, o), (1, o)]), 170 | ) 171 | assert hash(HashTrieMap([(o, o), (1, o)])) == hash( 172 | HashTrieMap([(1, o), (o, o)]), 173 | ) 174 | assert hash(HashTrieMap([(o, "foo")])) == hash(HashTrieMap([(o, "foo")])) 175 | assert hash(HashTrieMap()) == hash(HashTrieMap([])) 176 | 177 | assert hash(HashTrieMap({1: 2})) != hash(HashTrieMap({1: 3})) 178 | assert hash(HashTrieMap({o: 1})) != hash(HashTrieMap({o: o})) 179 | assert hash(HashTrieMap([])) != hash(HashTrieMap([(o, 1)])) 180 | assert hash(HashTrieMap({1: 2, 3: 4})) != hash(HashTrieMap({1: 3, 2: 4})) 181 | 182 | 183 | def test_same_hash_when_content_the_same_but_underlying_vector_size_differs(): 184 | x = HashTrieMap({x: x for x in range(1000)}) 185 | y = HashTrieMap({10: 10, 200: 200, 700: 700}) 186 | 187 | for z in x: 188 | if z not in y: 189 | x = x.remove(z) 190 | 191 | assert x == y 192 | # assert hash(x) == hash(y) # noqa: ERA001 193 | 194 | 195 | class HashabilityControlled: 196 | hashable = True 197 | 198 | def __hash__(self): 199 | if self.hashable: 200 | return 4 # Proven random 201 | raise ValueError("I am not currently hashable.") 202 | 203 | 204 | def test_map_does_not_hash_values_on_second_hash_invocation(): 205 | hashable = HashabilityControlled() 206 | x = HashTrieMap(dict(el=hashable)) 207 | hash(x) 208 | 209 | hashable.hashable = False 210 | with pytest.raises( 211 | TypeError, 212 | match=r"Unhashable type in HashTrieMap of key 'el'", 213 | ): 214 | hash(x) 215 | 216 | 217 | def test_equal(): 218 | x = HashTrieMap(a=1, b=2, c=3) 219 | y = HashTrieMap(a=1, b=2, c=3) 220 | 221 | assert x == y 222 | assert not (x != y) 223 | 224 | assert y == x 225 | assert not (y != x) 226 | 227 | 228 | def test_equal_with_different_insertion_order(): 229 | x = HashTrieMap([(i, i) for i in range(50)]) 230 | y = HashTrieMap([(i, i) for i in range(49, -1, -1)]) 231 | 232 | assert x == y 233 | assert not (x != y) 234 | 235 | assert y == x 236 | assert not (y != x) 237 | 238 | 239 | 
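
# Non-pyrsistent-test-suite sketch: because HashTrieMap is hashable (see
# test_hashing) and compares by contents, instances can be used directly as
# dict keys.
def test_usable_as_dict_key():
    index = {HashTrieMap(a=1): "first", HashTrieMap(b=2): "second"}
    assert index[HashTrieMap(a=1)] == "first"
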
def test_not_equal(): 240 | x = HashTrieMap(a=1, b=2, c=3) 241 | y = HashTrieMap(a=1, b=2) 242 | 243 | assert x != y 244 | assert not (x == y) 245 | 246 | assert y != x 247 | assert not (y == x) 248 | 249 | 250 | def test_not_equal_to_dict(): 251 | x = HashTrieMap(a=1, b=2, c=3) 252 | y = dict(a=1, b=2, d=4) 253 | 254 | assert x != y 255 | assert not (x == y) 256 | 257 | assert y != x 258 | assert not (y == x) 259 | 260 | 261 | def test_update_with_multiple_arguments(): 262 | # If same value is present in multiple sources, the rightmost is used. 263 | x = HashTrieMap(a=1, b=2, c=3) 264 | y = x.update(HashTrieMap(b=4, c=5), {"c": 6}) 265 | 266 | assert y == HashTrieMap(a=1, b=4, c=6) 267 | 268 | 269 | def test_update_one_argument(): 270 | x = HashTrieMap(a=1) 271 | 272 | assert x.update({"b": 2}) == HashTrieMap(a=1, b=2) 273 | 274 | 275 | def test_update_no_arguments(): 276 | x = HashTrieMap(a=1) 277 | 278 | assert x.update() == x 279 | 280 | 281 | class HashDummy: 282 | def __hash__(self): 283 | return 6528039219058920 # Hash of '33' 284 | 285 | def __eq__(self, other): 286 | return self is other 287 | 288 | 289 | def test_iteration_with_many_elements(): 290 | values = list(range(2000)) 291 | keys = [str(x) for x in values] 292 | init_dict = dict(zip(keys, values)) 293 | 294 | hash_dummy1 = HashDummy() 295 | hash_dummy2 = HashDummy() 296 | 297 | # Throw in a couple of hash collision nodes to tests 298 | # those properly as well 299 | init_dict[hash_dummy1] = 12345 300 | init_dict[hash_dummy2] = 54321 301 | a_map = HashTrieMap(init_dict) 302 | 303 | actual_values = set() 304 | actual_keys = set() 305 | 306 | for k, v in a_map.items(): 307 | actual_values.add(v) 308 | actual_keys.add(k) 309 | 310 | assert actual_keys == {*keys, hash_dummy1, hash_dummy2} 311 | assert actual_values == {*values, 12345, 54321} 312 | 313 | 314 | def test_repr(): 315 | rep = repr(HashTrieMap({"foo": "12", "": 37})) 316 | assert rep in { 317 | "HashTrieMap({'foo': '12', '': 37})", 318 | "HashTrieMap({'': 37, 'foo': '12'})", 319 | } 320 | 321 | 322 | def test_str(): 323 | s = str(HashTrieMap({1: 2, 3: 4})) 324 | assert s == "HashTrieMap({1: 2, 3: 4})" or s == "HashTrieMap({3: 4, 1: 2})" 325 | 326 | 327 | def test_empty_truthiness(): 328 | assert HashTrieMap(a=1) 329 | assert not HashTrieMap() 330 | 331 | 332 | def test_iterable(): 333 | m = HashTrieMap((i, i * 2) for i in range(3)) 334 | assert m == HashTrieMap({0: 0, 1: 2, 2: 4}) 335 | 336 | 337 | def test_convert_hashtriemap(): 338 | m = HashTrieMap({i: i * 2 for i in range(3)}) 339 | assert HashTrieMap.convert({i: i * 2 for i in range(3)}) == m 340 | 341 | 342 | def test_fast_convert_hashtriemap(): 343 | m = HashTrieMap({i: i * 2 for i in range(3)}) 344 | assert HashTrieMap.convert(m) is m 345 | 346 | 347 | # Non-pyrsistent-test-suite tests 348 | 349 | 350 | def test_more_eq(): 351 | o = object() 352 | 353 | assert HashTrieMap([(o, o), (1, o)]) == HashTrieMap([(o, o), (1, o)]) 354 | assert HashTrieMap([(o, "foo")]) == HashTrieMap([(o, "foo")]) 355 | assert HashTrieMap() == HashTrieMap([]) 356 | 357 | assert HashTrieMap({1: 2}) != HashTrieMap({1: 3}) 358 | assert HashTrieMap({o: 1}) != HashTrieMap({o: o}) 359 | assert HashTrieMap([]) != HashTrieMap([(o, 1)]) 360 | 361 | 362 | def test_pickle(): 363 | assert pickle.loads( 364 | pickle.dumps(HashTrieMap([(1, 2), (3, 4)])), 365 | ) == HashTrieMap([(1, 2), (3, 4)]) 366 | 367 | 368 | def test_get(): 369 | m1 = HashTrieMap({"foo": "bar"}) 370 | assert m1.get("foo") == "bar" 371 | assert m1.get("baz") is None 372 | 
assert m1.get("spam", "eggs") == "eggs" 373 | 374 | 375 | @pytest.mark.parametrize( 376 | "view", 377 | [pytest.param(methodcaller(p), id=p) for p in ["keys", "values", "items"]], 378 | ) 379 | @pytest.mark.parametrize( 380 | "cls", 381 | [ 382 | abc.Set, 383 | abc.MappingView, 384 | abc.KeysView, 385 | abc.ValuesView, 386 | abc.ItemsView, 387 | ], 388 | ) 389 | def test_views_abc(view, cls): 390 | m, d = HashTrieMap(), {} 391 | assert isinstance(view(m), cls) == isinstance(view(d), cls) 392 | 393 | 394 | def test_keys(): 395 | d = HashTrieMap({1: 2, 3: 4}) 396 | k = d.keys() 397 | 398 | assert 1 in k 399 | assert 2 not in k 400 | assert object() not in k 401 | 402 | assert len(k) == 2 403 | 404 | assert k == d.keys() 405 | assert k == HashTrieMap({1: 2, 3: 4}).keys() 406 | assert k == {1, 3} 407 | 408 | assert k != iter({1, 3}) 409 | assert k != {1, 2, 3} 410 | assert k != {1, 4} 411 | assert not k == {1, 4} 412 | 413 | assert k != object() 414 | 415 | 416 | def test_keys_setlike(): 417 | assert {1: 2, 3: 4}.keys() & HashTrieMap({1: 2}).keys() == {1} 418 | assert {1: 2, 3: 4}.keys() & HashTrieMap({1: 2}).keys() != {1, 2} 419 | assert HashTrieMap({1: 2}).keys() & {1: 2, 3: 4}.keys() == {1} 420 | assert HashTrieMap({1: 2}).keys() & {1: 2, 3: 4}.keys() != {2} 421 | assert not HashTrieMap({1: 2}).keys() & {}.keys() 422 | assert HashTrieMap({1: 2}).keys() & {1} == {1} 423 | assert HashTrieMap({1: 2}).keys() & [1] == {1} 424 | 425 | assert HashTrieMap({1: 2}).keys() | {3} == {1, 3} 426 | assert HashTrieMap({1: 2}).keys() | [3] == {1, 3} 427 | 428 | # these don't really exist on the KeysView protocol but it's nice to have 429 | s = (1, "foo") 430 | assert HashTrieMap({1: 2, "foo": 7}).keys().intersection(s) == set(s) 431 | assert not HashTrieMap({1: 2}).keys().intersection({}) 432 | assert HashTrieMap({1: 2}).keys().union({3}) == {1, 3} 433 | 434 | assert HashTrieMap({1: 2, 3: 4}).keys() < {1, 2, 3} 435 | assert HashTrieMap({1: 2, 3: 4}).keys() <= {1, 2, 3} 436 | assert not HashTrieMap({1: 2}).keys() < {1} 437 | assert HashTrieMap({1: 2}).keys() > set() 438 | assert HashTrieMap({1: 2}).keys() >= set() 439 | 440 | 441 | def test_keys_repr(): 442 | m = HashTrieMap({"foo": 3, 37: "bar"}) 443 | assert repr(m.keys()) in { 444 | "keys_view({'foo', 37})", 445 | "keys_view({37, 'foo'})", 446 | } 447 | 448 | 449 | def test_values(): 450 | d = HashTrieMap({1: 2, 3: 4}) 451 | v = d.values() 452 | 453 | assert 2 in v 454 | assert 3 not in v 455 | assert object() not in v 456 | 457 | assert len(v) == 2 458 | 459 | assert v == v 460 | # https://bugs.python.org/issue12445 which was WONTFIXed 461 | assert v != HashTrieMap({1: 2, 3: 4}).values() 462 | assert v != [2, 4] 463 | 464 | assert set(v) == {2, 4} 465 | 466 | 467 | def test_values_repr(): 468 | m = HashTrieMap({"foo": 3, 37: "bar", "baz": 3}) 469 | assert repr(m.values()) in { 470 | "values_view(['bar', 3, 3])", 471 | "values_view([3, 'bar', 3])", 472 | "values_view([3, 3, 'bar'])", 473 | } 474 | 475 | 476 | def test_items(): 477 | d = HashTrieMap({1: 2, 3: 4}) 478 | i = d.items() 479 | 480 | assert (1, 2) in i 481 | assert (1, 4) not in i 482 | 483 | assert len(i) == 2 484 | 485 | assert i == d.items() 486 | assert i == HashTrieMap({1: 2, 3: 4}).items() 487 | assert i == {(1, 2), (3, 4)} 488 | 489 | assert i != iter({(1, 2), (3, 4)}) 490 | assert i != {(1, 2, 3), (3, 4, 5)} 491 | assert i == {1: 2, 3: 4}.items() 492 | assert i != {(1, 2), (3, 4), (5, 6)} 493 | assert i != {(1, 2)} 494 | assert not i == {1, 4} 495 | 496 | assert i != object() 497 | 498 | 
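
# Non-pyrsistent-test-suite sketch: views are snapshots of an immutable map,
# so "updating" the map (which returns a new map) never changes a previously
# created view.
def test_views_are_snapshots():
    m = HashTrieMap({1: 2})
    keys = m.keys()
    bigger = m.insert(3, 4)
    assert 3 not in keys
    assert 3 in bigger.keys()
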
499 | def test_items_setlike(): 500 | assert {1: 2, 3: 4}.items() & HashTrieMap({1: 2}).items() == {(1, 2)} 501 | assert {1: 2, 3: 4}.items() & HashTrieMap({1: 2}).items() != {(1, 2), 3} 502 | 503 | assert HashTrieMap({1: 2}).items() & {1: 2, 3: 4}.items() == {(1, 2)} 504 | assert HashTrieMap({1: 2}).items() & {1: 2, 3: 4}.items() != {(3, 4)} 505 | assert not HashTrieMap({1: 2}).items() & {}.items() 506 | 507 | assert HashTrieMap({1: 2}).items() & [(1, 2)] == {(1, 2)} 508 | assert HashTrieMap({1: 2}).items() & [[1, 2]] == set() 509 | 510 | assert HashTrieMap({1: 2}).items() | {(3, 4)} == {(1, 2), (3, 4)} 511 | assert HashTrieMap({1: 2}).items() | [7] == {(1, 2), 7} 512 | 513 | s = ((1, 2), ("foo", 37)) 514 | assert HashTrieMap({1: 2, "foo": 7}).items().intersection(s) == {(1, 2)} 515 | assert not HashTrieMap({1: 2}).items().intersection({}) 516 | 517 | assert HashTrieMap({1: 2}).items().union({3}) == {(1, 2), 3} 518 | 519 | assert HashTrieMap({1: 2, 3: 4}).items() < {(1, 2), (3, 4), ("foo", "bar")} 520 | assert HashTrieMap({1: 2, 3: 4}).items() <= {(1, 2), (3, 4)} 521 | assert not HashTrieMap({1: 2}).keys() < {1} 522 | assert HashTrieMap({1: 2}).items() > set() 523 | assert HashTrieMap({1: 2}).items() >= set() 524 | 525 | 526 | def test_items_repr(): 527 | m = HashTrieMap({"foo": 3, 37: "bar", "baz": 3}) 528 | assert repr(m.items()) in { 529 | "items_view([('foo', 3), (37, 'bar'), ('baz', 3)])", 530 | "items_view([('foo', 3), ('baz', 3), (37, 'bar')])", 531 | "items_view([(37, 'bar'), ('foo', 3), ('baz', 3)])", 532 | "items_view([(37, 'bar'), ('baz', 3), ('foo', 3)])", 533 | "items_view([('baz', 3), (37, 'bar'), ('foo', 3)])", 534 | "items_view([('baz', 3), ('foo', 3), (37, 'bar')])", 535 | } 536 | 537 | 538 | def test_fromkeys(): 539 | keys = list(range(10)) 540 | got = HashTrieMap.fromkeys(keys) 541 | expected = HashTrieMap((i, None) for i in keys) 542 | assert got == HashTrieMap(dict.fromkeys(keys)) == expected 543 | 544 | 545 | def test_fromkeys_explicit_value(): 546 | keys = list(range(10)) 547 | expected = HashTrieMap((i, "foo") for i in keys) 548 | got = HashTrieMap.fromkeys(keys, "foo") 549 | expected = HashTrieMap((i, "foo") for i in keys) 550 | assert got == HashTrieMap(dict.fromkeys(keys, "foo")) == expected 551 | 552 | 553 | def test_fromkeys_explicit_value_not_copied(): 554 | keys = list(range(5)) 555 | 556 | got = HashTrieMap.fromkeys(keys, []) 557 | got[3].append(1) 558 | 559 | assert got == HashTrieMap((i, [1]) for i in keys) 560 | 561 | 562 | def test_update_with_iterable_of_kvs(): 563 | assert HashTrieMap({1: 2}).update(iter([(3, 4), ("5", 6)])) == HashTrieMap( 564 | { 565 | 1: 2, 566 | 3: 4, 567 | "5": 6, 568 | }, 569 | ) 570 | -------------------------------------------------------------------------------- /tests/test_hash_trie_set.py: -------------------------------------------------------------------------------- 1 | """ 2 | Modified from the pyrsistent test suite. 
3 | 4 | Pre-modification, these were MIT licensed, and are copyright: 5 | 6 | Copyright (c) 2022 Tobias Gustafsson 7 | 8 | Permission is hereby granted, free of charge, to any person 9 | obtaining a copy of this software and associated documentation 10 | files (the "Software"), to deal in the Software without 11 | restriction, including without limitation the rights to use, 12 | copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the 14 | Software is furnished to do so, subject to the following 15 | conditions: 16 | 17 | The above copyright notice and this permission notice shall be 18 | included in all copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 21 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 22 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 23 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 24 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 25 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 26 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 27 | OTHER DEALINGS IN THE SOFTWARE. 28 | """ 29 | 30 | from collections import abc 31 | import pickle 32 | 33 | import pytest 34 | 35 | from rpds import HashTrieSet 36 | 37 | 38 | def test_key_is_tuple(): 39 | with pytest.raises(KeyError): 40 | HashTrieSet().remove((1, 1)) 41 | 42 | 43 | def test_key_is_not_tuple(): 44 | with pytest.raises(KeyError): 45 | HashTrieSet().remove("asdf") 46 | 47 | 48 | def test_hashing(): 49 | o = object() 50 | 51 | assert hash(HashTrieSet([o])) == hash(HashTrieSet([o])) 52 | assert hash(HashTrieSet([o, o])) == hash(HashTrieSet([o, o])) 53 | assert hash(HashTrieSet([])) == hash(HashTrieSet([])) 54 | assert hash(HashTrieSet([1, 2])) == hash(HashTrieSet([1, 2])) 55 | assert hash(HashTrieSet([1, 2])) == hash(HashTrieSet([2, 1])) 56 | assert not (HashTrieSet([1, 2]) == HashTrieSet([1, 3])) 57 | assert not (HashTrieSet([]) == HashTrieSet([o])) 58 | 59 | assert hash(HashTrieSet([1, 2])) != hash(HashTrieSet([1, 3])) 60 | assert hash(HashTrieSet([1, o])) != hash(HashTrieSet([1, 2])) 61 | assert hash(HashTrieSet([1, 2])) != hash(HashTrieSet([2, 1, 3])) 62 | assert not (HashTrieSet([o]) != HashTrieSet([o, o])) 63 | assert not (HashTrieSet([o, o]) != HashTrieSet([o, o])) 64 | assert not (HashTrieSet() != HashTrieSet([])) 65 | 66 | 67 | def test_empty_truthiness(): 68 | assert HashTrieSet([1]) 69 | assert not HashTrieSet() 70 | 71 | 72 | def test_contains_elements_that_it_was_initialized_with(): 73 | initial = [1, 2, 3] 74 | s = HashTrieSet(initial) 75 | 76 | assert set(s) == set(initial) 77 | assert len(s) == len(set(initial)) 78 | 79 | 80 | def test_is_immutable(): 81 | s1 = HashTrieSet([1]) 82 | s2 = s1.insert(2) 83 | 84 | assert s1 == HashTrieSet([1]) 85 | assert s2 == HashTrieSet([1, 2]) 86 | 87 | s3 = s2.remove(1) 88 | assert s2 == HashTrieSet([1, 2]) 89 | assert s3 == HashTrieSet([2]) 90 | 91 | 92 | def test_remove_when_not_present(): 93 | s1 = HashTrieSet([1, 2, 3]) 94 | with pytest.raises(KeyError): 95 | s1.remove(4) 96 | 97 | 98 | def test_discard(): 99 | s1 = HashTrieSet((1, 2, 3)) 100 | assert s1.discard(3) == HashTrieSet((1, 2)) 101 | assert s1.discard(4) == s1 102 | 103 | 104 | def test_is_iterable(): 105 | assert sum(HashTrieSet([1, 2, 3])) == 6 106 | 107 | 108 | def test_contains(): 109 | s = HashTrieSet([1, 2, 3]) 110 | 111 | assert 2 in s 112 | assert 4 not in s 113 | 114 | 
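
# Non-pyrsistent-test-suite sketch: update() is declared with *iterables in
# src/lib.rs, so several sources can be merged in a single call.
def test_update_multiple_iterables():
    s = HashTrieSet([1]).update([2, 3], (4,), {5})
    assert s == HashTrieSet([1, 2, 3, 4, 5])
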
115 | def test_supports_set_operations(): 116 | s1 = HashTrieSet([1, 2, 3]) 117 | s2 = HashTrieSet([3, 4, 5]) 118 | 119 | assert s1 | s2 == HashTrieSet([1, 2, 3, 4, 5]) 120 | assert s1.union(s2) == s1 | s2 121 | 122 | assert s1 & s2 == HashTrieSet([3]) 123 | assert s1.intersection(s2) == s1 & s2 124 | 125 | assert s1 - s2 == HashTrieSet([1, 2]) 126 | assert s1.difference(s2) == s1 - s2 127 | 128 | assert s1 ^ s2 == HashTrieSet([1, 2, 4, 5]) 129 | assert s1.symmetric_difference(s2) == s1 ^ s2 130 | 131 | 132 | def test_supports_set_comparisons(): 133 | s1 = HashTrieSet([1, 2, 3]) 134 | s3 = HashTrieSet([1, 2]) 135 | s4 = HashTrieSet([1, 2, 3]) 136 | 137 | assert HashTrieSet([1, 2, 3, 3, 5]) == HashTrieSet([1, 2, 3, 5]) 138 | assert s1 != s3 139 | 140 | assert s3 < s1 141 | assert s3 <= s1 142 | assert s3 <= s4 143 | 144 | assert s1 > s3 145 | assert s1 >= s3 146 | assert s4 >= s3 147 | 148 | 149 | def test_repr(): 150 | rep = repr(HashTrieSet([1, 2])) 151 | assert rep == "HashTrieSet({1, 2})" or rep == "HashTrieSet({2, 1})" 152 | 153 | rep = repr(HashTrieSet(["1", "2"])) 154 | assert rep == "HashTrieSet({'1', '2'})" or rep == "HashTrieSet({'2', '1'})" 155 | 156 | 157 | def test_update(): 158 | assert HashTrieSet([1, 2, 3]).update([3, 4, 4, 5]) == HashTrieSet( 159 | [1, 2, 3, 4, 5], 160 | ) 161 | 162 | 163 | def test_update_no_elements(): 164 | s1 = HashTrieSet([1, 2]) 165 | assert s1.update([]) == s1 166 | 167 | 168 | def test_iterable(): 169 | assert HashTrieSet(iter("a")) == HashTrieSet(iter("a")) 170 | 171 | 172 | def test_more_eq(): 173 | # Non-pyrsistent-test-suite test 174 | o = object() 175 | 176 | assert HashTrieSet([o]) == HashTrieSet([o]) 177 | assert HashTrieSet([o, o]) == HashTrieSet([o, o]) 178 | assert HashTrieSet([o]) == HashTrieSet([o, o]) 179 | assert HashTrieSet() == HashTrieSet([]) 180 | assert not (HashTrieSet([1, 2]) == HashTrieSet([1, 3])) 181 | assert not (HashTrieSet([o, 1]) == HashTrieSet([o, o])) 182 | assert not (HashTrieSet([]) == HashTrieSet([o])) 183 | 184 | assert HashTrieSet([1, 2]) != HashTrieSet([1, 3]) 185 | assert HashTrieSet([]) != HashTrieSet([o]) 186 | assert not (HashTrieSet([o]) != HashTrieSet([o])) 187 | assert not (HashTrieSet([o, o]) != HashTrieSet([o, o])) 188 | assert not (HashTrieSet([o]) != HashTrieSet([o, o])) 189 | assert not (HashTrieSet() != HashTrieSet([])) 190 | 191 | assert HashTrieSet([1, 2]) == {1, 2} 192 | assert HashTrieSet([1, 2]) != {1, 2, 3} 193 | assert HashTrieSet([1, 2]) != [1, 2] 194 | 195 | 196 | def test_more_set_comparisons(): 197 | s = HashTrieSet([1, 2, 3]) 198 | 199 | assert s == s 200 | assert not (s < s) 201 | assert s <= s 202 | assert not (s > s) 203 | assert s >= s 204 | 205 | 206 | def test_pickle(): 207 | assert pickle.loads( 208 | pickle.dumps(HashTrieSet([1, 2, 3, 4])), 209 | ) == HashTrieSet([1, 2, 3, 4]) 210 | 211 | 212 | def test_instance_of_set(): 213 | assert isinstance(HashTrieSet(), abc.Set) 214 | 215 | 216 | def test_lt_le_gt_ge(): 217 | assert HashTrieSet({}) < {1} 218 | assert HashTrieSet({}) <= {1} 219 | assert HashTrieSet({1}) > set() 220 | assert HashTrieSet({1}) >= set() 221 | -------------------------------------------------------------------------------- /tests/test_list.py: -------------------------------------------------------------------------------- 1 | """ 2 | Modified from the pyrsistent test suite. 
3 | 4 | Pre-modification, these were MIT licensed, and are copyright: 5 | 6 | Copyright (c) 2022 Tobias Gustafsson 7 | 8 | Permission is hereby granted, free of charge, to any person 9 | obtaining a copy of this software and associated documentation 10 | files (the "Software"), to deal in the Software without 11 | restriction, including without limitation the rights to use, 12 | copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the 14 | Software is furnished to do so, subject to the following 15 | conditions: 16 | 17 | The above copyright notice and this permission notice shall be 18 | included in all copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 21 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 22 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 23 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 24 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 25 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 26 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 27 | OTHER DEALINGS IN THE SOFTWARE. 28 | """ 29 | 30 | import pickle 31 | 32 | import pytest 33 | 34 | from rpds import List 35 | 36 | 37 | def test_literalish_works(): 38 | assert List(1, 2, 3) == List([1, 2, 3]) 39 | 40 | 41 | def test_first_and_rest(): 42 | pl = List([1, 2]) 43 | assert pl.first == 1 44 | assert pl.rest.first == 2 45 | assert pl.rest.rest == List() 46 | 47 | 48 | def test_instantiate_large_list(): 49 | assert List(range(1000)).first == 0 50 | 51 | 52 | def test_iteration(): 53 | assert list(List()) == [] 54 | assert list(List([1, 2, 3])) == [1, 2, 3] 55 | 56 | 57 | def test_push_front(): 58 | assert List([1, 2, 3]).push_front(0) == List([0, 1, 2, 3]) 59 | 60 | 61 | def test_push_front_empty_list(): 62 | assert List().push_front(0) == List([0]) 63 | 64 | 65 | def test_truthiness(): 66 | assert List([1]) 67 | assert not List() 68 | 69 | 70 | def test_len(): 71 | assert len(List([1, 2, 3])) == 3 72 | assert len(List()) == 0 73 | 74 | 75 | def test_first_illegal_on_empty_list(): 76 | with pytest.raises(IndexError): 77 | List().first 78 | 79 | 80 | def test_rest_return_self_on_empty_list(): 81 | assert List().rest == List() 82 | 83 | 84 | def test_reverse(): 85 | assert reversed(List([1, 2, 3])) == List([3, 2, 1]) 86 | 87 | assert reversed(List()) == List() 88 | 89 | 90 | def test_inequality(): 91 | assert List([1, 2]) != List([1, 3]) 92 | assert List([1, 2]) != List([1, 2, 3]) 93 | assert List() != List([1, 2, 3]) 94 | 95 | 96 | def test_repr(): 97 | assert str(List()) == "List([])" 98 | assert str(List([1, 2, 3])) in "List([1, 2, 3])" 99 | 100 | 101 | def test_hashing(): 102 | o = object() 103 | 104 | assert hash(List([o, o])) == hash(List([o, o])) 105 | assert hash(List([o])) == hash(List([o])) 106 | assert hash(List()) == hash(List([])) 107 | assert not (hash(List([1, 2])) == hash(List([1, 3]))) 108 | assert not (hash(List([1, 2])) == hash(List([2, 1]))) 109 | assert not (hash(List([o])) == hash(List([o, o]))) 110 | assert not (hash(List([])) == hash(List([o]))) 111 | 112 | assert hash(List([1, 2])) != hash(List([1, 3])) 113 | assert hash(List([1, 2])) != hash(List([2, 1])) 114 | assert hash(List([o])) != hash(List([o, o])) 115 | assert hash(List([])) != hash(List([o])) 116 | assert not (hash(List([o, o])) != hash(List([o, o]))) 117 | assert not (hash(List([o])) != hash(List([o]))) 118 | 
assert not (hash(List([])) != hash(List([]))) 119 | 120 | 121 | def test_sequence(): 122 | m = List("asdf") 123 | assert m == List(["a", "s", "d", "f"]) 124 | 125 | 126 | # Non-pyrsistent-test-suite tests 127 | 128 | 129 | def test_drop_first(): 130 | assert List([1, 2, 3]).drop_first() == List([2, 3]) 131 | 132 | 133 | def test_drop_first_empty(): 134 | """ 135 | rpds itself returns an Option here but we try IndexError instead. 136 | """ 137 | with pytest.raises(IndexError): 138 | List([]).drop_first() 139 | 140 | 141 | def test_more_eq(): 142 | o = object() 143 | 144 | assert List([o, o]) == List([o, o]) 145 | assert List([o]) == List([o]) 146 | assert List() == List([]) 147 | assert not (List([1, 2]) == List([1, 3])) 148 | assert not (List([o]) == List([o, o])) 149 | assert not (List([]) == List([o])) 150 | 151 | assert List([1, 2]) != List([1, 3]) 152 | assert List([o]) != List([o, o]) 153 | assert List([]) != List([o]) 154 | assert not (List([o, o]) != List([o, o])) 155 | assert not (List([o]) != List([o])) 156 | assert not (List() != List([])) 157 | 158 | 159 | def test_pickle(): 160 | assert pickle.loads(pickle.dumps(List([1, 2, 3, 4]))) == List([1, 2, 3, 4]) 161 | -------------------------------------------------------------------------------- /tests/test_queue.py: -------------------------------------------------------------------------------- 1 | """ 2 | Modified from the pyrsistent test suite. 3 | 4 | Pre-modification, these were MIT licensed, and are copyright: 5 | 6 | Copyright (c) 2022 Tobias Gustafsson 7 | 8 | Permission is hereby granted, free of charge, to any person 9 | obtaining a copy of this software and associated documentation 10 | files (the "Software"), to deal in the Software without 11 | restriction, including without limitation the rights to use, 12 | copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the 14 | Software is furnished to do so, subject to the following 15 | conditions: 16 | 17 | The above copyright notice and this permission notice shall be 18 | included in all copies or substantial portions of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 21 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 22 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 23 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 24 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 25 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 26 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 27 | OTHER DEALINGS IN THE SOFTWARE. 
28 | """ 29 | 30 | import pytest 31 | 32 | from rpds import Queue 33 | 34 | 35 | def test_literalish_works(): 36 | assert Queue(1, 2, 3) == Queue([1, 2, 3]) 37 | 38 | 39 | def test_peek_dequeue(): 40 | pl = Queue([1, 2]) 41 | assert pl.peek == 1 42 | assert pl.dequeue().peek == 2 43 | assert pl.dequeue().dequeue().is_empty 44 | with pytest.raises(IndexError): 45 | pl.dequeue().dequeue().dequeue() 46 | 47 | 48 | def test_instantiate_large_list(): 49 | assert Queue(range(1000)).peek == 0 50 | 51 | 52 | def test_iteration(): 53 | assert list(Queue()) == [] 54 | assert list(Queue([1, 2, 3])) == [1, 2, 3] 55 | 56 | 57 | def test_enqueue(): 58 | assert Queue([1, 2, 3]).enqueue(4) == Queue([1, 2, 3, 4]) 59 | 60 | 61 | def test_enqueue_empty_list(): 62 | assert Queue().enqueue(0) == Queue([0]) 63 | 64 | 65 | def test_truthiness(): 66 | assert Queue([1]) 67 | assert not Queue() 68 | 69 | 70 | def test_len(): 71 | assert len(Queue([1, 2, 3])) == 3 72 | assert len(Queue()) == 0 73 | 74 | 75 | def test_peek_illegal_on_empty_list(): 76 | with pytest.raises(IndexError): 77 | Queue().peek 78 | 79 | 80 | def test_inequality(): 81 | assert Queue([1, 2]) != Queue([1, 3]) 82 | assert Queue([1, 2]) != Queue([1, 2, 3]) 83 | assert Queue() != Queue([1, 2, 3]) 84 | 85 | 86 | def test_repr(): 87 | assert str(Queue()) == "Queue([])" 88 | assert str(Queue([1, 2, 3])) in "Queue([1, 2, 3])" 89 | 90 | 91 | def test_sequence(): 92 | m = Queue("asdf") 93 | assert m == Queue(["a", "s", "d", "f"]) 94 | 95 | 96 | # Non-pyrsistent-test-suite tests 97 | 98 | 99 | def test_dequeue(): 100 | assert Queue([1, 2, 3]).dequeue() == Queue([2, 3]) 101 | 102 | 103 | def test_dequeue_empty(): 104 | """ 105 | rpds itself returns an Option here but we try IndexError instead. 106 | """ 107 | with pytest.raises(IndexError): 108 | Queue([]).dequeue() 109 | 110 | 111 | def test_more_eq(): 112 | o = object() 113 | 114 | assert Queue([o, o]) == Queue([o, o]) 115 | assert Queue([o]) == Queue([o]) 116 | assert Queue() == Queue([]) 117 | assert not (Queue([1, 2]) == Queue([1, 3])) 118 | assert not (Queue([o]) == Queue([o, o])) 119 | assert not (Queue([]) == Queue([o])) 120 | 121 | assert Queue([1, 2]) != Queue([1, 3]) 122 | assert Queue([o]) != Queue([o, o]) 123 | assert Queue([]) != Queue([o]) 124 | assert not (Queue([o, o]) != Queue([o, o])) 125 | assert not (Queue([o]) != Queue([o])) 126 | assert not (Queue() != Queue([])) 127 | 128 | 129 | def test_hashing(): 130 | assert hash(Queue([1, 2])) == hash(Queue([1, 2])) 131 | assert hash(Queue([1, 2])) != hash(Queue([2, 1])) 132 | assert len({Queue([1, 2]), Queue([1, 2])}) == 1 133 | 134 | 135 | def test_unhashable_contents(): 136 | q = Queue([1, {1}]) 137 | with pytest.raises(TypeError): 138 | hash(q) 139 | --------------------------------------------------------------------------------