├── .github └── workflows │ └── test.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-ZLIB ├── README.md ├── benches └── maps.rs ├── coca.dic ├── examples ├── huffman.rs └── icosphere.rs ├── spellcheck.toml └── src ├── arena.rs ├── collections ├── binary_heap.rs ├── cache.rs ├── deque.rs ├── list_map.rs ├── list_set.rs ├── mod.rs ├── option_group.rs ├── pool │ ├── direct.rs │ ├── mod.rs │ └── packed.rs └── vec.rs ├── lib.rs ├── object.rs ├── storage.rs └── string.rs /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | test: 14 | runs-on: ${{matrix.os}} 15 | strategy: 16 | matrix: 17 | include: 18 | - os: ubuntu-latest 19 | target: x86_64-unknown-linux-gnu 20 | toolchain: stable 21 | - os: macos-latest 22 | target: x86_64-apple-darwin 23 | toolchain: stable 24 | - os: windows-latest 25 | target: x86_64-pc-windows-msvc 26 | toolchain: stable 27 | - os: windows-latest 28 | target: x86_64-pc-windows-gnu 29 | toolchain: stable 30 | - os: ubuntu-latest 31 | target: x86_64-unknown-linux-gnu 32 | toolchain: 1.59.0 # MSRV 33 | - os: ubuntu-latest 34 | target: x86_64-unknown-linux-gnu 35 | toolchain: nightly 36 | 37 | steps: 38 | - uses: actions/checkout@v2 39 | - name: Install toolchain 40 | uses: actions-rs/toolchain@v1 41 | with: 42 | profile: minimal 43 | target: ${{ matrix.target }} 44 | toolchain: ${{ matrix.toolchain }} 45 | override: true 46 | - run: ${{ matrix.deps }} 47 | - name: Test unstable feature on nightly 48 | if: ${{ matrix.toolchain == 'nightly' }} 49 | run: | 50 | cargo test --target ${{ matrix.target }} --features=unstable 51 | cargo test --target ${{ matrix.target }} --features=unstable,alloc 52 | cargo test --target ${{ matrix.target }} --features=unstable,profile 53 | cargo test --target ${{ matrix.target }} --all-features 54 | - name: Test stable features 55 | run: | 56 | cargo test --target ${{ matrix.target }} --no-default-features 57 | cargo test --target ${{ matrix.target }} --features=alloc 58 | cargo test --target ${{ matrix.target }} --features=profile 59 | cargo test --target ${{ matrix.target }} --features=alloc,profile 60 | 61 | test-miri: 62 | runs-on: ubuntu-latest 63 | strategy: 64 | matrix: 65 | include: 66 | - features: --no-default-features 67 | - features: --features=alloc 68 | - features: --features=profile 69 | - features: --features=unstable 70 | - features: --features=alloc,profile 71 | - features: --features=alloc,unstable 72 | - features: --features=profile,unstable 73 | - features: --all-features 74 | steps: 75 | - uses: actions/checkout@v2 76 | - name: Install toolchain 77 | run: | 78 | MIRI_NIGHTLY=nightly-$(curl -s https://rust-lang.github.io/rustup-components-history/x86_64-unknown-linux-gnu/miri) 79 | rustup default "$MIRI_NIGHTLY" 80 | rustup component add miri 81 | - name: Run all tests 82 | run: | 83 | cargo miri test ${{ matrix.features }} 84 | 85 | test-example-programs: 86 | runs-on: ubuntu-latest 87 | steps: 88 | - uses: actions/checkout@v2 89 | - name: Install toolchain 90 | run: | 91 | MIRI_NIGHTLY=nightly-$(curl -s https://rust-lang.github.io/rustup-components-history/x86_64-unknown-linux-gnu/miri) 92 | rustup default "$MIRI_NIGHTLY" 93 | rustup component add miri 94 | - name: Run example programs 95 | run: | 96 | cargo miri run --example huffman 97 | cargo miri run --example icosphere 
98 | 99 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.vscode 2 | /target 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # 0.3.1 (WIP) 2 | 3 | ## Breaking Changes 4 | ## New Features 5 | 6 | - Make `Vec::set_len` public and implement `Vec::{spare_capacity_mut, split_at_spare_mut}`. 7 | - New methods `Vec::{extend_to_capacity, split_borrowed}`, `SliceVec::split_and_shrink_to` 8 | - Generalize implementations of `PartialEq`, `PartialOrd` for `arena::Box`. 9 | 10 | ## Bugfixes 11 | 12 | - Relax unnecessarily strict trait bounds on `{AllocVec, AllocDeque, AllocHeap}::{with_capacity, clone}`. 13 | 14 | # 0.3.0 (2022-03-04) 15 | ## Breaking Changes 16 | 17 | - Rename `ContiguousStorage` to `Storage` and overhaul the interface; 18 | see [this blog post on the design process][storage-abstraction-v2] for motivation 19 | - Add super traits to `Capacity` trait, add `const MAX_REPRESENTABLE`; implementors 20 | are no longer required to perform validation on _every_ call 21 | - Remove the `nightly` feature flag and the `feature(min_const_generics)` attribute. 22 | - Rework the module hierarchy, introducing the `collections` module 23 | - Rename `Array{Vec, Deque, Heap}` to `Inline*` for consistency with `InlineObject`; 24 | remove `TiArrayVec`, `TiArrayDeque`, and `TiArrayHeap` in favor of default type 25 | parameters on `Inline{Vec, Deque, Heap}`, raising minimum supported compiler version 26 | to 1.59.0. 27 | - Redefine `ArenaStorage` as a struct for compatibility with non-array-like layouts 28 | - Remove `HeapStorage` type alias and add `AllocStorage` struct (similar to `ArenaStorage`) 29 | - Rename `Arena::{collect, try_collect}` to `Arena::{collect_slice, try_collect_slice}` 30 | - Remove `Arena::{try_vec, try_deque, try_heap, vec, deque, heap}` in favor of 31 | the generic `Arena::{try_with_capacity, with_capacity}` 32 | - Add the `CapacityError` type, changing the return type of several fallible methods. 
33 | 34 | [storage-abstraction-v2]: https://gist.github.com/teryror/7b9a23fd0cd8dcfbcb6ebd34ee2639f8 35 | 36 | ## New Features 37 | 38 | - New `string` module for working with UTF-8 encoded text 39 | - New `cache` module for forgetful map data structures 40 | - `DirectPool`, a direct analogue to `slotmap::SlotMap` 41 | - `PackedPool`, a direct analogue to `slotmap::DenseSlotMap` 42 | - New `option_group` module for bit-packing discriminants of multiple optional values 43 | - Experimental `object` module for owned, allocation-free trait objects 44 | - Implement `Vec::drain_filter` and `Vec::drain_filter_range` 45 | - New methods `Deque::force_push_front` and `Deque::force_push_back` 46 | for using `Deque` as a classic ring buffer 47 | - New methods `Arena::static_with_capacity` for ergonomically constructing arenas when 48 | the `alloc` crate is available, and `Arena::{collect_with_capacity, try_collect_with_capacity}`, 49 | which more closely approximate `Iterator::collect` than the old `collect` methods 50 | - Add support for multiple type declarations in a single `index_type!` invocation 51 | 52 | ## Bugfixes 53 | 54 | - Leaking a `vec::Drain` or `deque::Drain` no longer leaves the underlying data structure 55 | in an invalid state, potentially causing undefined behaviour 56 | - Failing to allocate an array from an `Arena` no longer creates a null reference to an 57 | empty slice, causing undefined behaviour 58 | 59 | # 0.2.0 (2020-12-28) 60 | 61 | - Add `BinaryHeap` and `Deque` implementations 62 | - Add `Vec::{into_raw_parts, from_raw_parts}` 63 | - Rename `HeapVec` to `AllocVec` for consistency with `AllocHeap` (chosen in favour of `HeapHeap`) 64 | - Rename `Capacity::into_usize` to `as_usize` and `ArenaWriter` to `Writer` to comply with naming conventions 65 | - Fix a bug in `Vec::drain` that made right-open ranges not work correctly 66 | - `HeapVec::with_capacity` takes a value of generic type `I: Capacity` rather than `usize` 67 | - Fix potential undefined behavior in `Vec::{deref, deref_mut, into_iter}` detected by Miri 68 | 69 | # 0.1.0 (2020-12-03) 70 | 71 | Initial Release -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "coca" 3 | version = "0.3.0" # When updating this, remember to update install instructions in README and html_root_url in lib.rs 4 | authors = ["Tristan Dannenberg "] 5 | edition = "2018" 6 | license = "Zlib OR Apache-2.0" 7 | description = "Data structures with constant capacity" 8 | keywords = ["no_std", "allocator", "vec", "queue"] 9 | categories = ["no-std", "data-structures", "memory-management"] 10 | repository = "https://github.com/teryror/coca" 11 | readme = "README.md" 12 | 13 | [package.metadata.docs.rs] 14 | features = ["alloc", "unstable", "profile"] 15 | rustdoc-args = ["--cfg", "docs_rs"] 16 | 17 | [profile.test] 18 | opt-level = 3 19 | 20 | [features] 21 | default = [] 22 | alloc = [] 23 | unstable = [] 24 | profile = [] 25 | 26 | [dev-dependencies] 27 | rand = { version = "0.8", features = ["small_rng"] } 28 | rustc-hash = { version = "1.1", default-features = false } -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 
| 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 Tristan L. Dannenberg 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-ZLIB: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 Tristan Dannenberg. 2 | 3 | This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. 4 | 5 | Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 6 | 7 | 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 8 | 9 | 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 10 | 11 | 3. This notice may not be removed or altered from any source distribution. 12 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # coca - Data Structures with Constant Capacity 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/coca.svg)](https://crates.io/crates/coca) 4 | [![Documentation](https://docs.rs/coca/badge.svg)](https://docs.rs/coca) 5 | [![Min. rustc version 1.59.0](https://img.shields.io/badge/Min.%20rustc-v1.59.0-blue)](https://img.shields.io/badge/Min%20rustc-v1.59.0-blue) 6 | 7 | Allocation-free data structures that make do with the memory they're given. 8 | 9 | ```toml 10 | [dependencies] 11 | coca = "0.3" 12 | ``` 13 | 14 | ## Overview 15 | 16 | Rust's [standard collection library][std-collections] provides efficient 17 | implementations of the most common general purpose programming data structures. 18 | By using the standard implementations, it should be possible for two libraries 19 | to communicate without significant data conversion. 20 | 21 | [std-collections]: https://doc.rust-lang.org/std/collections/index.html 22 | 23 | However, these standard implementations manage their own memory using an 24 | [Allocator][allocator-trait], defaulting to the global heap. 
This is generally 25 | convenient, but may pose a problem in some cases, e.g. in (soft) real-time 26 | applications, or in memory-constrained embedded systems, where the 27 | [`alloc`][alloc-crate] crate may not even be available. 28 | 29 | [allocator-trait]: https://doc.rust-lang.org/core/alloc/trait.Allocator.html 30 | [alloc-crate]: https://doc.rust-lang.org/alloc/index.html 31 | 32 | `coca` aims to serve as a replacement in such environments by providing data 33 | structures that operate on a given block of backing memory, without allocating 34 | on their own. They are generic over the storage type, which may be any of the 35 | following: 36 | 37 | - `InlineStorage`: Store the contents inside the `struct`, without indirection. 38 | Requires capacities that are truly `const`, i.e. statically known at compile time. 39 | - `AllocStorage`: Store the contents in globally allocated memory. 40 | Requires the `alloc` feature flag. 41 | - `SliceStorage`: For array-like data structures only, store the contents in 42 | any given slice of uninitialized memory. 43 | - `ArenaStorage`: `coca` includes an [arena allocator][arena-allocator], and 44 | allows ergonomic construction of data structures using arena-allocated memory 45 | as storage. 46 | 47 | [arena-allocator]: https://en.wikipedia.org/wiki/Region-based_memory_management 48 | 49 | Within this paradigm, direct analogs to the following types are provided: 50 | 51 | - [`alloc::vec::Vec`](https://doc.rust-lang.org/alloc/vec/struct.Vec.html) 52 | - [`alloc::string::String`](https://doc.rust-lang.org/alloc/string/struct.String.html) 53 | - [`alloc::collections::VecDeque`](https://doc.rust-lang.org/alloc/collections/vec_deque/index.html) 54 | - [`alloc::collections::BinaryHeap`](https://doc.rust-lang.org/alloc/collections/binary_heap/struct.BinaryHeap.html) 55 | - [`slotmap::{SlotMap, DenseSlotMap}`](https://docs.rs/slotmap/latest/slotmap/) 56 | 57 | Additionally, `coca` also includes the following container types: 58 | 59 | - `ListSet`, a set implemented as a `Vec`. 60 | - `ListMap`, an [association list](https://en.wikipedia.org/wiki/Association_list) 61 | implemented as a pair of parallel arrays. 62 | - `CacheTable`, a forgetful hash map with a configurable eviction policy; 63 | ideal for caching, hence the name. 64 | - `OptionGroup`, a tuple or array of optional values with the occupancy flags 65 | packed into a single bitmap field. 66 | - `InlineObject`, a statically-sized container for dynamically-sized types, 67 | mainly trait objects; this requires unstable language features, and therefore 68 | needs the `unstable` feature flag to be enabled. 69 | 70 | ## Comparison with Other Libraries 71 | 72 | First of all, unless you are trying to avoid hitting the global allocator, or 73 | don't have one in your target environment, you are almost certainly better off 74 | just using Rust's standard collections, or in the case of `coca::collections::pool`, 75 | the [`slotmap` crate](https://crates.io/crates/slotmap). Even in such a scenario, 76 | however, there are several crates filling a similar niche that are more mature 77 | than `coca`. 78 | 79 | ### `coca::arena` vs [`bumpalo`](https://crates.io/crates/bumpalo) 80 | 81 | - Bumpalo is a `no_std` crate, but it does have a hard dependency on the `alloc` 82 | crate, which it uses to automatically add chunks to its arenas whenever they 83 | run out of space. This helps in avoiding failed allocations, but makes it harder 84 | to bound memory usage. 
By contrast, `coca`'s dependency on `alloc` is optional; 85 | a `coca::arena::Arena` always returns `None` (or panics, at your option) when 86 | it runs out of space, but can be constructed from any mutable byte slice. 87 | - Bumpalo has its own forks of Rust's standard `Vec` and `String` that use its 88 | `Bump` allocator, and optionally supports the nightly-only `Allocator` API for 89 | compatibility with the other standard collections. On stable Rust, `coca::arena` 90 | supports a wider variety of data structures, but it won't benefit from the 91 | stabilization of `feature(allocator_api)`. 92 | - Uniquely, `coca`'s arenas can be nested, allowing for stack-like de/allocation 93 | patterns. 94 | 95 | ### `coca::collections` vs [`heapless`](https://crates.io/crates/heapless) 96 | 97 | - `heapless` provides a variety of data structures with statically known 98 | capacity, the equivalent of `coca`'s `InlineStorage`. It has no support for 99 | dynamic allocations. 100 | - `heapless` provides direct analogs to `std::collections::HashMap` and 101 | `HashSet`, which `coca` does not have (yet). 102 | - None of `coca`'s data structures are thread-safe, while `heapless` provides 103 | multiple synchronization mechanisms: a lock-free memory pool with atomically 104 | reference-counted pointers, and both MPMC and SPSC lock-free queues. 105 | - `heapless` does not provide equivalents to `std::collections::VecDeque`, 106 | `slotmap::SlotMap` or `slotmap::DenseSlotMap`, while `coca` does, on top of 107 | the more niche data structures (`CacheTable`, `OptionGroup`, `InlineObject`). 108 | 109 | ### `coca::collections::vec` vs [`tinyvec`][1] and [`arrayvec`][2] 110 | 111 | [1]: https://crates.io/crates/tinyvec 112 | [2]: https://crates.io/crates/arrayvec 113 | 114 | - tinyvec uses no unsafe code, but requires the element type to implement the 115 | `Default` trait. `coca` has no such restriction, and offers equivalents to 116 | `tinyvec::ArrayVec` and `tinyvec::SliceVec`, but not `tinyvec::TinyVec`, which 117 | is a small-size optimized vector with the ability to reallocate. `coca` also 118 | requires a newer Rust version (min. 1.59) than tinyvec (min. 1.34). 119 | - Both arrayvec and tinyvec have optional [`serde`](https://crates.io/crates/serde) 120 | support, while `coca` does not. 121 | - `coca::collections::Vec` supports more storage modes with just one implementation, 122 | meaning its instantiations inter-operate more easily, and you can write generic 123 | code to handle all of them. It is also generic over the index type, similar to 124 | what is offered by the [`typed_index_collections`][3] crate. 125 | 126 | [3]: https://crates.io/crates/typed-index-collections 127 | 128 | ## Feature Flags 129 | 130 | - `alloc`: By default, coca is `no_std` compatible; this feature flag enables 131 | some trait implementations for conveniently working with heap-allocated storage. 132 | - `profile`: Enables memory profiling in arenas; see the module-level documentation 133 | for details. 134 | - `unstable`: If you're working with the nightly Rust toolchain, and don't mind 135 | depending on unstable features, you can enable this feature to get access to 136 | `InlineObject`, allowing you to create trait objects without indirection. 137 | 138 | ## License 139 | 140 | Licensed under either [Apache License, Version 2.0](LICENSE-APACHE) or 141 | [Zlib license](LICENSE-ZLIB) at your option.
142 | 143 | Unless you explicitly state otherwise, any contribution intentionally submitted 144 | for inclusion in this crate by you, as defined in the Apache-2.0 license, shall 145 | be dual licensed as above, without any additional terms or conditions. -------------------------------------------------------------------------------- /benches/maps.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "alloc")] 2 | #![feature(test)] 3 | 4 | extern crate std; 5 | extern crate test; 6 | 7 | use rand::{rngs::SmallRng, seq::SliceRandom, RngCore, SeedableRng}; 8 | use test::Bencher; 9 | 10 | trait Map { 11 | fn with_capacity(capacity: usize) -> Self; 12 | fn get(&self, k: &K) -> Option<&V>; 13 | fn insert(&mut self, k: K, v: V); 14 | fn remove(&mut self, k: &K) -> Option; 15 | } 16 | 17 | macro_rules! insertions { 18 | ($fnn:ident, $ty:ty, $n:expr) => { 19 | #[bench] 20 | fn $fnn(b: &mut Bencher) { 21 | let mut rng = SmallRng::seed_from_u64(0x5432_1012_3454_3210); 22 | b.iter(|| { 23 | let mut map = <$ty as Map>::with_capacity($n); 24 | for _ in 0..$n { 25 | map.insert(rng.next_u32(), rng.next_u32()); 26 | } 27 | }) 28 | } 29 | }; 30 | } 31 | 32 | const LOOKUP_BENCH_N: usize = 500; 33 | macro_rules! lookups { 34 | ($fnn:ident, $ty:ty, $n:expr) => { 35 | #[bench] 36 | fn $fnn(b: &mut Bencher) { 37 | let mut rng = SmallRng::seed_from_u64(0x5432_1012_3454_3210); 38 | let mut map = <$ty as Map>::with_capacity($n); 39 | for _ in 0..$n { 40 | map.insert(rng.next_u32(), rng.next_u32()); 41 | } 42 | 43 | b.iter(|| { 44 | for _ in 0..LOOKUP_BENCH_N { 45 | let k = rng.next_u32(); 46 | let _ = map.get(&k); 47 | } 48 | }) 49 | } 50 | }; 51 | } 52 | 53 | macro_rules! removals { 54 | ($fnn:ident, $ty:ty, $n:expr) => { 55 | #[bench] 56 | fn $fnn(b: &mut Bencher) { 57 | let mut rng = SmallRng::seed_from_u64(0x5432_1012_3454_3210); 58 | let mut pairs = coca::AllocVec::<(u32, u32), usize>::with_capacity($n); 59 | for _ in 0..$n { 60 | pairs.push((rng.next_u32(), rng.next_u32())); 61 | } 62 | 63 | b.iter(|| { 64 | let mut map = <$ty as Map>::with_capacity($n); 65 | for &(k, v) in pairs.iter() { 66 | map.insert(k, v); 67 | } 68 | 69 | pairs.shuffle(&mut rng); 70 | for &(k, v) in pairs.iter() { 71 | assert_eq!(map.remove(&k), Some(v)); 72 | } 73 | }); 74 | } 75 | }; 76 | } 77 | 78 | mod unordered { 79 | use super::*; 80 | use coca::AllocVec; 81 | use std::collections::HashMap as StdHashMap; 82 | 83 | #[allow(unconditional_recursion)] // false positive! 
84 | impl Map for StdHashMap { 85 | fn with_capacity(capacity: usize) -> Self { 86 | StdHashMap::with_capacity(capacity) 87 | } 88 | 89 | fn get(&self, k: &K) -> Option<&V> { 90 | self.get(k) 91 | } 92 | 93 | fn insert(&mut self, k: K, v: V) { 94 | self.insert(k, v); 95 | } 96 | 97 | fn remove(&mut self, k: &K) -> Option { 98 | self.remove(k) 99 | } 100 | } 101 | 102 | insertions!(std_hashmap_16_inserts, StdHashMap<_, _>, 16); 103 | insertions!(std_hashmap_64_inserts, StdHashMap<_, _>, 64); 104 | insertions!(std_hashmap_256_inserts, StdHashMap<_, _>, 256); 105 | insertions!(std_hashmap_1024_inserts, StdHashMap<_, _>, 1024); 106 | 107 | lookups!(std_hashmap_16_lookups, StdHashMap<_, _>, 16); 108 | lookups!(std_hashmap_64_lookups, StdHashMap<_, _>, 64); 109 | lookups!(std_hashmap_256_lookups, StdHashMap<_, _>, 256); 110 | lookups!(std_hashmap_1024_lookups, StdHashMap<_, _>, 1024); 111 | 112 | removals!(std_hashmap_16_removals, StdHashMap<_, _>, 16); 113 | removals!(std_hashmap_64_removals, StdHashMap<_, _>, 64); 114 | removals!(std_hashmap_256_removals, StdHashMap<_, _>, 256); 115 | removals!(std_hashmap_1024_removals, StdHashMap<_, _>, 1024); 116 | 117 | struct VecLinearMap { 118 | keys: AllocVec, 119 | values: AllocVec, 120 | } 121 | 122 | impl Map for VecLinearMap 123 | where 124 | K: PartialEq + Copy, 125 | V: Copy, 126 | { 127 | fn with_capacity(capacity: usize) -> Self { 128 | let keys = AllocVec::with_capacity(capacity); 129 | let values = AllocVec::with_capacity(capacity); 130 | VecLinearMap { keys, values } 131 | } 132 | 133 | fn get(&self, k: &K) -> Option<&V> { 134 | let idx = self.keys.iter().position(|candidate| candidate == k); 135 | idx.map(|i| &self.values[i]) 136 | } 137 | 138 | fn insert(&mut self, k: K, v: V) { 139 | self.keys.push(k); 140 | self.values.push(v); 141 | } 142 | 143 | fn remove(&mut self, k: &K) -> Option { 144 | let idx = self.keys.iter().position(|candidate| candidate == k); 145 | idx.map(|i| { 146 | self.keys.swap_remove(i); 147 | self.values.swap_remove(i) 148 | }) 149 | } 150 | } 151 | 152 | insertions!(vec_linear_map_16_inserts, VecLinearMap<_, _>, 16); 153 | insertions!(vec_linear_map_64_inserts, VecLinearMap<_, _>, 64); 154 | insertions!(vec_linear_map_256_inserts, VecLinearMap<_, _>, 256); 155 | insertions!(vec_linear_map_1024_inserts, VecLinearMap<_, _>, 1024); 156 | 157 | lookups!(vec_linear_map_16_lookups, VecLinearMap<_, _>, 16); 158 | lookups!(vec_linear_map_64_lookups, VecLinearMap<_, _>, 64); 159 | lookups!(vec_linear_map_256_lookups, VecLinearMap<_, _>, 256); 160 | lookups!(vec_linear_map_1024_lookups, VecLinearMap<_, _>, 1024); 161 | 162 | removals!(vec_linear_map_16_removals, VecLinearMap<_, _>, 16); 163 | removals!(vec_linear_map_64_removals, VecLinearMap<_, _>, 64); 164 | removals!(vec_linear_map_256_removals, VecLinearMap<_, _>, 256); 165 | removals!(vec_linear_map_1024_removals, VecLinearMap<_, _>, 1024); 166 | } 167 | 168 | mod ordered { 169 | use super::*; 170 | use coca::AllocVec; 171 | use std::collections::BTreeMap; 172 | 173 | impl Map for BTreeMap 174 | where 175 | K: Ord, 176 | { 177 | fn with_capacity(_: usize) -> Self { 178 | BTreeMap::new() 179 | } 180 | 181 | fn get(&self, k: &K) -> Option<&V> { 182 | self.get(k) 183 | } 184 | 185 | fn insert(&mut self, k: K, v: V) { 186 | self.insert(k, v); 187 | } 188 | 189 | fn remove(&mut self, k: &K) -> Option { 190 | self.remove(k) 191 | } 192 | } 193 | 194 | insertions!(std_btree_map_16_inserts, BTreeMap<_, _>, 16); 195 | insertions!(std_btree_map_64_inserts, BTreeMap<_, _>, 64); 196 
| insertions!(std_btree_map_256_inserts, BTreeMap<_, _>, 256); 197 | insertions!(std_btree_map_1024_inserts, BTreeMap<_, _>, 1024); 198 | 199 | lookups!(std_btree_map_16_lookups, BTreeMap<_, _>, 16); 200 | lookups!(std_btree_map_64_lookups, BTreeMap<_, _>, 64); 201 | lookups!(std_btree_map_256_lookups, BTreeMap<_, _>, 256); 202 | lookups!(std_btree_map_1024_lookups, BTreeMap<_, _>, 1024); 203 | 204 | removals!(std_btree_map_16_removals, BTreeMap<_, _>, 16); 205 | removals!(std_btree_map_64_removals, BTreeMap<_, _>, 64); 206 | removals!(std_btree_map_256_removals, BTreeMap<_, _>, 256); 207 | removals!(std_btree_map_1024_removals, BTreeMap<_, _>, 1024); 208 | 209 | struct OrderedVecMap { 210 | keys: AllocVec, 211 | values: AllocVec, 212 | } 213 | 214 | impl Map for OrderedVecMap { 215 | fn with_capacity(capacity: usize) -> Self { 216 | let keys = AllocVec::with_capacity(capacity); 217 | let values = AllocVec::with_capacity(capacity); 218 | 219 | OrderedVecMap { keys, values } 220 | } 221 | 222 | fn get(&self, k: &K) -> Option<&V> { 223 | self.keys.binary_search(k).ok().map(|idx| &self.values[idx]) 224 | } 225 | 226 | fn insert(&mut self, k: K, v: V) { 227 | match self.keys.binary_search(&k) { 228 | Ok(idx) => { 229 | self.values.replace(idx, v); 230 | } 231 | Err(idx) => { 232 | self.keys.insert(idx, k); 233 | self.values.insert(idx, v); 234 | } 235 | } 236 | } 237 | 238 | fn remove(&mut self, k: &K) -> Option { 239 | self.keys.binary_search(k).ok().map(|idx| { 240 | self.keys.remove(idx); 241 | self.values.remove(idx) 242 | }) 243 | } 244 | } 245 | 246 | insertions!(ordered_vec_map_16_inserts, OrderedVecMap<_, _>, 16); 247 | insertions!(ordered_vec_map_64_inserts, OrderedVecMap<_, _>, 64); 248 | insertions!(ordered_vec_map_256_inserts, OrderedVecMap<_, _>, 256); 249 | insertions!(ordered_vec_map_1024_inserts, OrderedVecMap<_, _>, 1024); 250 | 251 | lookups!(ordered_vec_map_16_lookups, OrderedVecMap<_, _>, 16); 252 | lookups!(ordered_vec_map_64_lookups, OrderedVecMap<_, _>, 64); 253 | lookups!(ordered_vec_map_256_lookups, OrderedVecMap<_, _>, 256); 254 | lookups!(ordered_vec_map_1024_lookups, OrderedVecMap<_, _>, 1024); 255 | 256 | removals!(ordered_vec_map_16_removals, OrderedVecMap<_, _>, 16); 257 | removals!(ordered_vec_map_64_removals, OrderedVecMap<_, _>, 64); 258 | removals!(ordered_vec_map_256_removals, OrderedVecMap<_, _>, 256); 259 | removals!(ordered_vec_map_1024_removals, OrderedVecMap<_, _>, 1024); 260 | } 261 | -------------------------------------------------------------------------------- /coca.dic: -------------------------------------------------------------------------------- 1 | 38 2 | accessor 3 | alloc 4 | allocator 5 | arrayvec 6 | bumpalo 7 | deque 8 | deque's 9 | discriminants 10 | enum 11 | growable 12 | implementor 13 | implementors 14 | inconstructible 15 | incontiguously 16 | incrementing 17 | inlined 18 | inlining 19 | io 20 | Lokathor 21 | lookups 22 | mpmc 23 | n² 24 | panics 25 | prepends 26 | reallocations 27 | representable 28 | rustc 29 | spsc 30 | struct 31 | structs 32 | superset 33 | tinyvec 34 | toolchain 35 | unsafety 36 | vec 37 | VecDeque 38 | Zlib 39 | · 40 | -------------------------------------------------------------------------------- /examples/huffman.rs: -------------------------------------------------------------------------------- 1 | use coca::arena::{Arena, Box}; 2 | 3 | use std::mem::{size_of, MaybeUninit}; 4 | 5 | const SOURCE: &str = include_str!("huffman.rs"); 6 | 7 | fn calc_huff_encoded_size(input: &[u8]) -> usize { 8 | const 
MEM_FOR_FREQS_AND_CODELEN: usize = (1 + 256 + 256) * size_of::(); // + 1 usize for alignment 9 | const MEM_FOR_TREES: usize = 512 * size_of::(); 10 | const ARENA_SIZE: usize = MEM_FOR_FREQS_AND_CODELEN + MEM_FOR_TREES; 11 | 12 | let mut backing = std::boxed::Box::new([MaybeUninit::::uninit(); ARENA_SIZE]); 13 | let mut arena = Arena::from(&mut backing[..]); 14 | 15 | let mut freqs = arena.array(0isize, 256); 16 | for &b in input { 17 | freqs[b as usize] += 1; 18 | } 19 | 20 | #[derive(Debug)] 21 | enum CodeTree<'a> { 22 | Symbol { 23 | sym: u8, 24 | }, 25 | Composite { 26 | left: Box<'a, CodeTree<'a>>, 27 | right: Box<'a, CodeTree<'a>>, 28 | }, 29 | } 30 | 31 | struct Item<'a>(isize, Box<'a, CodeTree<'a>>); 32 | impl<'a> std::cmp::PartialEq for Item<'a> { 33 | fn eq(&self, rhs: &Self) -> bool { 34 | self.0.eq(&rhs.0) 35 | } 36 | } 37 | impl<'a> std::cmp::PartialOrd for Item<'a> { 38 | fn partial_cmp(&self, rhs: &Self) -> Option { 39 | Some(self.cmp(rhs)) 40 | } 41 | } 42 | impl<'a> std::cmp::Eq for Item<'a> {} 43 | impl<'a> std::cmp::Ord for Item<'a> { 44 | fn cmp(&self, rhs: &Self) -> std::cmp::Ordering { 45 | self.0.cmp(&rhs.0).reverse() 46 | } 47 | } 48 | 49 | let mut items: coca::collections::ArenaVec<'_, Item> = arena.with_capacity(256usize); 50 | for (idx, freq) in freqs.iter().enumerate() { 51 | if *freq == 0 { 52 | continue; 53 | } 54 | let leaf_node = arena.alloc(CodeTree::Symbol { sym: idx as u8 }); 55 | items.push(Item(*freq, leaf_node)); 56 | } 57 | 58 | let mut queue = coca::collections::ArenaHeap::, _>::from(items); 59 | while queue.len() > 1 { 60 | let Item(freq_l, left) = queue.pop().unwrap(); 61 | let Item(freq_r, right) = queue.pop().unwrap(); 62 | 63 | let parent_node = arena.alloc(CodeTree::Composite { left, right }); 64 | let combined_freq = freq_l + freq_r; 65 | 66 | queue.push(Item(combined_freq, parent_node)); 67 | } 68 | 69 | fn find_code_lens(node: &CodeTree, recursion_level: usize, code_len_table: &mut [usize; 256]) { 70 | match &*node { 71 | CodeTree::Symbol { sym } => code_len_table[*sym as usize] = recursion_level, 72 | CodeTree::Composite { left, right } => { 73 | find_code_lens(left.as_ref(), recursion_level + 1, code_len_table); 74 | find_code_lens(right.as_ref(), recursion_level + 1, code_len_table); 75 | } 76 | } 77 | } 78 | 79 | let mut code_lens = arena.alloc([usize::MAX; 256]); 80 | find_code_lens(queue.pop().unwrap().1.as_ref(), 0, code_lens.as_mut()); 81 | let bits_needed: usize = (0..256).map(|i| code_lens[i] * freqs[i] as usize).sum(); 82 | 83 | (bits_needed + 7) / 8 84 | } 85 | 86 | fn main() { 87 | let args = std::env::args().collect::>(); 88 | let input_file_content = match args.len() { 89 | 1 => std::vec::Vec::from(SOURCE.as_bytes()), 90 | 2 => { 91 | let input_file_path = &args[1]; 92 | match std::fs::read(input_file_path) { 93 | Ok(content) => content, 94 | Err(e) => { 95 | println!("Failed to read file: {}", e); 96 | std::process::exit(1); 97 | } 98 | } 99 | } 100 | _ => { 101 | println!("Usage: huffman "); 102 | std::process::exit(1); 103 | } 104 | }; 105 | 106 | println!("Uncompressed: {} bytes", input_file_content.len()); 107 | println!( 108 | "Huffman compressed: {} bytes", 109 | calc_huff_encoded_size(&input_file_content) 110 | ); 111 | } 112 | -------------------------------------------------------------------------------- /examples/icosphere.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::many_single_char_names)] 2 | 3 | use std::mem::MaybeUninit; 4 | 5 | use 
coca::arena::Arena; 6 | use coca::collections::{ArenaDeque, ArenaVec}; 7 | use coca::storage::Capacity; 8 | 9 | coca::index_type! { VertexID: u32 } 10 | 11 | type Vertex = (f32, f32, f32); 12 | type Face = (VertexID, VertexID, VertexID); 13 | const fn face(a: u32, b: u32, c: u32) -> Face { 14 | (VertexID(a), VertexID(b), VertexID(c)) 15 | } 16 | 17 | struct Mesh<'a> { 18 | vertices: ArenaVec<'a, Vertex, VertexID>, 19 | faces: ArenaVec<'a, Face, u32>, 20 | } 21 | 22 | fn generate_icosphere<'a>(arena: &mut Arena<'a>, subdivision_frequency: u32) -> Mesh<'a> { 23 | assert!(subdivision_frequency.is_power_of_two()); 24 | 25 | let triangulation_number = subdivision_frequency.pow(2); 26 | let n_vertices = 10 * triangulation_number + 2; 27 | let mut vertices: ArenaVec<'_, Vertex, VertexID> = arena.with_capacity(n_vertices as usize); 28 | 29 | let n_faces = 20 * triangulation_number; 30 | let mut faces: ArenaDeque<'_, Face, u32> = arena.with_capacity(n_faces as usize); 31 | 32 | // each edge's midpoint is calculated twice, once for each adjacent face 33 | // so we can cache the calculated midpoint the first time, 34 | // and remove it from the cache when it is retrieved 35 | // 36 | // this way, the maximum number of midpoints stored at any point is 37 | // the number of edges of the icosphere with half the final subdivision 38 | // frequency: 39 | 40 | let mut tmp = arena.make_sub_arena(); 41 | let mut edge_subdivision_cache: ArenaVec<(VertexID, VertexID, VertexID), u32> = 42 | tmp.with_capacity(30 * (subdivision_frequency / 2).pow(2) as usize); 43 | 44 | fn subdivide_edge( 45 | a: VertexID, 46 | b: VertexID, 47 | vertices: &mut ArenaVec<'_, Vertex, VertexID>, 48 | cache: &mut ArenaVec<'_, (VertexID, VertexID, VertexID), u32>, 49 | ) -> VertexID { 50 | let (idx_a, idx_b) = if a.as_usize() < b.as_usize() { 51 | (a, b) 52 | } else { 53 | (b, a) 54 | }; 55 | 56 | if let Some(idx) = cache 57 | .iter() 58 | .position(|&(fst, snd, _)| fst == idx_a && snd == idx_b) 59 | { 60 | cache.remove(idx as u32).2 61 | } else { 62 | let a = vertices[idx_a]; 63 | let b = vertices[idx_b]; 64 | 65 | let x = (a.0 + b.0) / 2.0; 66 | let y = (a.1 + b.1) / 2.0; 67 | let z = (a.2 + b.2) / 2.0; 68 | 69 | let len = (x.powi(2) + y.powi(2) + z.powi(2)).sqrt(); 70 | let x = x / len; 71 | let y = y / len; 72 | let z = z / len; 73 | 74 | let idx_mid = VertexID::from_usize(vertices.len()); 75 | vertices.push((x, y, z)); 76 | 77 | cache.push((idx_a, idx_b, idx_mid)); 78 | idx_mid 79 | } 80 | } 81 | 82 | { 83 | // Initialization: start with a regular icosahedron 84 | // From Wikipedia: "The vertices of an icosahedron centered at the 85 | // origin with an edge-length of 2 and a circumradius of `(phi + 2).sqrt()` 86 | // are the circular permutations of `(0, +/- 1, +/- phi)`" 87 | // https://en.wikipedia.org/wiki/Regular_icosahedron#Cartesian_coordinates 88 | 89 | // we start with the normalized coordinates, so that the distance from 90 | // each vertex to the origin equals one 91 | let phi = (1.0 + 5.0f32.sqrt()) / 2.0; 92 | let len = (1.0 + phi.powi(2)).sqrt(); 93 | 94 | let one = 1.0 / len; 95 | let phi = phi / len; 96 | 97 | vertices.push((-one, phi, 0.0)); 98 | vertices.push((one, phi, 0.0)); 99 | vertices.push((-one, -phi, 0.0)); 100 | vertices.push((one, -phi, 0.0)); 101 | 102 | vertices.push((0.0, -one, phi)); 103 | vertices.push((0.0, one, phi)); 104 | vertices.push((0.0, -one, -phi)); 105 | vertices.push((0.0, one, -phi)); 106 | 107 | vertices.push((phi, 0.0, -one)); 108 | vertices.push((phi, 0.0, one)); 109 | 
vertices.push((-phi, 0.0, -one)); 110 | vertices.push((-phi, 0.0, one)); 111 | 112 | faces.push_back(face(0, 11, 5)); 113 | faces.push_back(face(0, 5, 1)); 114 | faces.push_back(face(0, 1, 7)); 115 | faces.push_back(face(0, 7, 10)); 116 | faces.push_back(face(0, 10, 11)); 117 | 118 | faces.push_back(face(1, 5, 9)); 119 | faces.push_back(face(5, 11, 4)); 120 | faces.push_back(face(11, 10, 2)); 121 | faces.push_back(face(10, 7, 6)); 122 | faces.push_back(face(7, 1, 8)); 123 | 124 | faces.push_back(face(3, 9, 4)); 125 | faces.push_back(face(3, 4, 2)); 126 | faces.push_back(face(3, 2, 6)); 127 | faces.push_back(face(3, 6, 8)); 128 | faces.push_back(face(3, 8, 9)); 129 | 130 | faces.push_back(face(4, 9, 5)); 131 | faces.push_back(face(2, 4, 11)); 132 | faces.push_back(face(6, 2, 10)); 133 | faces.push_back(face(8, 6, 7)); 134 | faces.push_back(face(9, 8, 1)); 135 | } 136 | 137 | let mut threshold = 10000; 138 | while !faces.is_full() { 139 | let tri = faces.pop_front().unwrap(); 140 | let a = subdivide_edge(tri.0, tri.1, &mut vertices, &mut edge_subdivision_cache); 141 | let b = subdivide_edge(tri.1, tri.2, &mut vertices, &mut edge_subdivision_cache); 142 | let c = subdivide_edge(tri.2, tri.0, &mut vertices, &mut edge_subdivision_cache); 143 | 144 | faces.push_back((tri.0, a, c)); 145 | faces.push_back((tri.1, b, a)); 146 | faces.push_back((tri.2, c, b)); 147 | faces.push_back((a, b, c)); 148 | 149 | if vertices.len() > threshold { 150 | println!( 151 | "generated {} / {} vertices", 152 | vertices.len(), 153 | vertices.capacity() 154 | ); 155 | threshold += 10000; 156 | } 157 | } 158 | 159 | assert!(edge_subdivision_cache.is_empty()); 160 | assert!(vertices.is_full()); 161 | 162 | let faces = unsafe { 163 | let (storage, _, len) = faces.into_raw_parts(); 164 | ArenaVec::from_raw_parts(storage, len) 165 | }; 166 | 167 | Mesh { vertices, faces } 168 | } 169 | 170 | fn main() { 171 | let mut backing = vec![MaybeUninit::::uninit(); 32 * 1024]; 172 | let mut arena = Arena::from(&mut backing[..]); 173 | let icosphere = generate_icosphere(&mut arena, 8); 174 | 175 | // Verify the generated mesh is indeed an icosphere: 176 | 177 | // 1. count the number of neighbours for each vertex 178 | // this is equivalent to counting the faces each vertex is a part of: 179 | // two other vertices from the same face are neighbours (i.e. connected 180 | // by an edge), but each edge also connects two faces; 181 | // these factors cancel out 182 | 183 | let mut neighbour_counts = arena.array(0u32, icosphere.vertices.len()); 184 | for tri in icosphere.faces { 185 | neighbour_counts[tri.0.as_usize()] += 1; 186 | neighbour_counts[tri.1.as_usize()] += 1; 187 | neighbour_counts[tri.2.as_usize()] += 1; 188 | } 189 | 190 | // 2. 
check the distribution of numbers-of-neighbours 191 | // each vertex added during the subdivision phase should have six 192 | // neighbours; the 12 vertices of an icosahedron have five neighbors 193 | // each, and the subdivision process does not change this property 194 | 195 | let mut num_five_neighbours = 0u32; 196 | let mut num_six_neighbours = 0u32; 197 | for (vertex_id, &count) in neighbour_counts.iter().enumerate() { 198 | if count == 5 { 199 | assert!(vertex_id < 12); 200 | num_five_neighbours += 1; 201 | } else if count == 6 { 202 | num_six_neighbours += 1; 203 | } else { 204 | panic!("vertex {} has {} neighbours", vertex_id, count); 205 | } 206 | } 207 | 208 | assert_eq!(num_five_neighbours, 12); 209 | assert_eq!(num_six_neighbours, icosphere.vertices.len() as u32 - 12); 210 | 211 | println!("OK."); 212 | } 213 | -------------------------------------------------------------------------------- /spellcheck.toml: -------------------------------------------------------------------------------- 1 | # Configuration file for cargo-spellcheck 2 | 3 | [Hunspell] 4 | lang = "en_US" 5 | search_dirs = ["./"] 6 | extra_dictionaries = ["coca.dic"] -------------------------------------------------------------------------------- /src/collections/binary_heap.rs: -------------------------------------------------------------------------------- 1 | //! A fixed-capacity priority queue implemented with a binary heap. 2 | //! 3 | //! Insertion and popping the largest element have O(log(n)) time complexity. 4 | //! Checking the largest element is O(1). 5 | //! 6 | //! [`BinaryHeap`](BinaryHeap) wraps a [`Vec`](Vec) and 7 | //! can therefore be converted into the underlying vector type at zero cost. 8 | //! Converting a vector to a binary heap can be done in-place, and has O(n) 9 | //! complexity. A binary heap can also be converted to a sorted vector in-place, 10 | //! allowing it to be used for an O(n log(n)) in-place heap sort. 11 | 12 | use crate::collections::vec::{Drain, Vec}; 13 | use crate::storage::{ArrayLayout, Capacity, Storage}; 14 | 15 | use core::fmt::{self, Debug, Formatter}; 16 | use core::iter::{FromIterator, FusedIterator}; 17 | #[allow(unused_imports)] 18 | use core::mem::MaybeUninit; 19 | use core::ops::{Deref, DerefMut}; 20 | 21 | /// A fixed-capacity priority queue implemented with a binary heap. 22 | /// 23 | /// This will be a max-heap, i.e. [`heap.pop()`](BinaryHeap::pop) will return 24 | /// the largest value in the queue. [`core::cmp::Reverse`] or a custom `Ord` 25 | /// implementation can be used to make a min-heap instead. 26 | /// 27 | /// It is a logic error for an item to be modified in such a way that the 28 | /// item's ordering relative to any other item, as determined by the `Ord` 29 | /// trait, changes while it is in the heap. This is normally only possible 30 | /// through `Cell`, `RefCell`, global state, I/O, or unsafe code. 31 | pub struct BinaryHeap>, I: Capacity = usize> { 32 | a: Vec, 33 | } 34 | 35 | /// Structure wrapping a mutable reference to the greatest item on a `BinaryHeap`. 36 | /// 37 | /// This `struct` is created by the [`BinaryHeap::peek_mut()`] method. See its 38 | /// documentation for more. 
39 | pub struct PeekMut<'a, T: 'a + Ord, S: Storage>, I: Capacity = usize> { 40 | heap: &'a mut BinaryHeap, 41 | } 42 | 43 | impl>, I: Capacity> Debug for PeekMut<'_, T, S, I> { 44 | fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { 45 | f.debug_tuple("PeekMut").field(&self.heap.peek()).finish() 46 | } 47 | } 48 | 49 | impl>, I: Capacity> Drop for PeekMut<'_, T, S, I> { 50 | fn drop(&mut self) { 51 | heapify(self.heap.a.as_mut_slice(), 0); 52 | } 53 | } 54 | 55 | impl>, I: Capacity> Deref for PeekMut<'_, T, S, I> { 56 | type Target = T; 57 | 58 | fn deref(&self) -> &Self::Target { 59 | debug_assert!(!self.heap.is_empty()); 60 | unsafe { self.heap.a.get_unchecked(0) } 61 | } 62 | } 63 | 64 | impl>, I: Capacity> DerefMut for PeekMut<'_, T, S, I> { 65 | fn deref_mut(&mut self) -> &mut Self::Target { 66 | debug_assert!(!self.heap.is_empty()); 67 | unsafe { self.heap.a.get_unchecked_mut(0) } 68 | } 69 | } 70 | 71 | impl>, I: Capacity> PeekMut<'_, T, S, I> { 72 | /// Removes the peeked value from the heap and returns it. 73 | pub fn pop(this: PeekMut<'_, T, S, I>) -> T { 74 | debug_assert!(!this.heap.is_empty()); 75 | if let Some(value) = this.heap.pop() { 76 | core::mem::forget(this); 77 | value 78 | } else { 79 | unreachable!() 80 | } 81 | } 82 | } 83 | 84 | impl>, I: Capacity> From for BinaryHeap { 85 | /// Converts a contiguous block of memory into an empty binary heap. 86 | /// 87 | /// # Panics 88 | /// This may panic if the index type I cannot represent `buf.capacity()`. 89 | fn from(buf: S) -> Self { 90 | BinaryHeap { a: Vec::from(buf) } 91 | } 92 | } 93 | 94 | // This implementatin is largely based on the pseudocode given in 95 | // CLRS - Introduction to Algorithms (third edition), Chapter 6 96 | 97 | // These utility functions for binary tree traversal differ from the reference 98 | // because we're using 0-based indexing, i.e. these are equivalent to 99 | // `PARENT(i + 1) - 1`, `LEFT(i + 1) - 1`, and `RIGHT(i + 1) - 1`, respectively. 100 | #[inline(always)] 101 | fn parent(i: usize) -> usize { 102 | (i + 1) / 2 - 1 103 | } 104 | 105 | #[inline(always)] 106 | fn left(i: usize) -> usize { 107 | 2 * (i + 1) - 1 108 | } 109 | 110 | #[inline(always)] 111 | fn right(i: usize) -> usize { 112 | 2 * (i + 1) 113 | } 114 | 115 | fn heapify(a: &mut [T], i: usize) { 116 | let l = left(i); 117 | let r = right(i); 118 | let mut largest = if l < a.len() && a[l] > a[i] { l } else { i }; 119 | if r < a.len() && a[r] > a[largest] { 120 | largest = r; 121 | } 122 | if largest != i { 123 | a.swap(i, largest); 124 | heapify(a, largest); 125 | } 126 | } 127 | 128 | impl>, I: Capacity> Debug for BinaryHeap { 129 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 130 | f.debug_list().entries(self.iter()).finish() 131 | } 132 | } 133 | 134 | impl>, I: Capacity> From> for BinaryHeap { 135 | /// Converts a [`Vec`] into a binary heap. 136 | /// 137 | /// This conversion happens in-place, and has O(n) time complexity. 138 | fn from(mut vec: Vec) -> Self { 139 | let a = vec.as_mut_slice(); 140 | for i in (0..(a.len() / 2)).rev() { 141 | heapify(a, i); 142 | } 143 | BinaryHeap { a: vec } 144 | } 145 | } 146 | 147 | impl>, I: Capacity> BinaryHeap { 148 | /// Returns a reference to the greatest item in the binary heap, or [`None`] if it is empty. 149 | #[inline] 150 | pub fn peek(&self) -> Option<&T> { 151 | self.a.first() 152 | } 153 | 154 | /// Returns a mutable reference to the greatest item in the binary heap, or 155 | /// [`None`] if it is empty. 
156 | /// 157 | /// Note: If the `PeekMut` value is leaked, the heap may be left in an 158 | /// inconsistent state. 159 | /// 160 | /// # Examples 161 | /// ``` 162 | /// let mut backing_region = [core::mem::MaybeUninit::::uninit(); 3]; 163 | /// let mut heap = coca::collections::SliceHeap::<_>::from(&mut backing_region[..]); 164 | /// heap.try_push(3); 165 | /// heap.try_push(5); 166 | /// heap.try_push(1); 167 | /// 168 | /// { 169 | /// let mut val = heap.peek_mut().unwrap(); 170 | /// *val = 0; 171 | /// } 172 | /// 173 | /// assert_eq!(heap.pop(), Some(3)); 174 | /// assert_eq!(heap.pop(), Some(1)); 175 | /// assert_eq!(heap.pop(), Some(0)); 176 | /// ``` 177 | #[inline] 178 | pub fn peek_mut(&mut self) -> Option> { 179 | if self.is_empty() { 180 | None 181 | } else { 182 | Some(PeekMut { heap: self }) 183 | } 184 | } 185 | 186 | /// Removes the greatest element from the binary heap and returns it, or [`None`] if it is empty. 187 | /// 188 | /// # Examples 189 | /// ``` 190 | /// use coca::collections::{SliceHeap, SliceVec}; 191 | /// let mut backing_region = [core::mem::MaybeUninit::::uninit(); 3]; 192 | /// let mut vec = SliceVec::::from(&mut backing_region[..]); 193 | /// vec.push(1); vec.push(3); 194 | /// 195 | /// let mut heap = SliceHeap::from(vec); 196 | /// 197 | /// assert_eq!(heap.pop(), Some(3)); 198 | /// assert_eq!(heap.pop(), Some(1)); 199 | /// assert_eq!(heap.pop(), None); 200 | /// ``` 201 | pub fn pop(&mut self) -> Option { 202 | if self.is_empty() { 203 | return None; 204 | } 205 | 206 | let result = self.a.swap_remove(I::from_usize(0)); 207 | heapify(self.a.as_mut_slice(), 0); 208 | Some(result) 209 | } 210 | 211 | /// Pushes an item onto the binary heap. 212 | /// 213 | /// # Panics 214 | /// Panics if the heap is already at capacity. See [`try_push`](BinaryHeap::try_push) 215 | /// for a checked version that never panics. 216 | #[inline] 217 | pub fn push(&mut self, item: T) { 218 | #[cold] 219 | #[inline(never)] 220 | fn assert_failed() -> ! { 221 | panic!("binary heap is already at capacity") 222 | } 223 | 224 | if self.try_push(item).is_err() { 225 | assert_failed(); 226 | } 227 | } 228 | 229 | /// Pushes an item onto the binary heap, returning `Err(item)` if it is full. 230 | /// 231 | /// # Examples 232 | /// ``` 233 | /// let mut backing_region = [core::mem::MaybeUninit::::uninit(); 3]; 234 | /// let mut heap = coca::collections::SliceHeap::<_>::from(&mut backing_region[..]); 235 | /// heap.try_push(3); 236 | /// heap.try_push(5); 237 | /// heap.try_push(1); 238 | /// 239 | /// assert_eq!(heap.len(), 3); 240 | /// assert_eq!(heap.peek(), Some(&5)); 241 | /// ``` 242 | pub fn try_push(&mut self, item: T) -> Result<(), T> { 243 | self.a.try_push(item)?; 244 | let a = self.a.as_mut_slice(); 245 | let mut i = a.len() - 1; 246 | while i > 0 && a[parent(i)] < a[i] { 247 | a.swap(i, parent(i)); 248 | i = parent(i); 249 | } 250 | Ok(()) 251 | } 252 | 253 | /// Returns the number of elements the binary heap can hold. 254 | #[inline] 255 | pub fn capacity(&self) -> usize { 256 | self.a.capacity() 257 | } 258 | 259 | /// Returns the number of elements in the binary heap, also referred to as its *length*. 260 | #[inline] 261 | pub fn len(&self) -> usize { 262 | self.a.len() 263 | } 264 | 265 | /// Returns `true` if the binary heap contains no elements. 266 | #[inline] 267 | pub fn is_empty(&self) -> bool { 268 | self.a.is_empty() 269 | } 270 | 271 | /// Returns `true` if the binary heap contains the maximum number of elements. 
272 | #[inline] 273 | pub fn is_full(&self) -> bool { 274 | self.a.is_full() 275 | } 276 | 277 | /// Returns an iterator visiting all values in the underlying vector in arbitrary order. 278 | pub fn iter(&self) -> impl Iterator { 279 | self.a.iter() 280 | } 281 | 282 | /// Clears the binary heap, returning an iterator over the removed elements. 283 | /// The elements are removed in arbitrary order. 284 | /// 285 | /// # Examples 286 | /// ``` 287 | /// let mut backing_region = [core::mem::MaybeUninit::::uninit(); 3]; 288 | /// let mut heap = coca::collections::SliceHeap::<_>::from(&mut backing_region[..]); 289 | /// heap.push(1); heap.push(3); 290 | /// assert!(!heap.is_empty()); 291 | /// 292 | /// let mut iter = heap.drain(); 293 | /// assert!(iter.next().is_some()); 294 | /// assert!(iter.next().is_some()); 295 | /// assert!(iter.next().is_none()); 296 | /// drop(iter); 297 | /// 298 | /// assert!(heap.is_empty()); 299 | /// ``` 300 | #[inline] 301 | pub fn drain(&mut self) -> Drain<'_, T, S, I> { 302 | self.a.drain(..) 303 | } 304 | 305 | /// Returns an iterator which retrieves elements in heap order. The retrieved 306 | /// elements are removed from the original heap. The remaining elements will 307 | /// be removed on drop in heap order. 308 | /// 309 | /// # Remarks 310 | /// `.drain_sorted()` is O(n log(n)), much slower than [`.drain()`](BinaryHeap::drain). 311 | /// The latter is preferable in most cases. 312 | /// 313 | /// # Examples 314 | /// ``` 315 | /// let mut backing_region = [core::mem::MaybeUninit::::uninit(); 3]; 316 | /// let mut heap = coca::collections::SliceHeap::<_>::from(&mut backing_region[..]); 317 | /// heap.push(1); heap.push(3); heap.push(5); 318 | /// 319 | /// let mut iter = heap.drain_sorted(); 320 | /// assert_eq!(iter.next(), Some(5)); 321 | /// drop(iter); 322 | /// assert!(heap.is_empty()); 323 | /// ``` 324 | #[inline] 325 | pub fn drain_sorted(&mut self) -> DrainSorted<'_, T, S, I> { 326 | DrainSorted { heap: self } 327 | } 328 | 329 | /// Drops all items from the binary heap. 330 | #[inline] 331 | pub fn clear(&mut self) { 332 | self.a.clear(); 333 | } 334 | 335 | /// Consumes the `BinaryHeap` and returns the underlying vector in arbitrary order. 336 | #[inline] 337 | pub fn into_vec(self) -> Vec { 338 | self.a 339 | } 340 | 341 | /// Consumes the `BinaryHeap` and returns a vector in sorted (ascending) order. 342 | /// 343 | /// # Examples 344 | /// ``` 345 | /// let mut backing_region = [core::mem::MaybeUninit::::uninit(); 5]; 346 | /// let mut heap = coca::collections::SliceHeap::<_>::from(&mut backing_region[..]); 347 | /// heap.push(1); heap.push(5); heap.push(3); heap.push(2); heap.push(4); 348 | /// let vec = heap.into_sorted_vec(); 349 | /// assert_eq!(vec, &[1, 2, 3, 4, 5][..]); 350 | /// ``` 351 | pub fn into_sorted_vec(self) -> Vec { 352 | let mut result = self.into_vec(); 353 | let a = result.as_mut_slice(); 354 | for i in (1..a.len()).rev() { 355 | a.swap(0, i); 356 | heapify(&mut a[..i], 0); 357 | } 358 | result 359 | } 360 | 361 | /// Consumes the `BinaryHeap` and returns an iterator which yields elements 362 | /// in heap order. 363 | /// 364 | /// When dropped, the remaining elements will be dropped in heap order. 365 | /// 366 | /// # Remarks 367 | /// `.into_iter_sorted()` is O(n log(n)), much slower than [`.into_iter()`](BinaryHeap::into_iter). 368 | /// The latter is preferable in most cases. 
369 | /// 370 | /// # Examples 371 | /// ``` 372 | /// let mut backing_region = [core::mem::MaybeUninit::::uninit(); 3]; 373 | /// let mut heap = coca::collections::SliceHeap::<_>::from(&mut backing_region[..]); 374 | /// heap.push(1); heap.push(3); heap.push(5); 375 | /// 376 | /// let mut iter = heap.into_iter_sorted(); 377 | /// assert_eq!(iter.next(), Some(5)); 378 | /// assert_eq!(iter.next(), Some(3)); 379 | /// assert_eq!(iter.next(), Some(1)); 380 | /// ``` 381 | pub fn into_iter_sorted(self) -> IntoIterSorted { 382 | IntoIterSorted { heap: self } 383 | } 384 | } 385 | 386 | impl>, I: Capacity> IntoIterator for BinaryHeap { 387 | type Item = T; 388 | type IntoIter = as IntoIterator>::IntoIter; 389 | fn into_iter(self) -> Self::IntoIter { 390 | self.a.into_iter() 391 | } 392 | } 393 | 394 | impl>, I: Capacity> Extend for BinaryHeap 395 | where 396 | Vec: Extend, 397 | { 398 | fn extend>(&mut self, iter: T) { 399 | self.a.extend(iter); 400 | for i in (0..(self.a.len() / 2)).rev() { 401 | heapify(self.a.as_mut_slice(), i); 402 | } 403 | } 404 | } 405 | 406 | impl>, I: Capacity> FromIterator for BinaryHeap 407 | where 408 | Vec: FromIterator, 409 | { 410 | /// Creates a binary heap from an iterator. 411 | /// 412 | /// # Panics 413 | /// Panics if the iterator yields more elements than the binary heap can hold. 414 | fn from_iter>(iter: It) -> Self { 415 | let a = Vec::::from_iter(iter); 416 | Self::from(a) 417 | } 418 | } 419 | 420 | /// A draining iterator over the elements of a `BinaryHeap`. 421 | /// 422 | /// This `struct` is created by [`BinaryHeap::drain_sorted()`]. 423 | /// See its documentation for more. 424 | pub struct DrainSorted<'a, T: Ord, S: Storage>, I: Capacity> { 425 | heap: &'a mut BinaryHeap, 426 | } 427 | 428 | impl>, I: Capacity> Iterator for DrainSorted<'_, T, S, I> { 429 | type Item = T; 430 | 431 | fn size_hint(&self) -> (usize, Option) { 432 | let size = self.len(); 433 | (size, Some(size)) 434 | } 435 | 436 | fn next(&mut self) -> Option { 437 | self.heap.pop() 438 | } 439 | } 440 | 441 | impl>, I: Capacity> ExactSizeIterator 442 | for DrainSorted<'_, T, S, I> 443 | { 444 | } 445 | impl>, I: Capacity> FusedIterator for DrainSorted<'_, T, S, I> {} 446 | 447 | impl>, I: Capacity> Drop for DrainSorted<'_, T, S, I> { 448 | fn drop(&mut self) { 449 | self.for_each(drop); 450 | } 451 | } 452 | 453 | /// A consuming iterator that moves out of a `BinaryHeap`. 454 | /// 455 | /// This `struct` is created by [`BinaryHeap::into_iter_sorted()`]. 456 | /// See its documentation for more. 
457 | #[derive(Debug)]
458 | pub struct IntoIterSorted<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> {
459 |     heap: BinaryHeap<T, S, I>,
460 | }
461 | 
462 | impl<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> Iterator for IntoIterSorted<T, S, I> {
463 |     type Item = T;
464 | 
465 |     #[inline]
466 |     fn size_hint(&self) -> (usize, Option<usize>) {
467 |         let size = self.heap.len();
468 |         (size, Some(size))
469 |     }
470 | 
471 |     #[inline]
472 |     fn next(&mut self) -> Option<Self::Item> {
473 |         self.heap.pop()
474 |     }
475 | }
476 | 
477 | impl<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> ExactSizeIterator
478 |     for IntoIterSorted<T, S, I>
479 | {
480 | }
481 | impl<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> FusedIterator for IntoIterSorted<T, S, I> {}
482 | 
483 | impl<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> Clone for IntoIterSorted<T, S, I>
484 | where
485 |     BinaryHeap<T, S, I>: Clone,
486 | {
487 |     fn clone(&self) -> Self {
488 |         self.heap.clone().into_iter_sorted()
489 |     }
490 | }
491 | 
492 | impl<T: Ord, S: Storage<ArrayLayout<T>>, I: Capacity> Drop for IntoIterSorted<T, S, I> {
493 |     fn drop(&mut self) {
494 |         self.for_each(drop);
495 |     }
496 | }
497 | 
498 | #[cfg(feature = "alloc")]
499 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
500 | impl<T: Ord, I: Capacity> crate::collections::AllocHeap<T, I> {
501 |     /// Constructs a new, empty `AllocHeap` with the specified capacity.
502 |     ///
503 |     /// # Panics
504 |     /// Panics if the specified capacity cannot be represented by a `usize`.
505 |     pub fn with_capacity(capacity: I) -> Self {
506 |         BinaryHeap {
507 |             a: Vec::with_capacity(capacity),
508 |         }
509 |     }
510 | }
511 | 
512 | #[cfg(feature = "alloc")]
513 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
514 | impl<T: Clone + Ord, I: Capacity> Clone for crate::collections::AllocHeap<T, I> {
515 |     fn clone(&self) -> Self {
516 |         BinaryHeap { a: self.a.clone() }
517 |     }
518 | }
519 | 
520 | impl<T: Ord, I: Capacity, const C: usize> BinaryHeap<T, [MaybeUninit<T>; C], I> {
521 |     /// Constructs a new, empty `BinaryHeap` backed by an inline array.
522 |     ///
523 |     /// # Panics
524 |     /// Panics if `C` cannot be represented as a value of type `I`.
525 | /// 526 | /// # Examples 527 | /// ``` 528 | /// let heap = coca::collections::InlineHeap::::new(); 529 | /// assert_eq!(heap.capacity(), 4); 530 | /// assert!(heap.is_empty()); 531 | /// ``` 532 | pub fn new() -> Self { 533 | let a = Vec::new(); 534 | BinaryHeap { a } 535 | } 536 | } 537 | 538 | impl Default for BinaryHeap; C], I> { 539 | fn default() -> Self { 540 | Self::new() 541 | } 542 | } 543 | 544 | impl Clone for BinaryHeap; C], I> { 545 | fn clone(&self) -> Self { 546 | BinaryHeap { a: self.a.clone() } 547 | } 548 | } 549 | 550 | #[cfg(test)] 551 | mod tests { 552 | use super::*; 553 | 554 | #[test] 555 | fn tree_traversal_utilities() { 556 | assert_eq!(left(0), 1); 557 | assert_eq!(right(0), 2); 558 | assert_eq!(parent(1), 0); 559 | assert_eq!(parent(2), 0); 560 | 561 | for i in 1..=1000 { 562 | let l = left(i); 563 | let r = right(i); 564 | assert_eq!(l + 1, r); 565 | assert_eq!(parent(l), i); 566 | assert_eq!(parent(r), i); 567 | 568 | let ll = left(l); 569 | let lr = right(l); 570 | let rl = left(r); 571 | let rr = right(r); 572 | 573 | assert_eq!(ll + 1, lr); 574 | assert_eq!(rl + 1, rr); 575 | assert_eq!(parent(parent(ll)), i); 576 | assert_eq!(parent(parent(lr)), i); 577 | assert_eq!(parent(parent(rl)), i); 578 | assert_eq!(parent(parent(rr)), i); 579 | } 580 | } 581 | 582 | #[test] 583 | fn push_and_pop_randomized_inputs() { 584 | use rand::{rngs::SmallRng, RngCore, SeedableRng}; 585 | 586 | let mut backing_region = [core::mem::MaybeUninit::::uninit(); 32]; 587 | let mut heap = crate::collections::SliceHeap::<_>::from(&mut backing_region[..]); 588 | 589 | let mut rng = SmallRng::from_seed(crate::test_utils::RNG_SEED); 590 | 591 | let mut newest = 0; 592 | for _ in 0..32 { 593 | newest = rng.next_u32(); 594 | heap.push(newest); 595 | } 596 | 597 | let mut prev = u32::max_value(); 598 | for _ in 0..1000 { 599 | let x = heap.pop().unwrap(); 600 | assert!(x <= prev || x == newest); 601 | prev = x; 602 | 603 | newest = rng.next_u32(); 604 | heap.push(newest); 605 | } 606 | } 607 | 608 | #[test] 609 | fn iterators_take_and_drop_correctly() { 610 | use core::cell::RefCell; 611 | 612 | #[derive(Clone)] 613 | struct Droppable<'a, 'b> { 614 | value: usize, 615 | log: &'a RefCell>, 616 | } 617 | 618 | impl PartialEq for Droppable<'_, '_> { 619 | fn eq(&self, rhs: &Self) -> bool { 620 | self.value == rhs.value 621 | } 622 | } 623 | 624 | impl Eq for Droppable<'_, '_> {} 625 | 626 | impl PartialOrd for Droppable<'_, '_> { 627 | fn partial_cmp(&self, rhs: &Self) -> Option { 628 | Some(self.cmp(rhs)) 629 | } 630 | } 631 | 632 | impl Ord for Droppable<'_, '_> { 633 | fn cmp(&self, rhs: &Self) -> core::cmp::Ordering { 634 | self.value.cmp(&rhs.value) 635 | } 636 | } 637 | 638 | impl Drop for Droppable<'_, '_> { 639 | fn drop(&mut self) { 640 | self.log.borrow_mut().push(self.value); 641 | } 642 | } 643 | 644 | let mut backing_array = [MaybeUninit::::uninit(); 16]; 645 | let drop_log = RefCell::new(crate::collections::SliceVec::<_>::from( 646 | &mut backing_array[..], 647 | )); 648 | 649 | let mut backing_region = [ 650 | core::mem::MaybeUninit::::uninit(), 651 | core::mem::MaybeUninit::::uninit(), 652 | core::mem::MaybeUninit::::uninit(), 653 | core::mem::MaybeUninit::::uninit(), 654 | core::mem::MaybeUninit::::uninit(), 655 | core::mem::MaybeUninit::::uninit(), 656 | core::mem::MaybeUninit::::uninit(), 657 | core::mem::MaybeUninit::::uninit(), 658 | ]; 659 | 660 | let mut heap = crate::collections::SliceHeap::::from(&mut backing_region[..]); 661 | for i in 1..=8 { 662 | 
heap.push(Droppable { 663 | value: i, 664 | log: &drop_log, 665 | }); 666 | } 667 | 668 | let mut drain_iter = heap.drain_sorted(); 669 | assert_eq!(drain_iter.next().unwrap().value, 8); 670 | assert_eq!(drain_iter.next().unwrap().value, 7); 671 | assert_eq!(drop_log.borrow().len(), 2); 672 | 673 | drop(drain_iter); 674 | assert_eq!(drop_log.borrow().len(), 8); 675 | assert_eq!(heap.len(), 0); 676 | 677 | for i in 1..=8 { 678 | heap.push(Droppable { 679 | value: i, 680 | log: &drop_log, 681 | }); 682 | } 683 | 684 | let mut into_iter = heap.into_iter_sorted(); 685 | assert_eq!(into_iter.next().unwrap().value, 8); 686 | assert_eq!(into_iter.next().unwrap().value, 7); 687 | assert_eq!(into_iter.next().unwrap().value, 6); 688 | assert_eq!(drop_log.borrow().len(), 11); 689 | 690 | drop(into_iter); 691 | assert_eq!(drop_log.borrow().len(), 16); 692 | 693 | assert_eq!( 694 | drop_log.borrow().as_slice(), 695 | &[8, 7, 6, 5, 4, 3, 2, 1, 8, 7, 6, 5, 4, 3, 2, 1] 696 | ); 697 | } 698 | } 699 | -------------------------------------------------------------------------------- /src/collections/cache.rs: -------------------------------------------------------------------------------- 1 | //! Forgetful map data structures. 2 | //! 3 | //! Useful for approximate search tasks and caching the results of expensive 4 | //! computations. 5 | 6 | #![allow(clippy::cast_possible_truncation)] 7 | 8 | use core::hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}; 9 | use core::marker::PhantomData; 10 | use core::mem::MaybeUninit; 11 | use core::{borrow::Borrow, cell::Cell}; 12 | 13 | use crate::storage::{ArrayLayout, InlineStorage, Storage}; 14 | 15 | // TODO: wider cache line types! 16 | 17 | /// Types that can be used as the cache line type `L` of a [`CacheTable`]. 18 | pub trait CacheLine { 19 | /// The maximum number of entries that can be cached in a line. 20 | const CAPACITY: usize; 21 | /// Initialize the pointed to cache line to be empty. 22 | /// 23 | /// # Safety 24 | /// Implementors may assume the provided pointer to be valid and non-null; 25 | /// they may *not* assume the referenced memory to be initialized. 26 | unsafe fn init(this: *mut Self); 27 | /// Returns a reference to the value corresponding to the key. 28 | /// 29 | /// The key may be any borrowed form of the cache's key type, but [`Eq`] on 30 | /// the borrowed form *must* match that of the key type. 31 | fn get>(&self, k: &Q) -> Option<&V>; 32 | /// Returns a mutable reference to the value corresponding to the key. 33 | /// 34 | /// The key may be any borrowed form of the cache's key type, but [`Eq`] on 35 | /// the borrowed form *must* match that of the key type. 36 | fn get_mut>(&mut self, k: &Q) -> Option<&mut V>; 37 | /// Inserts a key-value pair into the cache line. 38 | /// 39 | /// If the cache line is already full, another key-value pair must be 40 | /// evicted from the cache line and returned. Otherwise returns [`None`]. 41 | fn insert(&mut self, k: K, v: V) -> Option<(K, V)>; 42 | /// Ensures a value corresponding to the provided key is cached by inserting 43 | /// the result of the default function if none is found, and returns a reference 44 | /// to the cached value. 45 | fn get_or_insert_with V>(&mut self, k: K, default: F) -> &V; 46 | /// Clears the cache line, removing all key-value pairs and resetting any 47 | /// additional state. 48 | fn clear(&mut self); 49 | } 50 | 51 | /// The smallest possible cache, storing only the single most recently accessed key-value pair. 
52 | /// 53 | /// Intended primarily for use as the [`CacheLine`] type `L` of a [`CacheTable`]. 54 | /// 55 | /// # Examples 56 | /// ``` 57 | /// use coca::collections::cache::{UnitCache, CacheLine}; 58 | /// let mut cache = UnitCache::<&'static str, i32>::default(); 59 | /// 60 | /// assert!(cache.get(&"hello").is_none()); 61 | /// assert!(cache.insert("hello", 1).is_none()); 62 | /// assert_eq!(cache.get(&"hello"), Some(&1)); 63 | /// 64 | /// assert_eq!(cache.insert("world", 2), Some(("hello", 1))); 65 | /// assert_eq!(cache.get(&"world"), Some(&2)); 66 | /// assert!(cache.get(&"hello").is_none()); 67 | /// ``` 68 | pub struct UnitCache { 69 | key: MaybeUninit, 70 | value: MaybeUninit, 71 | occupied: bool, 72 | } 73 | 74 | impl Default for UnitCache { 75 | fn default() -> Self { 76 | let mut result = MaybeUninit::uninit(); 77 | unsafe { 78 | Self::init(result.as_mut_ptr()); 79 | result.assume_init() 80 | } 81 | } 82 | } 83 | 84 | impl CacheLine for UnitCache { 85 | const CAPACITY: usize = 1; 86 | 87 | unsafe fn init(this: *mut Self) { 88 | (*this).occupied = false; 89 | } 90 | 91 | fn get>(&self, k: &Q) -> Option<&V> { 92 | if !self.occupied { 93 | return None; 94 | } 95 | let my_key = unsafe { &*self.key.as_ptr() }; 96 | if my_key == k.borrow() { 97 | Some(unsafe { &*self.value.as_ptr() }) 98 | } else { 99 | None 100 | } 101 | } 102 | 103 | fn get_mut>(&mut self, k: &Q) -> Option<&mut V> { 104 | if !self.occupied { 105 | return None; 106 | } 107 | let my_key = unsafe { &*self.key.as_ptr() }; 108 | if my_key == k.borrow() { 109 | Some(unsafe { &mut *self.value.as_mut_ptr() }) 110 | } else { 111 | None 112 | } 113 | } 114 | 115 | fn insert(&mut self, k: K, v: V) -> Option<(K, V)> { 116 | let evicted = self.occupied.then(|| unsafe { 117 | let key = self.key.as_ptr().read(); 118 | let value = self.value.as_ptr().read(); 119 | (key, value) 120 | }); 121 | 122 | self.occupied = true; 123 | unsafe { 124 | self.key.as_mut_ptr().write(k); 125 | self.value.as_mut_ptr().write(v); 126 | } 127 | 128 | evicted 129 | } 130 | 131 | fn get_or_insert_with V>(&mut self, k: K, default: F) -> &V { 132 | if !self.occupied { 133 | self.value = MaybeUninit::new(default(&k)); 134 | self.key = MaybeUninit::new(k); 135 | self.occupied = true; 136 | return unsafe { &*self.value.as_ptr() }; 137 | } 138 | 139 | if unsafe { &*self.key.as_ptr() } == &k { 140 | return unsafe { &*self.value.as_ptr() }; 141 | } 142 | 143 | let key_ptr = self.key.as_mut_ptr(); 144 | let value_ptr = self.value.as_mut_ptr(); 145 | 146 | unsafe { 147 | value_ptr.drop_in_place(); 148 | value_ptr.write(default(&k)); 149 | 150 | key_ptr.drop_in_place(); 151 | key_ptr.write(k); 152 | 153 | &*value_ptr 154 | } 155 | } 156 | 157 | fn clear(&mut self) { 158 | if !self.occupied { 159 | return; 160 | } 161 | 162 | unsafe { 163 | self.key.as_mut_ptr().drop_in_place(); 164 | self.value.as_mut_ptr().drop_in_place(); 165 | } 166 | 167 | self.occupied = false; 168 | } 169 | } 170 | 171 | impl Drop for UnitCache { 172 | fn drop(&mut self) { 173 | if !self.occupied { 174 | return; 175 | } 176 | 177 | unsafe { 178 | self.key.as_mut_ptr().drop_in_place(); 179 | self.value.as_mut_ptr().drop_in_place(); 180 | } 181 | } 182 | } 183 | 184 | macro_rules! 
get_methods { 185 | () => { 186 | fn get>(&self, k: &Q) -> Option<&V> { 187 | for i in 0..self.len() { 188 | let my_key = unsafe { &*self.keys[i].as_ptr() }; 189 | if my_key == k.borrow() { 190 | self.mark_used(i); 191 | return Some(unsafe { &*self.values[i].as_ptr() }); 192 | } 193 | } 194 | 195 | None 196 | } 197 | 198 | fn get_mut>(&mut self, k: &Q) -> Option<&mut V> { 199 | for i in 0..self.len() { 200 | let my_key = unsafe { &*self.keys[i].as_ptr() }; 201 | if my_key == k.borrow() { 202 | self.mark_used(i); 203 | return Some(unsafe { &mut *self.values[i].as_mut_ptr() }); 204 | } 205 | } 206 | 207 | None 208 | } 209 | }; 210 | } 211 | 212 | /// A cache storing the two most recently accessed key-value pairs. 213 | /// 214 | /// Intended primarily for use as the [`CacheLine`] type `L` of a [`CacheTable`]. 215 | /// 216 | /// # Examples 217 | /// ``` 218 | /// use coca::collections::cache::{LruCache2, CacheLine}; 219 | /// let mut cache = LruCache2::::default(); 220 | /// 221 | /// assert!(cache.get(&1).is_none()); 222 | /// assert!(cache.insert(1, "A").is_none()); 223 | /// assert_eq!(cache.get(&1), Some(&"A")); 224 | /// 225 | /// assert!(cache.get(&2).is_none()); 226 | /// assert!(cache.insert(2, "B").is_none()); 227 | /// assert_eq!(cache.get(&1), Some(&"A")); // Entry 1 is now most recently used... 228 | /// 229 | /// assert_eq!(cache.insert(3, "C"), Some((2, "B"))); // ...so entry 2 will be evicted first. 230 | /// assert_eq!(cache.get(&1), Some(&"A")); 231 | /// assert_eq!(cache.get(&3), Some(&"C")); 232 | /// assert!(cache.get(&2).is_none()); 233 | /// ``` 234 | pub struct LruCache2 { 235 | keys: [MaybeUninit; 2], 236 | values: [MaybeUninit; 2], 237 | state: Cell, 238 | } 239 | 240 | impl Default for LruCache2 { 241 | fn default() -> Self { 242 | let mut result = MaybeUninit::uninit(); 243 | unsafe { 244 | Self::init(result.as_mut_ptr()); 245 | result.assume_init() 246 | } 247 | } 248 | } 249 | 250 | impl LruCache2 { 251 | #[inline(always)] 252 | fn len(&self) -> usize { 253 | (self.state.get() & 0b11) as usize 254 | } 255 | 256 | #[inline(always)] 257 | fn least_recently_used(&self) -> usize { 258 | 1 ^ (self.state.get() >> 2) as usize 259 | } 260 | 261 | #[inline(always)] 262 | fn mark_used(&self, i: usize) { 263 | let len = self.len(); 264 | debug_assert!(i < len); 265 | self.state.set(len as u8 | (i << 2) as u8); 266 | } 267 | } 268 | 269 | impl CacheLine for LruCache2 { 270 | const CAPACITY: usize = 2; 271 | 272 | unsafe fn init(this: *mut Self) { 273 | (*this).state = Cell::new(0); 274 | } 275 | 276 | get_methods!(); 277 | 278 | fn insert(&mut self, k: K, v: V) -> Option<(K, V)> { 279 | let len = self.len(); 280 | for i in 0..len { 281 | let my_key = unsafe { &*self.keys[i].as_ptr() }; 282 | if my_key == k.borrow() { 283 | self.mark_used(i); 284 | 285 | let evicted = 286 | unsafe { (self.keys[i].as_ptr().read(), self.values[i].as_ptr().read()) }; 287 | 288 | self.keys[i] = MaybeUninit::new(k); 289 | self.values[i] = MaybeUninit::new(v); 290 | 291 | return Some(evicted); 292 | } 293 | } 294 | 295 | if len < 2 { 296 | self.keys[len] = MaybeUninit::new(k); 297 | self.values[len] = MaybeUninit::new(v); 298 | self.state.set((len + 1) as u8 | (len << 2) as u8); 299 | None 300 | } else { 301 | let lru = self.least_recently_used(); 302 | 303 | let evicted = unsafe { 304 | let key = self.keys[lru].as_ptr().read(); 305 | let value = self.values[lru].as_ptr().read(); 306 | (key, value) 307 | }; 308 | 309 | self.keys[lru] = MaybeUninit::new(k); 310 | self.values[lru] = 
MaybeUninit::new(v); 311 | self.state.set((lru << 2) as u8 | 2); 312 | 313 | Some(evicted) 314 | } 315 | } 316 | 317 | fn get_or_insert_with V>(&mut self, k: K, default: F) -> &V { 318 | let len = self.len(); 319 | for i in 0..len { 320 | let my_key = unsafe { &*self.keys[i].as_ptr() }; 321 | if my_key == k.borrow() { 322 | self.mark_used(i); 323 | return unsafe { &*self.values[i].as_ptr() }; 324 | } 325 | } 326 | 327 | let value = default(&k); 328 | if len < 2 { 329 | self.keys[len] = MaybeUninit::new(k); 330 | self.values[len] = MaybeUninit::new(value); 331 | self.state.set((len + 1) as u8 | (len << 2) as u8); 332 | unsafe { &*self.values[len].as_ptr() } 333 | } else { 334 | let lru = self.least_recently_used(); 335 | self.mark_used(lru); 336 | 337 | unsafe { 338 | self.keys[lru].as_mut_ptr().drop_in_place(); 339 | self.values[lru].as_mut_ptr().drop_in_place(); 340 | } 341 | 342 | self.keys[lru] = MaybeUninit::new(k); 343 | self.values[lru] = MaybeUninit::new(value); 344 | 345 | unsafe { &*self.values[lru].as_ptr() } 346 | } 347 | } 348 | 349 | fn clear(&mut self) { 350 | for i in 0..self.len() { 351 | unsafe { 352 | self.keys[i].as_mut_ptr().drop_in_place(); 353 | self.values[i].as_mut_ptr().drop_in_place(); 354 | } 355 | } 356 | 357 | self.state.set(0); 358 | } 359 | } 360 | 361 | impl Drop for LruCache2 { 362 | fn drop(&mut self) { 363 | for i in 0..self.len() { 364 | unsafe { 365 | self.keys[i].as_mut_ptr().drop_in_place(); 366 | self.values[i].as_mut_ptr().drop_in_place(); 367 | } 368 | } 369 | } 370 | } 371 | 372 | /// A map implemented with an array of [`CacheLine`]s indexed by the keys' [`Hash`]. 373 | /// 374 | /// The choice of cache line type has several implications for runtime performance, 375 | /// memory overhead, and caching behavior: 376 | /// 377 | /// * Using [`UnitCache`] requires an additional [`bool`] per cache slot for tracking 378 | /// occupancy, and results in a direct-mapped cache. 379 | /// * Using [`LruCache2`] results in a 2-way set-associative cache with a least 380 | /// recently used eviction policy tracked per cache line with a single [`u8`]. 381 | /// 382 | /// Note that the cache's capacity is always an integer multiple of the cache line's 383 | /// capacity. 384 | /// 385 | /// For `no_std` compatibility, no default hash builder is provided, but when using 386 | /// [`Hasher`] types implementing [`Default`], the constructors [`new`](CacheTable::new), 387 | /// and [`with_capacity`](CacheTable::with_capacity) 388 | /// are provided. Otherwise, use [`with_hasher`](CacheTable::with_hasher), 389 | /// [`with_capacity_and_hasher`](CacheTable::with_capacity_and_hasher) or 390 | /// [`Arena::try_cache_with_hasher`](crate::arena::Arena::try_cache_with_hasher). 
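///
/// # Examples
/// A brief usage sketch, modeled on the constructor examples below; the exact
/// parameter order of the `Inline2WayLruCache` alias (key, value, hash builder,
/// line count) and the 32-line layout yielding 64 slots are assumed here:
/// ```
/// # extern crate rustc_hash;
/// use rustc_hash::FxHasher;
/// use core::hash::BuildHasherDefault;
///
/// type HashBuilder = BuildHasherDefault<FxHasher>;
/// // Assumed alias layout: 32 cache lines of 2 entries each, i.e. 64 slots in total.
/// type Cache = coca::collections::Inline2WayLruCache<u32, &'static str, HashBuilder, 32>;
///
/// let mut cache = Cache::new();
/// assert_eq!(cache.capacity(), 64);
///
/// cache.insert(17, "A");
/// assert_eq!(cache.get(&17), Some(&"A"));
/// assert!(cache.get(&99).is_none());
/// ```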
391 | pub struct CacheTable>, L: CacheLine, H> { 392 | buf: S, 393 | hash_builder: H, 394 | lines: PhantomData, 395 | keys: PhantomData, 396 | values: PhantomData, 397 | } 398 | 399 | impl>, L: CacheLine, H: Default> From 400 | for CacheTable 401 | { 402 | fn from(buf: S) -> Self { 403 | let mut result = CacheTable { 404 | buf, 405 | hash_builder: H::default(), 406 | lines: PhantomData, 407 | keys: PhantomData, 408 | values: PhantomData, 409 | }; 410 | result.init_cache_lines(); 411 | result 412 | } 413 | } 414 | 415 | impl>, L: CacheLine, H> CacheTable { 416 | fn init_cache_lines(&mut self) { 417 | let line_ptr = self.buf.get_mut_ptr().cast::(); 418 | for i in 0..self.buf.capacity() { 419 | unsafe { 420 | L::init(line_ptr.add(i)); 421 | } 422 | } 423 | } 424 | 425 | /// Returns the number of key-value pairs the cache can hold. 426 | #[inline(always)] 427 | pub fn capacity(&self) -> usize { 428 | self.buf.capacity() * L::CAPACITY 429 | } 430 | 431 | fn get_cache_line_for_hash(&self, hash: u64) -> &L { 432 | let line_index = hash as usize % self.buf.capacity(); 433 | let line_ptr = self.buf.get_ptr().cast::(); 434 | unsafe { &*line_ptr.add(line_index) } 435 | } 436 | 437 | fn get_cache_line_for_hash_mut(&mut self, hash: u64) -> &mut L { 438 | let line_index = hash as usize % self.buf.capacity(); 439 | let line_ptr = self.buf.get_mut_ptr().cast::(); 440 | unsafe { &mut *line_ptr.add(line_index) } 441 | } 442 | 443 | /// Clears the cache, removing all key-value pairs. 444 | /// 445 | /// # Examples 446 | /// ``` 447 | /// # extern crate rustc_hash; 448 | /// use rustc_hash::FxHasher; 449 | /// use coca::collections::InlineDirectMappedCache; 450 | /// use core::hash::BuildHasherDefault; 451 | /// 452 | /// let mut cache = InlineDirectMappedCache::, 4>::new(); 453 | /// cache.insert(1, "A"); 454 | /// cache.clear(); 455 | /// assert!(cache.get(&1).is_none()); 456 | /// ``` 457 | pub fn clear(&mut self) { 458 | let num_lines = self.buf.capacity(); 459 | let line_ptr = self.buf.get_mut_ptr().cast::(); 460 | for i in 0..num_lines { 461 | let line = unsafe { &mut *line_ptr.add(i) }; 462 | line.clear(); 463 | } 464 | } 465 | } 466 | 467 | impl>, L: CacheLine, H: BuildHasher> 468 | CacheTable 469 | { 470 | /// Constructs a new cache table using the specified storage and hash builder. 471 | pub fn from_storage_and_hasher(buf: S, hash_builder: H) -> Self { 472 | let mut result = CacheTable { 473 | buf, 474 | hash_builder, 475 | lines: PhantomData, 476 | keys: PhantomData, 477 | values: PhantomData, 478 | }; 479 | result.init_cache_lines(); 480 | result 481 | } 482 | 483 | fn make_hash(&self, val: &K) -> u64 { 484 | let mut state = self.hash_builder.build_hasher(); 485 | val.hash(&mut state); 486 | state.finish() 487 | } 488 | 489 | /// Returns a reference to the value corresponding to the key. 490 | /// 491 | /// The key may be any borrowed form of the map's key type, but [`Hash`] 492 | /// and [`Eq`] on the borrowed form *must* match those for the key type. 
493 | /// 494 | /// # Examples 495 | /// ``` 496 | /// # extern crate rustc_hash; 497 | /// use rustc_hash::FxHasher; 498 | /// use coca::collections::InlineDirectMappedCache; 499 | /// use core::hash::BuildHasherDefault; 500 | /// 501 | /// let mut cache = InlineDirectMappedCache::, 4>::new(); 502 | /// cache.insert(1, "A"); 503 | /// assert_eq!(cache.get(&1), Some(&"A")); 504 | /// assert_eq!(cache.get(&2), None); 505 | /// ``` 506 | pub fn get>(&self, k: &Q) -> Option<&V> { 507 | let key = k.borrow(); 508 | let hash = self.make_hash(key); 509 | let cache_line = self.get_cache_line_for_hash(hash); 510 | cache_line.get(key) 511 | } 512 | 513 | /// Returns a mutable reference to the value corresponding to the key. 514 | /// 515 | /// # Examples 516 | /// ``` 517 | /// # extern crate rustc_hash; 518 | /// use rustc_hash::FxHasher; 519 | /// use coca::collections::InlineDirectMappedCache; 520 | /// use core::hash::BuildHasherDefault; 521 | /// 522 | /// let mut cache = InlineDirectMappedCache::, 4>::new(); 523 | /// cache.insert(1, "A"); 524 | /// if let Some(x) = cache.get_mut(&1) { 525 | /// *x = "B"; 526 | /// } 527 | /// assert_eq!(cache.get(&1), Some(&"B")); 528 | pub fn get_mut>(&mut self, k: &Q) -> Option<&mut V> { 529 | let key = k.borrow(); 530 | let hash = self.make_hash(key); 531 | let cache_line = self.get_cache_line_for_hash_mut(hash); 532 | cache_line.get_mut(key) 533 | } 534 | 535 | /// Inserts a value computed from `f` into the cache if the given key is 536 | /// not present, then returns a reference to the value in the cache. 537 | /// 538 | /// # Examples 539 | /// ``` 540 | /// # extern crate rustc_hash; 541 | /// use rustc_hash::FxHasher; 542 | /// use coca::collections::InlineDirectMappedCache; 543 | /// use core::hash::BuildHasherDefault; 544 | /// 545 | /// let mut cache = InlineDirectMappedCache::, 4>::new(); 546 | /// cache.insert(1, "A"); 547 | /// assert_eq!(cache.get_or_insert_with(1, |_| "B"), &"A"); 548 | /// # assert_eq!(cache.get(&1), Some(&"A")); 549 | /// assert_eq!(cache.get_or_insert_with(2, |_| "B"), &"B"); 550 | /// # assert_eq!(cache.get(&2), Some(&"B")); 551 | /// ``` 552 | pub fn get_or_insert_with V>(&mut self, k: K, f: F) -> &V { 553 | let hash = self.make_hash(&k); 554 | let cache_line = self.get_cache_line_for_hash_mut(hash); 555 | cache_line.get_or_insert_with(k, f) 556 | } 557 | 558 | /// Inserts a key-value pair into the cache. 559 | /// 560 | /// Returns the evicted key-value pair if the cache line corresponding to 561 | /// the key is already full, or [`None`] otherwise. 562 | /// 563 | /// # Examples 564 | /// ``` 565 | /// # extern crate rustc_hash; 566 | /// use rustc_hash::FxHasher; 567 | /// use coca::collections::InlineDirectMappedCache; 568 | /// use core::hash::BuildHasherDefault; 569 | /// 570 | /// let mut cache = InlineDirectMappedCache::, 4>::new(); 571 | /// assert_eq!(cache.insert(37, "a"), None); 572 | /// assert_eq!(cache.insert(37, "b"), Some((37, "a"))); 573 | /// ``` 574 | pub fn insert(&mut self, k: K, v: V) -> Option<(K, V)> { 575 | let hash = self.make_hash(&k); 576 | let cache_line = self.get_cache_line_for_hash_mut(hash); 577 | cache_line.insert(k, v) 578 | } 579 | } 580 | 581 | impl>, L: CacheLine, H> Drop 582 | for CacheTable 583 | { 584 | fn drop(&mut self) { 585 | self.clear(); 586 | } 587 | } 588 | 589 | impl, H: BuildHasher, const N: usize> 590 | CacheTable, L, H> 591 | { 592 | /// Constructs a new, empty `CacheTable` using inline storage and the specified [`BuildHasher`]. 
593 | /// 594 | /// # Examples 595 | /// ``` 596 | /// # extern crate rustc_hash; 597 | /// type HashBuilder = core::hash::BuildHasherDefault; 598 | /// type CacheTable = coca::collections::Inline2WayLruCache; 599 | /// let mut cache = CacheTable::with_hasher(HashBuilder::default()); 600 | /// assert_eq!(cache.capacity(), 64); 601 | /// ``` 602 | pub fn with_hasher(hash_builder: H) -> Self { 603 | let mut result = CacheTable { 604 | buf: unsafe { MaybeUninit::uninit().assume_init() }, 605 | hash_builder, 606 | lines: PhantomData, 607 | keys: PhantomData, 608 | values: PhantomData, 609 | }; 610 | result.init_cache_lines(); 611 | result 612 | } 613 | } 614 | 615 | impl, H: Hasher + Default, const N: usize> 616 | CacheTable, L, BuildHasherDefault> 617 | { 618 | /// Constructs a new, empty `CacheTable` using inline storage and the default [`BuildHasherDefault`]. 619 | /// 620 | /// # Examples 621 | /// ``` 622 | /// # extern crate rustc_hash; 623 | /// type HashBuilder = core::hash::BuildHasherDefault; 624 | /// type CacheTable = coca::collections::Inline2WayLruCache; 625 | /// let mut cache = CacheTable::new(); 626 | /// assert_eq!(cache.capacity(), 64); 627 | /// ``` 628 | pub fn new() -> Self { 629 | let mut result = CacheTable { 630 | buf: unsafe { MaybeUninit::uninit().assume_init() }, 631 | hash_builder: BuildHasherDefault::default(), 632 | lines: PhantomData, 633 | keys: PhantomData, 634 | values: PhantomData, 635 | }; 636 | result.init_cache_lines(); 637 | result 638 | } 639 | } 640 | 641 | impl, H: Hasher + Default, const N: usize> Default 642 | for CacheTable, L, BuildHasherDefault> 643 | { 644 | fn default() -> Self { 645 | Self::new() 646 | } 647 | } 648 | 649 | #[cfg(feature = "alloc")] 650 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 651 | impl, H: BuildHasher> 652 | CacheTable>, L, H> 653 | { 654 | /// Constructs a new, empty `CacheTable` with the specified [`BuildHasher`] 655 | /// and heap-allocated storage of the specified capacity, rounded up to the 656 | /// next largest multiple of `L::CAPACITY`. 657 | /// 658 | /// # Examples 659 | /// ``` 660 | /// # extern crate rustc_hash; 661 | /// type HashBuilder = core::hash::BuildHasherDefault; 662 | /// type CacheTable = coca::collections::Alloc2WayLruCache; 663 | /// let mut cache = CacheTable::with_capacity_and_hasher(63, HashBuilder::default()); 664 | /// assert_eq!(cache.capacity(), 64); 665 | /// ``` 666 | pub fn with_capacity_and_hasher(capacity: usize, hash_builder: H) -> Self { 667 | let capacity = (capacity + L::CAPACITY - 1) / L::CAPACITY; 668 | let buf = crate::storage::AllocStorage::with_capacity(capacity); 669 | Self::from_storage_and_hasher(buf, hash_builder) 670 | } 671 | } 672 | 673 | #[cfg(feature = "alloc")] 674 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 675 | impl, H: Hasher + Default> 676 | CacheTable>, L, BuildHasherDefault> 677 | { 678 | /// Constructs a new, empty `CacheTable` with the default [`BuildHasherDefault`] 679 | /// and heap-allocated storage the specified capacity, rounded up to the next 680 | /// largest multiple of `L::CAPACITY`. 
681 | /// 682 | /// # Examples 683 | /// ``` 684 | /// # extern crate rustc_hash; 685 | /// type HashBuilder = core::hash::BuildHasherDefault; 686 | /// type CacheTable = coca::collections::Alloc2WayLruCache; 687 | /// let mut cache = CacheTable::with_capacity(63); 688 | /// assert_eq!(cache.capacity(), 64); 689 | /// ``` 690 | pub fn with_capacity(capacity: usize) -> Self { 691 | let capacity = (capacity + L::CAPACITY - 1) / L::CAPACITY; 692 | let buf = crate::storage::AllocStorage::with_capacity(capacity); 693 | let hash_builder = BuildHasherDefault::default(); 694 | Self::from_storage_and_hasher(buf, hash_builder) 695 | } 696 | } 697 | -------------------------------------------------------------------------------- /src/collections/list_set.rs: -------------------------------------------------------------------------------- 1 | //! A set implemented with a vector. 2 | 3 | use core::borrow::Borrow; 4 | use core::fmt::Debug; 5 | use core::iter::FusedIterator; 6 | use core::slice::Iter; 7 | 8 | use crate::collections::vec::{Drain, Vec}; 9 | use crate::storage::{ArrayLayout, Capacity, InlineStorage, Storage}; 10 | 11 | /// A set implemented with a vector, using a linear scan to find a given value. 12 | /// 13 | /// This is simple and cache-friendly, though algorithmically inefficient: 14 | /// converting an *n*-element [`Vec`] into a set requires *O*(*n*²) comparisons 15 | /// and looking up a value requires *O*(*n*) comparisons. Set operators such as 16 | /// [`difference`], [`intersection`], and [`union`] require *O*(*n* · *m*) 17 | /// comparisons. 18 | /// 19 | /// [`difference`]: ListSet::difference 20 | /// [`intersection`]: ListSet::intersection 21 | /// [`union`]: ListSet::union 22 | /// 23 | /// Newly inserted elements are appended to the internal vector, and a removed 24 | /// element is replaced by the last one in the list, meaning modifications have 25 | /// constant overhead after the initial lookup. This also means insertion order 26 | /// is **not** preserved. 27 | /// 28 | /// As with the [`ListMap`](crate::collections::list_map::ListMap) type, a 29 | /// `ListSet` requires that the element type implements the [`Eq`] trait. 30 | /// This can frequently be achieved by using `#[derive(PartialEq, Eq)]`. 31 | /// 32 | /// It is a logic error for an item to be modified in such a way that its 33 | /// equality, as determined by the `Eq` trait, changes while it is in the set. 34 | /// This is normally only possible through [`Cell`](core::cell::Cell), 35 | /// [`RefCell`](core::cell::RefCell), global state, I/O, or unsafe code. The 36 | /// behavior resulting from such a logic error is not specified, but will not 37 | /// result in undefined behavior. This could include panics, incorrect results, 38 | /// aborts, memory leaks, and non-termination. 39 | pub struct ListSet>, I: Capacity> { 40 | vec: Vec, 41 | } 42 | 43 | impl>, I: Capacity> From for ListSet { 44 | fn from(buf: S) -> Self { 45 | ListSet { 46 | vec: Vec::from(buf), 47 | } 48 | } 49 | } 50 | 51 | impl>, I: Capacity> From> for ListSet { 52 | fn from(mut vec: Vec) -> Self { 53 | let mut i = 1; 54 | 'outer: while i < vec.len() { 55 | for j in 0..i { 56 | let i = I::from_usize(i); 57 | let j = I::from_usize(j); 58 | 59 | if vec[i] == vec[j] { 60 | vec.swap_remove(i); 61 | continue 'outer; 62 | } 63 | } 64 | 65 | i += 1; 66 | } 67 | 68 | ListSet { vec } 69 | } 70 | } 71 | 72 | impl>, I: Capacity> ListSet { 73 | /// Constructs a `ListSet` from a `Vec` without checking for duplicate elements. 
74 | /// 75 | /// It is a logic error to pass a vector containing elements that compare 76 | /// equal to one another; the behavior of the resulting set is unspecified, 77 | /// though it is guaranteed to be memory-safe. 78 | /// 79 | /// If you cannot guarantee that this precondition holds, use the `ListSet::from` 80 | /// method provided by the [`From`] implementation instead, which detects 81 | /// and removes duplicate entries. 82 | /// 83 | /// # Examples 84 | /// ``` 85 | /// use coca::collections::{InlineListSet, InlineVec}; 86 | /// 87 | /// let mut vec = InlineVec::::new(); 88 | /// vec.push(1); 89 | /// vec.push(2); 90 | /// vec.push(3); 91 | /// 92 | /// let set = InlineListSet::from_vec_unchecked(vec); 93 | /// assert!(set.contains(&1)); 94 | /// assert!(set.contains(&2)); 95 | /// assert!(set.contains(&3)); 96 | /// assert_eq!(set.len(), 3); 97 | /// ``` 98 | #[inline] 99 | pub fn from_vec_unchecked(vec: Vec) -> Self { 100 | ListSet { vec } 101 | } 102 | 103 | /// Returns the number of elements the set can hold. 104 | #[inline] 105 | pub fn capacity(&self) -> usize { 106 | self.vec.capacity() 107 | } 108 | 109 | /// Returns the number of elements in the set. 110 | #[inline] 111 | pub fn len(&self) -> usize { 112 | self.vec.len() 113 | } 114 | 115 | /// Returns `true` if the set contains no elements, or `false` otherwise. 116 | #[inline] 117 | pub fn is_empty(&self) -> bool { 118 | self.vec.is_empty() 119 | } 120 | 121 | /// Returns `true` if the set contains the maximum number of elements it can hold, or `false` otherwise. 122 | #[inline] 123 | pub fn is_full(&self) -> bool { 124 | self.vec.is_full() 125 | } 126 | 127 | /// Clears the set, removing all values. 128 | /// 129 | /// # Examples 130 | /// ``` 131 | /// let mut set = coca::collections::InlineListSet::::new(); 132 | /// set.insert(1); 133 | /// set.clear(); 134 | /// assert!(set.is_empty()); 135 | /// ``` 136 | #[inline] 137 | pub fn clear(&mut self) { 138 | self.vec.clear(); 139 | } 140 | 141 | /// Converts a `ListSet` into the underlying `Vec`. 142 | #[inline] 143 | pub fn into_vec(self) -> Vec { 144 | self.vec 145 | } 146 | 147 | /// Returns a slice of all elements contained in the set in arbitrary order. 148 | #[inline] 149 | pub fn as_slice(&self) -> &[T] { 150 | self.vec.as_slice() 151 | } 152 | 153 | /// An iterator visiting all elements of the set in arbitrary order. The iterator element type is `&'a T`. 154 | #[inline] 155 | pub fn iter(&self) -> Iter<'_, T> { 156 | self.vec.iter() 157 | } 158 | 159 | /// Returns an iterator visiting the values representing the set difference, 160 | /// i.e. the values that are in `self` but not in `other`. 
161 | /// 162 | /// # Examples 163 | /// ``` 164 | /// use coca::collections::InlineListSet; 165 | /// let mut a = InlineListSet::::new(); 166 | /// (1..4).for_each(|x| { a.insert(x); }); 167 | /// 168 | /// let mut b = InlineListSet::::new(); 169 | /// (2..5).for_each(|x| { b.insert(x); }); 170 | /// 171 | /// let mut d = InlineListSet::::new(); 172 | /// a.difference(&b).for_each(|x| { d.insert(*x); }); 173 | /// assert_eq!(d.as_slice(), &[1]); 174 | /// 175 | /// d.clear(); 176 | /// b.difference(&a).for_each(|x| { d.insert(*x); }); 177 | /// assert_eq!(d.as_slice(), &[4]); 178 | /// ``` 179 | #[inline] 180 | pub fn difference<'a, S2: Storage>, I2: Capacity>( 181 | &'a self, 182 | other: &'a ListSet, 183 | ) -> Difference<'a, T> { 184 | Difference { 185 | this: self.as_slice(), 186 | other: other.as_slice(), 187 | front: 0, 188 | } 189 | } 190 | 191 | /// Returns an iterator visiting the values representing the symmetric difference, 192 | /// i.e. the values that are in `self` or in `other`, but not both. 193 | /// 194 | /// # Examples 195 | /// ``` 196 | /// use coca::collections::{InlineListSet, InlineVec}; 197 | /// let mut a = InlineListSet::::new(); 198 | /// (1..4).for_each(|x| { a.insert(x); }); 199 | /// 200 | /// let mut b = InlineListSet::::new(); 201 | /// (2..5).for_each(|x| { b.insert(x); }); 202 | /// 203 | /// let mut d = InlineVec::::new(); 204 | /// d.extend(a.symmetric_difference(&b).cloned()); 205 | /// d.sort(); 206 | /// 207 | /// assert_eq!(&d, &[1, 4]); 208 | /// ``` 209 | #[inline] 210 | pub fn symmetric_difference<'a, S2: Storage>, I2: Capacity>( 211 | &'a self, 212 | other: &'a ListSet, 213 | ) -> SymmetricDifference<'a, T> { 214 | SymmetricDifference { 215 | this: self.as_slice(), 216 | other: other.as_slice(), 217 | front: 0, 218 | } 219 | } 220 | 221 | /// Returns an iterator visiting the values representing the intersection, 222 | /// i.e. the values that are both in `self` and `other`. 223 | /// 224 | /// # Examples 225 | /// ``` 226 | /// use coca::collections::{InlineListSet, InlineVec}; 227 | /// let mut a = InlineListSet::::new(); 228 | /// (1..4).for_each(|x| { a.insert(x); }); 229 | /// 230 | /// let mut b = InlineListSet::::new(); 231 | /// (2..5).for_each(|x| { b.insert(x); }); 232 | /// 233 | /// let mut i = InlineVec::::new(); 234 | /// i.extend(a.intersection(&b).cloned()); 235 | /// i.sort(); 236 | /// 237 | /// assert_eq!(&i, &[2, 3]); 238 | /// ``` 239 | #[inline] 240 | pub fn intersection<'a, S2: Storage>, I2: Capacity>( 241 | &'a self, 242 | other: &'a ListSet, 243 | ) -> Intersection<'a, T> { 244 | Intersection { 245 | this: self.as_slice(), 246 | other: other.as_slice(), 247 | front: 0, 248 | } 249 | } 250 | 251 | /// Returns an iterator visiting the values representing the union, 252 | /// i.e. all values in `self` or `other`, without duplicates. 
253 | /// 254 | /// # Examples 255 | /// ``` 256 | /// use coca::collections::InlineListSet; 257 | /// let mut a = InlineListSet::::new(); 258 | /// (1..4).for_each(|x| { a.insert(x); }); 259 | /// 260 | /// let mut b = InlineListSet::::new(); 261 | /// (2..5).for_each(|x| { b.insert(x); }); 262 | /// 263 | /// let mut u = InlineListSet::::new(); 264 | /// a.union(&b).for_each(|x| { u.insert_unique_unchecked(*x); }); 265 | /// (1..5).for_each(|x| assert!(u.contains(&x))); 266 | /// ``` 267 | #[inline] 268 | pub fn union<'a, S2: Storage>, I2: Capacity>( 269 | &'a self, 270 | other: &'a ListSet, 271 | ) -> Union<'a, T> { 272 | Union { 273 | this: self.as_slice(), 274 | other: other.as_slice(), 275 | front: 0, 276 | } 277 | } 278 | 279 | /// Clears the set, returning all elements in an iterator. 280 | /// 281 | /// # Examples 282 | /// ``` 283 | /// let mut set = coca::collections::InlineListSet::::new(); 284 | /// set.insert(1); set.insert(2); set.insert(3); 285 | /// 286 | /// for i in set.drain() { 287 | /// println!("{}", i); 288 | /// } 289 | /// 290 | /// assert!(set.is_empty()); 291 | /// ``` 292 | #[inline] 293 | pub fn drain(&mut self) -> Drain<'_, T, S, I> { 294 | self.vec.drain(..) 295 | } 296 | 297 | /// Returns `true` if the set contains a value equal to the given value, or `false` otherwise. 298 | /// 299 | /// The value may be any borrowed form of the set's element type, but `Eq` 300 | /// on the borrowed form *must* match that of the element type. 301 | /// 302 | /// # Examples 303 | /// ``` 304 | /// let mut set = coca::collections::InlineListSet::::new(); 305 | /// set.insert(1); 306 | /// 307 | /// assert_eq!(set.contains(&1), true); 308 | /// assert_eq!(set.contains(&2), false); 309 | /// ``` 310 | #[inline] 311 | pub fn contains(&self, value: &Q) -> bool 312 | where 313 | T: Borrow, 314 | { 315 | self.vec.iter().any(|item| item.borrow() == value) 316 | } 317 | 318 | /// Returns a reference to the value in the set, if any, that is equal to the given value. 319 | /// 320 | /// The value may be any borrowed form of the set's element type, but `Eq` 321 | /// on the borrowed form *must* match that of the element type. 322 | /// 323 | /// # Examples 324 | /// ``` 325 | /// let mut set = coca::collections::InlineListSet::::new(); 326 | /// set.insert(1); 327 | /// 328 | /// assert_eq!(set.get(&1), Some(&1)); 329 | /// assert_eq!(set.get(&2), None); 330 | /// ``` 331 | #[inline] 332 | pub fn get(&self, value: &Q) -> Option<&T> 333 | where 334 | T: Borrow, 335 | { 336 | self.vec.iter().find(|item| (*item).borrow() == value) 337 | } 338 | 339 | /// Returns `true` if the set has no elements in common with `other`. 340 | /// This is equivalent to checking for an empty intersection. 341 | /// 342 | /// # Examples 343 | /// ``` 344 | /// use coca::collections::InlineListSet; 345 | /// 346 | /// let mut a = InlineListSet::::new(); 347 | /// a.insert(1); a.insert(2); a.insert(3); 348 | /// 349 | /// let mut b = InlineListSet::::new(); 350 | /// assert_eq!(a.is_disjoint(&b), true); 351 | /// 352 | /// b.insert(4); 353 | /// assert_eq!(a.is_disjoint(&b), true); 354 | /// 355 | /// b.insert(1); 356 | /// assert_eq!(a.is_disjoint(&b), false); 357 | /// ``` 358 | pub fn is_disjoint>, I2: Capacity>( 359 | &self, 360 | other: &ListSet, 361 | ) -> bool { 362 | for item in other.iter() { 363 | if self.contains(item) { 364 | return false; 365 | } 366 | } 367 | 368 | true 369 | } 370 | 371 | /// Returns `true` if the set is a subset of another, 372 | /// i.e. 
`other` contains at least all the values in `self`. 373 | /// 374 | /// # Examples 375 | /// ``` 376 | /// use coca::collections::InlineListSet; 377 | /// 378 | /// let mut sup = InlineListSet::::new(); 379 | /// sup.insert(1); sup.insert(2); sup.insert(3); 380 | /// 381 | /// let mut set = InlineListSet::::new(); 382 | /// assert_eq!(set.is_subset_of(&sup), true); 383 | /// 384 | /// set.insert(2); 385 | /// assert_eq!(set.is_subset_of(&sup), true); 386 | /// 387 | /// set.insert(5); 388 | /// assert_eq!(set.is_subset_of(&sup), false) 389 | /// ``` 390 | pub fn is_subset_of>, I2: Capacity>( 391 | &self, 392 | other: &ListSet, 393 | ) -> bool { 394 | for item in self.iter() { 395 | if !other.contains(item) { 396 | return false; 397 | } 398 | } 399 | 400 | true 401 | } 402 | 403 | /// Returns `true` if the set is a superset of another, 404 | /// i.e. `self` contains at least all the values in `other`. 405 | /// 406 | /// # Examples 407 | /// ``` 408 | /// use coca::collections::InlineListSet; 409 | /// 410 | /// let mut sub = InlineListSet::::new(); 411 | /// sub.insert(1); sub.insert(2); 412 | /// 413 | /// let mut set = InlineListSet::::new(); 414 | /// assert_eq!(set.is_superset_of(&sub), false); 415 | /// 416 | /// set.insert(0); 417 | /// set.insert(1); 418 | /// assert_eq!(set.is_superset_of(&sub), false); 419 | /// 420 | /// set.insert(2); 421 | /// assert_eq!(set.is_superset_of(&sub), true); 422 | /// ``` 423 | #[inline] 424 | pub fn is_superset_of>, I2: Capacity>( 425 | &self, 426 | other: &ListSet, 427 | ) -> bool { 428 | other.is_subset_of(self) 429 | } 430 | 431 | /// Adds a value to the set. 432 | /// 433 | /// Returns `false` if the set already contained a value equal to 434 | /// the given value, or `true` otherwise. If such a value was already 435 | /// present, it is not updated; this matters for types that can be `==` 436 | /// without being identical. 437 | /// 438 | /// # Panics 439 | /// Panics if the set did not contain the value, but no space remains to 440 | /// insert it. See [`try_insert`](ListSet::try_insert) for a checked version 441 | /// that never panics. 442 | #[inline] 443 | pub fn insert(&mut self, value: T) -> bool { 444 | self.try_insert(value).ok().expect("insufficient capacity") 445 | } 446 | 447 | /// Adds a value to the set. 448 | /// 449 | /// Returns `Ok(false)` if the set already contained a value equal to 450 | /// the given value. Otherwise, returns `Ok(true)` if the given value 451 | /// was successfully inserted, or `Err(value)` if the remaining space 452 | /// is insufficient. 453 | /// 454 | /// # Examples 455 | /// ``` 456 | /// let mut set = coca::collections::InlineListSet::::new(); 457 | /// assert_eq!(set.capacity(), 4); 458 | /// 459 | /// assert_eq!(set.try_insert(1), Ok(true)); 460 | /// assert_eq!(set.try_insert(2), Ok(true)); 461 | /// assert_eq!(set.try_insert(2), Ok(false)); 462 | /// assert_eq!(set.len(), 2); 463 | /// 464 | /// assert_eq!(set.try_insert(3), Ok(true)); 465 | /// assert_eq!(set.try_insert(4), Ok(true)); 466 | /// assert_eq!(set.try_insert(5), Err(5)); 467 | /// assert_eq!(set.len(), 4); 468 | /// ``` 469 | pub fn try_insert(&mut self, value: T) -> Result { 470 | if self.contains(&value) { 471 | return Ok(false); 472 | } 473 | self.vec.try_push(value).map(|_| true) 474 | } 475 | 476 | /// Inserts a value into the set without checking if it was already part of 477 | /// the set, and returns a reference to it. 
478 | /// 479 | /// It is a logic error to insert a duplicate element, and the behavior 480 | /// of the resulting set is unspecified, though it is guaranteed to be 481 | /// memory-safe. 482 | /// 483 | /// This operation is faster than regular [`insert`](ListSet::insert), 484 | /// because it does not perform a lookup before insertion. This is useful 485 | /// during initial population of the set, e.g. when constructing a set from 486 | /// another set, which guarantees unique values. 487 | /// 488 | /// # Panics 489 | /// Panics if the set is already at capacity. See 490 | /// [`try_insert_unique_unchecked`](ListSet::try_insert_unique_unchecked) 491 | /// for a checked version that never panics. 492 | #[inline] 493 | pub fn insert_unique_unchecked(&mut self, value: T) -> &T { 494 | self.try_insert_unique_unchecked(value) 495 | .ok() 496 | .expect("insufficient capacity") 497 | } 498 | 499 | /// Inserts a value into the set without checking if it was already part of 500 | /// the set. 501 | /// 502 | /// Returns a reference to the inserted value, or `Err(value)` if the set 503 | /// is already full. 504 | /// 505 | /// It is a logic error to insert a duplicate element, and the behavior 506 | /// of the resulting set is unspecified, though it is guaranteed to be 507 | /// memory-safe. 508 | /// 509 | /// This operation is faster than regular [`try_insert`](ListSet::try_insert), 510 | /// because it does not perform a lookup before insertion. This is useful 511 | /// during initial population of the set, e.g. when constructing a set from 512 | /// another set, which guarantees unique values. 513 | pub fn try_insert_unique_unchecked(&mut self, value: T) -> Result<&T, T> { 514 | if self.vec.is_full() { 515 | Err(value) 516 | } else { 517 | self.vec.push(value); 518 | self.vec.last().ok_or_else(|| unreachable!()) 519 | } 520 | } 521 | 522 | /// Adds a value to the set, replacing the existing value, if any, that is 523 | /// equal to the given one. This matters for types that can be `==` without 524 | /// being identical. 525 | /// 526 | /// Returns the replaced value. 527 | /// 528 | /// # Panics 529 | /// Panics if the set is full and does not contain any values equal to 530 | /// the given one. See [`try_replace`](ListSet::try_replace) for a checked 531 | /// version that never panics. 532 | pub fn replace(&mut self, value: T) -> Option { 533 | self.try_replace(value).ok().expect("insufficient capacity") 534 | } 535 | 536 | /// Adds a value to the set, replacing the existing value, if any, that is 537 | /// equal to the given one. This matters for types that can be `==` without 538 | /// being identical. 539 | /// 540 | /// Returns the replaced value, or `Ok(None)` if the value was successfully 541 | /// inserted without replacing any other element, or `Err(value)` if the 542 | /// set was already full. 
543 | /// 544 | /// # Examples 545 | /// ``` 546 | /// struct Foo(u32, u32); 547 | /// impl Eq for Foo {} 548 | /// impl PartialEq for Foo { 549 | /// fn eq(&self, other: &Foo) -> bool { 550 | /// self.0 == other.0 551 | /// } 552 | /// } 553 | /// 554 | /// let mut set = coca::collections::InlineListSet::::new(); 555 | /// set.insert(Foo(1, 0)); 556 | /// 557 | /// if let Ok(Some(foo)) = set.try_replace(Foo(1, 10)) { 558 | /// assert_eq!(foo.0, 1); 559 | /// assert_eq!(foo.1, 0); 560 | /// } 561 | /// # else { 562 | /// # unreachable!(); 563 | /// # } 564 | /// 565 | /// set.insert(Foo(2, 0)); 566 | /// set.insert(Foo(3, 0)); 567 | /// 568 | /// assert!(set.try_replace(Foo(4, 10)).is_err()); 569 | /// ``` 570 | pub fn try_replace(&mut self, value: T) -> Result, T> { 571 | if let Some((idx, _)) = self 572 | .vec 573 | .iter() 574 | .enumerate() 575 | .find(|(_, item)| *item == &value) 576 | { 577 | Ok(Some(self.vec.replace(I::from_usize(idx), value))) 578 | } else if self.is_full() { 579 | Err(value) 580 | } else { 581 | self.vec.push(value); 582 | Ok(None) 583 | } 584 | } 585 | 586 | /// Removes a value from the set, and returns whether it was previously present. 587 | /// 588 | /// The given value may be any borrowed form of the set's element type, 589 | /// but `Eq` on the borrowed form *must* match that of the element type. 590 | /// 591 | /// # Examples 592 | /// ``` 593 | /// let mut set = coca::collections::InlineListSet::::new(); 594 | /// set.insert(2); 595 | /// 596 | /// assert_eq!(set.remove(&2), true); 597 | /// assert_eq!(set.remove(&2), false); 598 | /// ``` 599 | pub fn remove(&mut self, value: &Q) -> bool 600 | where 601 | T: Borrow, 602 | { 603 | if let Some((idx, _)) = self 604 | .vec 605 | .iter() 606 | .enumerate() 607 | .find(|(_, item)| (*item).borrow() == value) 608 | { 609 | self.vec.swap_remove(I::from_usize(idx)); 610 | true 611 | } else { 612 | false 613 | } 614 | } 615 | 616 | /// Removes and returns the value from the set, if any, that is equal to the given one. 617 | /// 618 | /// The given value may be any borrowed form of the set's element type, 619 | /// but `Eq` on the borrowed form *must* match that of the element type. 620 | /// 621 | /// # Examples 622 | /// ``` 623 | /// let mut set = coca::collections::InlineListSet::::new(); 624 | /// set.insert(2); 625 | /// 626 | /// assert_eq!(set.take(&2), Some(2)); 627 | /// assert_eq!(set.take(&2), None); 628 | /// ``` 629 | pub fn take(&mut self, value: &Q) -> Option 630 | where 631 | T: Borrow, 632 | { 633 | if let Some((idx, _)) = self 634 | .vec 635 | .iter() 636 | .enumerate() 637 | .find(|(_, item)| (*item).borrow() == value) 638 | { 639 | Some(self.vec.swap_remove(I::from_usize(idx))) 640 | } else { 641 | None 642 | } 643 | } 644 | 645 | /// Retains only the elements specified by the predicate. 646 | /// 647 | /// In other words, removes all elements `e` such that `f(&e)` returns `false`. 648 | /// The elements are visited in unsorted (and unspecified) order. 
649 | /// 650 | /// # Examples 651 | /// ``` 652 | /// let mut set = coca::collections::InlineListSet::::new(); 653 | /// (0..8).for_each(|x| { set.insert(x); }); 654 | /// set.retain(|&x| x % 2 == 0); 655 | /// assert_eq!(set.len(), 4); 656 | /// ``` 657 | #[inline] 658 | pub fn retain bool>(&mut self, pred: F) { 659 | self.vec.retain(pred); 660 | } 661 | } 662 | 663 | impl core::ops::BitAndAssign<&'_ ListSet> for ListSet 664 | where 665 | S1: Storage>, 666 | S2: Storage>, 667 | I1: Capacity, 668 | I2: Capacity, 669 | { 670 | fn bitand_assign(&mut self, rhs: &ListSet) { 671 | let mut i = 0; 672 | while i < self.len() { 673 | if rhs.contains(&self.vec.as_slice()[i]) { 674 | i += 1; 675 | } else { 676 | self.vec.swap_remove(I1::from_usize(i)); 677 | } 678 | } 679 | } 680 | } 681 | 682 | impl core::ops::BitOrAssign<&'_ ListSet> for ListSet 683 | where 684 | T: Clone + Eq, 685 | S1: Storage>, 686 | S2: Storage>, 687 | I1: Capacity, 688 | I2: Capacity, 689 | { 690 | fn bitor_assign(&mut self, rhs: &ListSet) { 691 | for x in rhs.iter() { 692 | if !self.contains(x) { 693 | self.insert_unique_unchecked(x.clone()); 694 | } 695 | } 696 | } 697 | } 698 | 699 | impl>, I: Capacity> Debug for ListSet { 700 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 701 | f.debug_set().entries(self.vec.as_slice()).finish() 702 | } 703 | } 704 | 705 | impl>, I: Capacity> Extend for ListSet { 706 | fn extend>(&mut self, iter: It) { 707 | iter.into_iter().for_each(|x| { 708 | self.insert(x); 709 | }); 710 | } 711 | } 712 | 713 | impl<'a, T: Clone + Eq, S: Storage>, I: Capacity> Extend<&'a T> 714 | for ListSet 715 | { 716 | fn extend>(&mut self, iter: It) { 717 | iter.into_iter().for_each(|x| { 718 | self.insert(x.clone()); 719 | }); 720 | } 721 | } 722 | 723 | impl>, I: Capacity> IntoIterator for ListSet { 724 | type IntoIter = crate::collections::vec::IntoIterator; 725 | type Item = T; 726 | 727 | fn into_iter(self) -> Self::IntoIter { 728 | self.vec.into_iter() 729 | } 730 | } 731 | 732 | impl<'a, T: Eq, S: Storage>, I: Capacity> IntoIterator for &'a ListSet { 733 | type Item = &'a T; 734 | type IntoIter = Iter<'a, T>; 735 | 736 | fn into_iter(self) -> Self::IntoIter { 737 | self.as_slice().iter() 738 | } 739 | } 740 | 741 | impl PartialEq> for ListSet 742 | where 743 | S1: Storage>, 744 | S2: Storage>, 745 | I1: Capacity, 746 | I2: Capacity, 747 | T: Eq, 748 | { 749 | fn eq(&self, other: &ListSet) -> bool { 750 | self.is_subset_of(other) && self.is_superset_of(other) 751 | } 752 | } 753 | 754 | impl>, I: Capacity> Eq for ListSet {} 755 | 756 | #[cfg(feature = "alloc")] 757 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 758 | impl crate::collections::AllocListSet { 759 | /// Constructs a new, empty [`AllocListSet`](crate::collections::AllocListSet) 760 | /// with the given capacity. 761 | pub fn with_capacity(capacity: usize) -> Self { 762 | let storage = crate::storage::AllocStorage::with_capacity(capacity); 763 | Self::from(storage) 764 | } 765 | } 766 | 767 | impl ListSet, I> { 768 | /// Constructs a new, empty [`InlineListSet`](crate::collections::InlineListSet). 769 | pub fn new() -> Self { 770 | ListSet { 771 | vec: crate::collections::InlineVec::::new(), 772 | } 773 | } 774 | } 775 | 776 | impl Default for ListSet, I> { 777 | fn default() -> Self { 778 | Self::new() 779 | } 780 | } 781 | 782 | /// A lazy iterator producing elements in the difference between two [`ListSet`]s. 
783 | /// 784 | /// This `struct` is created by the [`difference`](ListSet::difference) 785 | /// method on `ListSet`. See its documentation for more. 786 | pub struct Difference<'a, T> { 787 | this: &'a [T], 788 | other: &'a [T], 789 | front: usize, 790 | } 791 | 792 | impl<'a, T: Eq> Iterator for Difference<'a, T> { 793 | type Item = &'a T; 794 | 795 | fn next(&mut self) -> Option { 796 | 'outer: while self.front < self.this.len() { 797 | let my_item = &self.this[self.front]; 798 | self.front += 1; 799 | 800 | for their_item in self.other { 801 | if my_item == their_item { 802 | continue 'outer; 803 | } 804 | } 805 | 806 | return Some(my_item); 807 | } 808 | 809 | None 810 | } 811 | 812 | fn size_hint(&self) -> (usize, Option) { 813 | let max_len = self.this.len() - self.front; 814 | (0, Some(max_len)) 815 | } 816 | } 817 | 818 | impl FusedIterator for Difference<'_, T> {} 819 | 820 | /// A lazy iterator producing elements in the symmetric difference of [`ListSet`]s. 821 | /// 822 | /// This `struct` is created by the [`symmetric_difference`](ListSet::symmetric_difference) 823 | /// method on `ListSet`. See its documentation for more. 824 | pub struct SymmetricDifference<'a, T> { 825 | this: &'a [T], 826 | other: &'a [T], 827 | front: usize, 828 | } 829 | 830 | impl<'a, T: Eq> Iterator for SymmetricDifference<'a, T> { 831 | type Item = &'a T; 832 | 833 | fn next(&mut self) -> Option { 834 | 'outer1: while self.front < self.this.len() { 835 | let my_item = &self.this[self.front]; 836 | self.front += 1; 837 | 838 | for their_item in self.other { 839 | if my_item == their_item { 840 | continue 'outer1; 841 | } 842 | } 843 | 844 | return Some(my_item); 845 | } 846 | 847 | 'outer2: while (self.front - self.this.len()) < self.other.len() { 848 | let their_item = &self.other[self.front - self.this.len()]; 849 | self.front += 1; 850 | 851 | for my_item in self.this { 852 | if my_item == their_item { 853 | continue 'outer2; 854 | } 855 | } 856 | 857 | return Some(their_item); 858 | } 859 | 860 | None 861 | } 862 | 863 | fn size_hint(&self) -> (usize, Option) { 864 | let max_len = if self.front < self.this.len() { 865 | (self.this.len() - self.front) + self.other.len() 866 | } else { 867 | self.other.len() - (self.front - self.this.len()) 868 | }; 869 | 870 | (0, Some(max_len)) 871 | } 872 | } 873 | 874 | impl FusedIterator for SymmetricDifference<'_, T> {} 875 | 876 | /// A lazy iterator producing elements in the intersection of [`ListSet`]s. 877 | /// 878 | /// This struct is created by the [`intersection`](ListSet::intersection) 879 | /// method on `ListSet`. See its documentation for more. 880 | pub struct Intersection<'a, T> { 881 | this: &'a [T], 882 | other: &'a [T], 883 | front: usize, 884 | } 885 | 886 | impl<'a, T: Eq> Iterator for Intersection<'a, T> { 887 | type Item = &'a T; 888 | 889 | fn next(&mut self) -> Option { 890 | while self.front < self.this.len() { 891 | let my_item = &self.this[self.front]; 892 | self.front += 1; 893 | 894 | for their_item in self.other { 895 | if my_item == their_item { 896 | return Some(my_item); 897 | } 898 | } 899 | } 900 | 901 | None 902 | } 903 | 904 | fn size_hint(&self) -> (usize, Option) { 905 | let max_len = self.this.len() - self.front; 906 | (0, Some(max_len)) 907 | } 908 | } 909 | 910 | impl FusedIterator for Intersection<'_, T> {} 911 | 912 | /// A lazy iterator producing elements in the union of [`ListSet`]s. 913 | /// 914 | /// This `struct` is created by the [`union`](ListSet::union) 915 | /// method on `ListSet`. 
See its documentation for more. 916 | pub struct Union<'a, T> { 917 | this: &'a [T], 918 | other: &'a [T], 919 | front: usize, 920 | } 921 | 922 | impl<'a, T: Eq> Iterator for Union<'a, T> { 923 | type Item = &'a T; 924 | 925 | fn next(&mut self) -> Option { 926 | if self.front < self.this.len() { 927 | let result = &self.this[self.front]; 928 | self.front += 1; 929 | return Some(result); 930 | } 931 | 932 | 'outer: while (self.front - self.this.len()) < self.other.len() { 933 | let their_item = &self.other[self.front - self.this.len()]; 934 | self.front += 1; 935 | 936 | for my_item in self.this { 937 | if my_item == their_item { 938 | continue 'outer; 939 | } 940 | } 941 | 942 | return Some(their_item); 943 | } 944 | 945 | None 946 | } 947 | 948 | fn size_hint(&self) -> (usize, Option) { 949 | let min_len = self.this.len().saturating_sub(self.front); 950 | let max_len = if self.front < self.this.len() { 951 | self.other.len() + min_len 952 | } else { 953 | self.other.len() - self.front 954 | }; 955 | 956 | (min_len, Some(max_len)) 957 | } 958 | } 959 | 960 | impl FusedIterator for Union<'_, T> {} 961 | -------------------------------------------------------------------------------- /src/collections/mod.rs: -------------------------------------------------------------------------------- 1 | //! Collection types. 2 | 3 | pub mod binary_heap; 4 | pub mod cache; 5 | pub mod deque; 6 | pub mod list_map; 7 | pub mod list_set; 8 | pub mod option_group; 9 | pub mod pool; 10 | pub mod vec; 11 | 12 | use crate::storage::{ArenaStorage, ArrayLayout, InlineStorage, SliceStorage}; 13 | 14 | use binary_heap::BinaryHeap; 15 | use cache::{CacheTable, LruCache2, UnitCache}; 16 | use deque::Deque; 17 | use list_map::{ListMap, ListMapLayout}; 18 | use list_set::ListSet; 19 | use option_group::OptionGroup; 20 | use pool::direct::{DirectPool, DirectPoolLayout}; 21 | use pool::packed::{PackedPool, PackedPoolLayout}; 22 | use pool::DefaultHandle; 23 | use vec::Vec; 24 | 25 | /// A binary heap using a mutable slice for storage. 26 | /// 27 | /// # Examples 28 | /// ``` 29 | /// use core::mem::MaybeUninit; 30 | /// let mut backing_array = [MaybeUninit::::uninit(); 32]; 31 | /// let (slice1, slice2) = (&mut backing_array[..]).split_at_mut(16); 32 | /// let mut heap1 = coca::collections::SliceHeap::<_>::from(slice1); 33 | /// let mut heap2 = coca::collections::SliceHeap::<_>::from(slice2); 34 | /// assert_eq!(heap1.capacity(), 16); 35 | /// assert_eq!(heap2.capacity(), 16); 36 | /// ``` 37 | pub type SliceHeap<'a, T, I = usize> = BinaryHeap, I>; 38 | /// A binary heap using an arena-allocated slice for storage. 39 | /// 40 | /// # Examples 41 | /// ``` 42 | /// use coca::arena::Arena; 43 | /// use coca::collections::ArenaHeap; 44 | /// use core::mem::MaybeUninit; 45 | /// 46 | /// let mut backing_region = [MaybeUninit::uninit(); 1024]; 47 | /// let mut arena = Arena::from(&mut backing_region[..]); 48 | /// 49 | /// let heap: ArenaHeap<'_, i64, usize> = arena.try_with_capacity(100).unwrap(); 50 | /// assert!(arena.try_with_capacity::<_, ArenaHeap<'_, i64, usize>>(100).is_none()); 51 | /// ``` 52 | pub type ArenaHeap<'a, T, I = usize> = BinaryHeap>, I>; 53 | 54 | /// A binary heap using an inline array for storage. 
55 | /// 56 | /// # Examples 57 | /// ``` 58 | /// let mut heap = coca::collections::InlineHeap::::new(); 59 | /// heap.push('a'); 60 | /// let vec = heap.into_vec(); 61 | /// assert_eq!(vec[0u8], 'a'); 62 | /// ``` 63 | pub type InlineHeap = BinaryHeap, I>; 64 | 65 | #[cfg(feature = "alloc")] 66 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 67 | /// A binary heap using a heap-allocated slice for storage. 68 | /// 69 | /// Note this still has a fixed capacity, and will never reallocate. 70 | /// 71 | /// # Examples 72 | /// ``` 73 | /// let mut heap = coca::collections::AllocHeap::::with_capacity(3); 74 | /// heap.push('a'); 75 | /// heap.push('b'); 76 | /// heap.push('c'); 77 | /// assert!(heap.try_push('d').is_err()); 78 | /// ``` 79 | pub type AllocHeap = BinaryHeap>, I>; 80 | 81 | /// A direct-mapped cache using an arena-allocated slice for storage. 82 | /// 83 | /// # Examples 84 | /// ``` 85 | /// # extern crate rustc_hash; 86 | /// use rustc_hash::FxHasher; 87 | /// use coca::{arena::Arena, collections::ArenaDirectMappedCache}; 88 | /// use core::{hash::BuildHasherDefault, mem::MaybeUninit}; 89 | /// 90 | /// # fn test() -> Option<()> { 91 | /// let mut backing_region = [MaybeUninit::uninit(); 1024]; 92 | /// let mut arena = Arena::from(&mut backing_region[..]); 93 | /// let mut cache: ArenaDirectMappedCache<'_, i32, &'static str, BuildHasherDefault> = arena.try_with_capacity(8)?; 94 | /// cache.insert(1, "a"); 95 | /// assert_eq!(cache.get(&1), Some(&"a")); 96 | /// # Some(()) 97 | /// # } 98 | /// # assert!(test().is_some()); 99 | /// ``` 100 | pub type ArenaDirectMappedCache<'src, K, V, H> = 101 | CacheTable>>, UnitCache, H>; 102 | /// A 2-way set-associative cache with a least recently used eviction policy, 103 | /// using an arena-allocated slice for storage. 104 | /// 105 | /// # Examples 106 | /// ``` 107 | /// # extern crate rustc_hash; 108 | /// use rustc_hash::FxHasher; 109 | /// use coca::{arena::Arena, collections::Arena2WayLruCache}; 110 | /// use core::{hash::BuildHasherDefault, mem::MaybeUninit}; 111 | /// 112 | /// # fn test() -> Option<()> { 113 | /// let mut backing_region = [MaybeUninit::uninit(); 1024]; 114 | /// let mut arena = Arena::from(&mut backing_region[..]); 115 | /// let mut cache: Arena2WayLruCache<'_, i32, &'static str, BuildHasherDefault> = arena.try_with_capacity(8)?; 116 | /// cache.insert(1, "a"); 117 | /// assert_eq!(cache.get(&1), Some(&"a")); 118 | /// # Some(()) 119 | /// # } 120 | /// # assert!(test().is_some()); 121 | /// ``` 122 | pub type Arena2WayLruCache<'src, K, V, H> = 123 | CacheTable>>, LruCache2, H>; 124 | 125 | /// A direct-mapped cache using an inline array for storage. 
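///
/// In a direct-mapped cache, each key hashes to exactly one of the `N` cache
/// lines, so inserting a key whose line is already occupied evicts the previous
/// occupant. The line selection can be pictured with the following plain-Rust
/// sketch (a simplified illustration, not this crate's internal code):
///
/// ```
/// use core::hash::{BuildHasher, Hash, Hasher};
///
/// fn line_index<K: Hash, B: BuildHasher>(key: &K, build: &B, lines: usize) -> usize {
///     let mut state = build.build_hasher();
///     key.hash(&mut state);
///     // Reduce the 64-bit hash to one of the `lines` slots.
///     (state.finish() as usize) % lines
/// }
///
/// let build = std::collections::hash_map::RandomState::new();
/// // Every key maps to exactly one line; distinct keys may collide on the same one.
/// assert!(line_index(&"Alice", &build, 3) < 3);
/// ```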
126 | /// 127 | /// # Examples 128 | /// ``` 129 | /// # extern crate rustc_hash; 130 | /// use rustc_hash::FxHasher; 131 | /// # use coca::collections::InlineDirectMappedCache; 132 | /// # use core::hash::BuildHasherDefault; 133 | /// # fn main() { 134 | /// let keys = ["Alice", "Bob", "Charlie", "Eve"]; 135 | /// let mut cache = InlineDirectMappedCache::<&'static str, usize, BuildHasherDefault, 3>::new(); 136 | /// 137 | /// for k in &keys { 138 | /// cache.insert(k, k.len()); 139 | /// assert_eq!(cache.get(k), Some(&k.len())); 140 | /// } 141 | /// 142 | /// let mut remembered = 0; 143 | /// for k in &keys { 144 | /// if let Some(len) = cache.get(k) { 145 | /// assert_eq!(len, &k.len()); 146 | /// remembered += 1; 147 | /// } 148 | /// } 149 | /// 150 | /// assert!(0 < remembered); 151 | /// assert!(remembered < keys.len()); 152 | /// # } 153 | /// ``` 154 | pub type InlineDirectMappedCache = 155 | CacheTable, N>, UnitCache, H>; 156 | /// A 2-way set-associative cache with a least recently used eviction policy, 157 | /// using an inline array for storage. 158 | /// 159 | /// Note that the constant generic parameter `N` is the number of cache lines, 160 | /// i.e. caches of this type have capacity for `2 * N` key-value pairs. 161 | /// 162 | /// # Examples 163 | /// ``` 164 | /// # extern crate rustc_hash; 165 | /// use rustc_hash::FxHasher; 166 | /// # use coca::collections::Inline2WayLruCache; 167 | /// # use core::hash::BuildHasherDefault; 168 | /// # fn main() { 169 | /// let keys = ["Alice", "Bob", "Charlie", "David", "Eve", "Faythe", "Grace"]; 170 | /// let mut cache = Inline2WayLruCache::<&'static str, usize, BuildHasherDefault, 3>::new(); 171 | /// assert_eq!(cache.capacity(), 6); 172 | /// 173 | /// for k in &keys { 174 | /// cache.insert(k, k.len()); 175 | /// assert_eq!(cache.get(k), Some(&k.len())); 176 | /// } 177 | /// 178 | /// let mut remembered = 0; 179 | /// for k in &keys { 180 | /// if let Some(len) = cache.get(k) { 181 | /// assert_eq!(len, &k.len()); 182 | /// remembered += 1; 183 | /// } 184 | /// } 185 | /// 186 | /// assert!(0 < remembered); 187 | /// assert!(remembered < keys.len()); 188 | /// # } 189 | /// ``` 190 | pub type Inline2WayLruCache = 191 | CacheTable, N>, LruCache2, H>; 192 | 193 | /// A direct-mapped cache using a heap-allocated array for storage. 
194 | /// 195 | /// # Examples 196 | /// ``` 197 | /// # extern crate rustc_hash; 198 | /// use rustc_hash::FxHasher; 199 | /// # use coca::collections::AllocDirectMappedCache; 200 | /// # use core::hash::BuildHasherDefault; 201 | /// let mut cache = AllocDirectMappedCache::<&'static str, usize, BuildHasherDefault>::with_capacity(3); 202 | /// assert_eq!(cache.capacity(), 3); 203 | /// 204 | /// let keys = ["Alice", "Bob", "Charlie", "Eve"]; 205 | /// for k in &keys { 206 | /// cache.insert(k, k.len()); 207 | /// assert_eq!(cache.get(k), Some(&k.len())); 208 | /// } 209 | /// 210 | /// let mut remembered = 0; 211 | /// for k in &keys { 212 | /// if let Some(len) = cache.get(k) { 213 | /// assert_eq!(len, &k.len()); 214 | /// remembered += 1; 215 | /// } 216 | /// } 217 | /// 218 | /// assert!(0 < remembered); 219 | /// assert!(remembered < keys.len()); 220 | /// ``` 221 | #[cfg(feature = "alloc")] 222 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 223 | pub type AllocDirectMappedCache = CacheTable< 224 | K, 225 | V, 226 | crate::storage::AllocStorage>>, 227 | UnitCache, 228 | H, 229 | >; 230 | 231 | /// A 2-way set-associative cache with a least recently used eviction policy, 232 | /// using a heap-allocated array for storage. 233 | /// 234 | /// # Examples 235 | /// ``` 236 | /// # extern crate rustc_hash; 237 | /// use rustc_hash::FxHasher; 238 | /// # use coca::collections::Alloc2WayLruCache; 239 | /// # use core::hash::BuildHasherDefault; 240 | /// let mut cache = Alloc2WayLruCache::<&'static str, usize, BuildHasherDefault>::with_capacity(6); 241 | /// assert_eq!(cache.capacity(), 6); 242 | /// 243 | /// let keys = ["Alice", "Bob", "Charlie", "David", "Eve", "Faythe", "Grace"]; 244 | /// for k in &keys { 245 | /// cache.insert(k, k.len()); 246 | /// assert_eq!(cache.get(k), Some(&k.len())); 247 | /// } 248 | /// 249 | /// let mut remembered = 0; 250 | /// for k in &keys { 251 | /// if let Some(len) = cache.get(k) { 252 | /// assert_eq!(len, &k.len()); 253 | /// remembered += 1; 254 | /// } 255 | /// } 256 | /// 257 | /// assert!(0 < remembered); 258 | /// assert!(remembered < keys.len()); 259 | /// ``` 260 | #[cfg(feature = "alloc")] 261 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 262 | pub type Alloc2WayLruCache = CacheTable< 263 | K, 264 | V, 265 | crate::storage::AllocStorage>>, 266 | LruCache2, 267 | H, 268 | >; 269 | 270 | /// A double-ended queue using any mutable slice for storage. 271 | /// 272 | /// # Examples 273 | /// ``` 274 | /// use core::mem::MaybeUninit; 275 | /// let mut backing_array = [MaybeUninit::::uninit(); 32]; 276 | /// let (slice1, slice2) = (&mut backing_array[..]).split_at_mut(16); 277 | /// let mut deque1 = coca::collections::SliceDeque::<_>::from(slice1); 278 | /// let mut deque2 = coca::collections::SliceDeque::<_>::from(slice2); 279 | /// assert_eq!(deque1.capacity(), 16); 280 | /// assert_eq!(deque2.capacity(), 16); 281 | /// ``` 282 | pub type SliceDeque<'a, T, I = usize> = Deque, I>; 283 | /// A double-ended queue using an arena-allocated slice for storage. 
284 | /// 285 | /// # Examples 286 | /// ``` 287 | /// use core::mem::MaybeUninit; 288 | /// use coca::arena::Arena; 289 | /// use coca::collections::ArenaDeque; 290 | /// 291 | /// # fn test() -> Option<()> { 292 | /// let mut backing_region = [MaybeUninit::uninit(); 1024]; 293 | /// let mut arena = Arena::from(&mut backing_region[..]); 294 | /// let mut deque: ArenaDeque<'_, char, usize> = arena.try_with_capacity(4)?; 295 | /// 296 | /// deque.push_front('b'); 297 | /// deque.push_front('a'); 298 | /// deque.push_back('c'); 299 | /// deque.push_back('d'); 300 | /// 301 | /// assert_eq!(deque, &['a', 'b', 'c', 'd']); 302 | /// assert_eq!(deque.try_push_back('e'), Err('e')); 303 | /// # Some(()) 304 | /// # } 305 | /// # assert!(test().is_some()); 306 | /// ``` 307 | pub type ArenaDeque<'a, T, I = usize> = Deque>, I>; 308 | 309 | #[cfg(feature = "alloc")] 310 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 311 | /// A deque using a heap-allocated slice for storage. 312 | /// 313 | /// Note that this still has a fixed capacity, and will never reallocate. 314 | /// 315 | /// # Examples 316 | /// ``` 317 | /// let mut deque = coca::collections::AllocDeque::::with_capacity(4); 318 | /// 319 | /// deque.push_front('b'); 320 | /// deque.push_front('a'); 321 | /// deque.push_back('c'); 322 | /// deque.push_back('d'); 323 | /// 324 | /// assert_eq!(deque, &['a', 'b', 'c', 'd']); 325 | /// assert_eq!(deque.try_push_back('e'), Err('e')); 326 | /// ``` 327 | pub type AllocDeque = Deque>, I>; 328 | 329 | /// A deque using an inline array for storage. 330 | /// 331 | /// # Examples 332 | /// ``` 333 | /// let mut deque = coca::collections::InlineDeque::::new(); 334 | /// deque.push_front('a'); 335 | /// assert_eq!(deque[0u8], 'a'); 336 | /// ``` 337 | pub type InlineDeque = Deque, I>; 338 | 339 | /// An association list that stores its contents in an arena-allocated memory block. 340 | /// 341 | /// # Examples 342 | /// ``` 343 | /// use coca::arena::Arena; 344 | /// use coca::collections::ArenaListMap; 345 | /// use core::mem::MaybeUninit; 346 | /// 347 | /// let mut backing_region = [MaybeUninit::uninit(); 2048]; 348 | /// let mut arena = Arena::from(&mut backing_region[..]); 349 | /// 350 | /// let map: ArenaListMap<'_, &'static str, u32> = arena.try_with_capacity(100).unwrap(); 351 | /// assert!(arena.try_with_capacity::<_, ArenaListMap<'_, &'static str, u32>>(100).is_none()); 352 | /// ``` 353 | pub type ArenaListMap<'a, K, V, I = usize> = 354 | ListMap>, I>; 355 | /// An association list that stores its contents in globally allocated memory. 356 | /// 357 | /// # Examples 358 | /// ``` 359 | /// use coca::collections::AllocListMap; 360 | /// let mut map = AllocListMap::<&'static str, u32>::with_capacity(13); 361 | /// assert_eq!(map.capacity(), 13); 362 | /// ``` 363 | #[cfg(feature = "alloc")] 364 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 365 | pub type AllocListMap = 366 | ListMap>, I>; 367 | /// An association list that stores its contents inline. 368 | /// 369 | /// # Examples 370 | /// ``` 371 | /// use coca::collections::InlineListMap; 372 | /// let mut map = InlineListMap::<&'static str, u32, 3, u8>::new(); 373 | /// # assert!(map.is_empty()); 374 | /// ``` 375 | pub type InlineListMap = 376 | ListMap, I>; 377 | 378 | /// A set based on an arena-allocated array. 
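///
/// Like every `ListSet`, membership tests and removals are implemented as
/// linear scans over the backing array, so lookups take time proportional to
/// the number of stored elements.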
379 | /// 380 | /// # Examples 381 | /// ``` 382 | /// use coca::arena::Arena; 383 | /// use coca::collections::ArenaListSet; 384 | /// use core::mem::MaybeUninit; 385 | /// 386 | /// let mut backing_region = [MaybeUninit::uninit(); 2048]; 387 | /// let mut arena = Arena::from(&mut backing_region[..]); 388 | /// 389 | /// let map: ArenaListSet<'_, &'static str> = arena.try_with_capacity(100).unwrap(); 390 | /// assert!(arena.try_with_capacity::<_, ArenaListSet<'_, &'static str>>(100).is_none()); 391 | /// ``` 392 | pub type ArenaListSet<'a, T, I = usize> = ListSet>, I>; 393 | /// A set based on a globally allocated array. 394 | /// 395 | /// # Examples 396 | /// ``` 397 | /// use coca::collections::AllocListSet; 398 | /// let mut map = AllocListSet::<&'static str>::with_capacity(13); 399 | /// assert_eq!(map.capacity(), 13); 400 | /// ``` 401 | #[cfg(feature = "alloc")] 402 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 403 | pub type AllocListSet = ListSet>, I>; 404 | /// A set based on any mutable array slice. 405 | /// 406 | /// # Examples 407 | /// ``` 408 | /// use core::mem::MaybeUninit; 409 | /// let mut backing_array = [MaybeUninit::::uninit(); 32]; 410 | /// let (slice1, slice2) = (&mut backing_array[..]).split_at_mut(16); 411 | /// let mut set1 = coca::collections::SliceListSet::<_>::from(slice1); 412 | /// let mut set2 = coca::collections::SliceListSet::<_>::from(slice2); 413 | /// assert_eq!(set1.capacity(), 16); 414 | /// assert_eq!(set2.capacity(), 16); 415 | /// ``` 416 | pub type SliceListSet<'a, T, I = usize> = ListSet, I>; 417 | /// A set based on an inline array. 418 | /// 419 | /// # Examples 420 | /// ``` 421 | /// use coca::collections::InlineListSet; 422 | /// let mut set = InlineListSet::<&'static str, 20, u8>::new(); 423 | /// ``` 424 | pub type InlineListSet = ListSet, I>; 425 | 426 | /// A group of up to eight [`Option`]s with the discriminants packed into a single `u8`. 427 | pub type OptionGroup8 = OptionGroup; 428 | /// A group of up to sixteen [`Option`]s with the discriminants packed into a single `u16`. 429 | pub type OptionGroup16 = OptionGroup; 430 | /// A group of up to 32 [`Option`]s with the discriminants packed into a single `u32`. 431 | pub type OptionGroup32 = OptionGroup; 432 | /// A group of up to 64 [`Option`]s with the discriminants packed into a single `u64`. 433 | pub type OptionGroup64 = OptionGroup; 434 | 435 | /// A direct-mapped pool that stores its contents in an arena-allocated memory block. 436 | /// 437 | /// # Examples 438 | /// ``` 439 | /// use coca::arena::Arena; 440 | /// use coca::collections::{DirectArenaPool, pool::DefaultHandle}; 441 | /// use core::mem::MaybeUninit; 442 | /// 443 | /// let mut backing_region = [MaybeUninit::uninit(); 1024]; 444 | /// let mut arena = Arena::from(&mut backing_region[..]); 445 | /// 446 | /// let pool: DirectArenaPool<'_, i64, DefaultHandle> = arena.try_with_capacity(50).unwrap(); 447 | /// assert!(arena.try_with_capacity::<_, DirectArenaPool<'_, i64, DefaultHandle>>(50).is_none()); 448 | /// ``` 449 | pub type DirectArenaPool<'src, T, H = DefaultHandle> = 450 | DirectPool>, H>; 451 | 452 | /// A direct-mapped pool that stores its content in globally allocated memory. 
453 | /// 454 | /// # Examples 455 | /// ``` 456 | /// # use coca::collections::DirectAllocPool; 457 | /// let mut pool = DirectAllocPool::::with_capacity(4); 458 | /// assert_eq!(pool.capacity(), 4); 459 | /// 460 | /// pool.insert(1); 461 | /// pool.insert(2); 462 | /// pool.insert(3); 463 | /// pool.insert(4); 464 | /// assert_eq!(pool.try_insert(5), Err(5)); 465 | /// ``` 466 | #[cfg(feature = "alloc")] 467 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 468 | pub type DirectAllocPool = 469 | DirectPool>, H>; 470 | 471 | /// A direct-mapped pool that stores its contents in an inline array, 472 | /// indexed by the specified custom [`Handle`](pool::Handle). 473 | /// 474 | /// # Examples 475 | /// ``` 476 | /// # use coca::handle_type; 477 | /// # use coca::collections::DirectInlinePool; 478 | /// handle_type! { CustomHandle: 8 / 32; } 479 | /// 480 | /// const A: u128 = 0x0123_4567_89AB_CDEF_0123_4567_89AB_CDEF; 481 | /// const B: u128 = 0xFEDC_BA98_7654_3210_FEDC_BA98_7654_3210; 482 | /// 483 | /// let mut pool = DirectInlinePool::::new(); 484 | /// let a: CustomHandle = pool.insert(A); 485 | /// let b = pool.insert(B); 486 | /// assert_eq!(pool.len(), 2); 487 | /// assert_eq!(pool.remove(a), Some(A)); 488 | /// assert_eq!(pool.remove(b), Some(B)); 489 | /// assert!(pool.is_empty()); 490 | /// ``` 491 | pub type DirectInlinePool = 492 | DirectPool, H>; 493 | 494 | /// A densely packed pool that stores its contents in a arena-allocated memory block. 495 | /// 496 | /// # Examples 497 | /// ``` 498 | /// use coca::arena::Arena; 499 | /// use coca::collections::{PackedArenaPool, pool::DefaultHandle}; 500 | /// use core::mem::MaybeUninit; 501 | /// 502 | /// let mut backing_region = [MaybeUninit::uninit(); 1024]; 503 | /// let mut arena = Arena::from(&mut backing_region[..]); 504 | /// 505 | /// let pool: PackedArenaPool<'_, i64, DefaultHandle> = arena.try_with_capacity(30).unwrap(); 506 | /// assert!(arena.try_with_capacity::<_, PackedArenaPool<'_, i64, DefaultHandle>>(30).is_none()); 507 | /// ``` 508 | pub type PackedArenaPool<'src, T, H> = PackedPool>, H>; 509 | 510 | /// A densely packed pool that stores its contents in globally allocated memory. 511 | /// 512 | /// # Examples 513 | /// ``` 514 | /// # use coca::collections::pool::DefaultHandle; 515 | /// # use coca::collections::PackedAllocPool; 516 | /// const A: u128 = 0x0123_4567_89AB_CDEF_0123_4567_89AB_CDEF; 517 | /// const B: u128 = 0xFEDC_BA98_7654_3210_FEDC_BA98_7654_3210; 518 | /// 519 | /// let mut pool = PackedAllocPool::::with_capacity(8); 520 | /// let a = pool.insert(A); 521 | /// let b = pool.insert(B); 522 | /// assert_eq!(pool.len(), 2); 523 | /// assert_eq!(pool.remove(a), Some(A)); 524 | /// assert_eq!(pool.remove(b), Some(B)); 525 | /// assert!(pool.is_empty()); 526 | /// ``` 527 | #[cfg(feature = "alloc")] 528 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 529 | pub type PackedAllocPool = 530 | PackedPool>, H>; 531 | 532 | /// A densely packed pool that stores its contents inline, indexed by the specified custom [`Handle`](pool::Handle). 533 | /// 534 | /// # Examples 535 | /// ``` 536 | /// # use coca::handle_type; 537 | /// # use coca::collections::PackedInlinePool; 538 | /// handle_type! 
{ CustomHandle: 8 / 32; } 539 | /// 540 | /// const A: u128 = 0x0123_4567_89AB_CDEF_0123_4567_89AB_CDEF; 541 | /// const B: u128 = 0xFEDC_BA98_7654_3210_FEDC_BA98_7654_3210; 542 | /// 543 | /// let mut pool = PackedInlinePool::::new(); 544 | /// let a: CustomHandle = pool.insert(A); 545 | /// let b = pool.insert(B); 546 | /// assert_eq!(pool.len(), 2); 547 | /// assert_eq!(pool.remove(a), Some(A)); 548 | /// assert_eq!(pool.remove(b), Some(B)); 549 | /// assert!(pool.is_empty()); 550 | /// ``` 551 | pub type PackedInlinePool = 552 | PackedPool, H>; 553 | 554 | /// A vector using any mutable slice for storage. 555 | /// 556 | /// # Examples 557 | /// ``` 558 | /// use core::mem::MaybeUninit; 559 | /// let mut backing_array = [MaybeUninit::::uninit(); 32]; 560 | /// let (slice1, slice2) = (&mut backing_array[..]).split_at_mut(16); 561 | /// let mut vec1 = coca::collections::SliceVec::<_>::from(slice1); 562 | /// let mut vec2 = coca::collections::SliceVec::<_>::from(slice2); 563 | /// assert_eq!(vec1.capacity(), 16); 564 | /// assert_eq!(vec2.capacity(), 16); 565 | /// ``` 566 | pub type SliceVec<'a, T, I = usize> = Vec, I>; 567 | /// A vector using an arena-allocated slice for storage. 568 | /// 569 | /// # Examples 570 | /// ``` 571 | /// use coca::arena::Arena; 572 | /// use coca::collections::ArenaVec; 573 | /// use core::mem::MaybeUninit; 574 | /// 575 | /// let mut backing_region = [MaybeUninit::uninit(); 1024]; 576 | /// let mut arena = Arena::from(&mut backing_region[..]); 577 | /// 578 | /// let v: ArenaVec<'_, i64, usize> = arena.try_with_capacity(100).unwrap(); 579 | /// assert!(arena.try_with_capacity::<_, ArenaVec<'_, i64, usize>>(100).is_none()); 580 | /// ``` 581 | pub type ArenaVec<'a, T, I = usize> = Vec>, I>; 582 | 583 | #[cfg(feature = "alloc")] 584 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 585 | /// A vector using a heap-allocated slice for storage. 586 | /// 587 | /// Note this still has a fixed capacity, and will never reallocate. 588 | /// 589 | /// # Examples 590 | /// ``` 591 | /// let mut vec = coca::collections::AllocVec::::with_capacity(3); 592 | /// vec.push('a'); 593 | /// vec.push('b'); 594 | /// vec.push('c'); 595 | /// assert!(vec.try_push('d').is_err()); 596 | /// ``` 597 | pub type AllocVec = Vec>, I>; 598 | 599 | /// A vector using an inline array for storage. 600 | /// 601 | /// # Examples 602 | /// ``` 603 | /// let mut vec = coca::collections::InlineVec::::new(); 604 | /// vec.push('a'); 605 | /// assert_eq!(vec[0u8], 'a'); 606 | /// ``` 607 | pub type InlineVec = Vec, Index>; 608 | -------------------------------------------------------------------------------- /src/collections/pool/mod.rs: -------------------------------------------------------------------------------- 1 | //! Pool-based memory management. 2 | //! 3 | //! A pool takes ownership of inserted values and returns unique, stable handles 4 | //! that can be used to refer back to those same values later on. While it is 5 | //! safe to index into a pool with a handle obtained from a different one, this 6 | //! is nonsensical; to avoid this mistake, users may define custom handle types 7 | //! using the [`handle_type!`] macro. 8 | //! 9 | //! Each handle contains a generation identifier, so that, should a value be 10 | //! removed and a new one be inserted at the same location, the old handle 11 | //! remains invalid. 12 | //! 13 | //! Note that the generation count may overflow, so this cannot be strictly 14 | //! guaranteed for arbitrarily long sequences of insertions and deletions. 
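//!
//! Conceptually, every slot remembers the generation of its current occupant,
//! and a lookup only succeeds when the handle carries that same generation.
//! The following plain-Rust sketch illustrates the idea (the names are
//! hypothetical and not part of this crate's API):
//!
//! ```
//! struct Slot<T> { generation: u32, value: Option<T> }
//! struct RawHandle { index: usize, generation: u32 }
//!
//! fn get<T>(slots: &[Slot<T>], handle: RawHandle) -> Option<&T> {
//!     let slot = slots.get(handle.index)?;
//!     // A handle issued for a previous occupant carries an older
//!     // generation and is rejected here.
//!     if slot.generation != handle.generation { return None; }
//!     slot.value.as_ref()
//! }
//!
//! let slots = [Slot { generation: 3, value: Some("live") }];
//! assert!(get(&slots, RawHandle { index: 0, generation: 3 }).is_some());
//! assert!(get(&slots, RawHandle { index: 0, generation: 1 }).is_none());
//! ```
//!
//! Overflow matters because a wrapped-around generation count can make such a
//! stale handle appear current again.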
15 | //! 64-bit handles use 32-bit generation identifiers, making such errors highly 16 | //! unlikely. Care must be taken with 32-bit handles, however, which may use as 17 | //! few as 16 bits: 18 | //! 19 | //! ``` 20 | //! # // This test is super slow in Miri, but we can't conditionally 21 | //! # // ignore doc-tests, so just make it always pass: 22 | //! # #[cfg(not(miri))] { 23 | //! # use coca::handle_type; 24 | //! handle_type! { TinyHandle: 16 / 32; } 25 | //! 26 | //! # let mut storage = [core::mem::MaybeUninit::uninit(); 128]; 27 | //! # let mut arena = coca::arena::Arena::from(&mut storage[..]); 28 | //! let mut pool: coca::collections::DirectArenaPool<&'static str, TinyHandle> = arena.with_capacity(4); 29 | //! let first = pool.insert("this was first"); 30 | //! 31 | //! let mut last_handle = first; 32 | //! for _ in 0..0x8000 { 33 | //! pool.remove(last_handle); 34 | //! last_handle = pool.insert("this is not first"); 35 | //! } 36 | //! 37 | //! assert_eq!(pool[first], "this is not first"); 38 | //! # } 39 | //! ``` 40 | 41 | pub mod direct; 42 | pub mod packed; 43 | 44 | use core::fmt::Debug; 45 | use core::hash::Hash; 46 | 47 | use crate::storage::Capacity; 48 | 49 | /// Stable references to values stored in a pool. 50 | /// 51 | /// # Safety 52 | /// Implementors must ensure that the following restrictions are met: 53 | /// 54 | /// * `Handle::new(i, g).into_raw_parts() == (i, g)` for all `i <= MAX_INDEX, g <= MAX_GENERATION`, 55 | /// * `MAX_INDEX` must be less than or equal to `Index::MAX_REPRESENTABLE`, 56 | /// * `MAX_GENERATION` must be one less than a power of two. 57 | /// 58 | /// Using [`handle_type!`] should be preferred over implementing this manually. 59 | pub unsafe trait Handle: Copy + Debug + Eq + Hash + Ord { 60 | /// The type pools should use for indices into their storage space. 61 | type Index: Capacity; 62 | /// The maximum representable index; the maximum capacity of a pool using 63 | /// this handle type is equal to `MAX_INDEX - 1` because the maximum index 64 | /// is reserved as a sentinel value. 65 | const MAX_INDEX: usize; 66 | /// The maximum representable generation count. 67 | const MAX_GENERATION: u32; 68 | 69 | /// Constructs a new handle from the storage location and generation count. 70 | /// 71 | /// # Safety 72 | /// Implementors may assume the following preconditions: 73 | /// 74 | /// * `index` is less than or equal to `MAX_INDEX`, 75 | /// * `generation` is less than or equal to `MAX_GENERATION`, 76 | /// * `generation` is odd. 77 | /// 78 | /// Violating these requirements may cause undefined behavior. 79 | unsafe fn new(index: usize, generation: u32) -> Self; 80 | /// Returns the storage location and generation count packed into the handle. 81 | fn into_raw_parts(self) -> (usize, u32); 82 | } 83 | 84 | #[cold] 85 | #[inline(never)] 86 | #[track_caller] 87 | pub(crate) fn buffer_too_large_for_handle_type() { 88 | panic!( 89 | "provided storage block cannot be fully indexed by type {} (max capacity is {})", 90 | core::any::type_name::(), 91 | H::MAX_INDEX - 1, 92 | ); 93 | } 94 | 95 | #[allow(dead_code)] // "unused" fields are actually used by the derived Debug impl 96 | #[derive(Debug)] 97 | enum DebugEntry<'a, T: Debug, H: Handle> { 98 | Occupied { 99 | generation: u32, 100 | value: &'a T, 101 | }, 102 | Vacant { 103 | generation: u32, 104 | next_free_slot: H::Index, 105 | }, 106 | } 107 | 108 | /// Generates one or more new types implementing [`Handle`]. 
109 | /// 110 | /// This can help in avoiding use of the wrong handle with an object pool. 111 | /// 112 | /// This macro takes a semicolon-separated list, where each entry must match 113 | /// one of two formats: 114 | /// 115 | /// * `($meta)* ($vis)? $name: 64` generates a handle type that is identical to 116 | /// [`DefaultHandle`], with 32 bits each for the index and generation. 117 | /// * `($meta)* ($vis)? $name: $i / 32` generates a 32-bit handle with `$i` 118 | /// bits used for the index, and the remainder used for the generation count. 119 | /// 120 | /// `($meta)*` stands for any number of attributes, including doc comments, and 121 | /// `($vis)?` is an optional visibility specifier (i.e. `pub` or `pub(crate)`). 122 | /// 123 | /// # Examples 124 | /// ``` 125 | /// use coca::{handle_type, collections::pool::Handle}; 126 | /// handle_type! { 127 | /// A: 12 / 32; 128 | /// /// Documentation for `B` goes here. 129 | /// pub B: 16 / 32; 130 | /// D: 64; 131 | /// } 132 | /// 133 | /// assert_eq!(A::MAX_INDEX, 4095); 134 | /// assert_eq!(A::MAX_GENERATION, 1_048_575); 135 | /// 136 | /// assert_eq!(B::MAX_INDEX, 65535); 137 | /// assert_eq!(B::MAX_GENERATION, 65535); 138 | /// 139 | /// assert_eq!(D::MAX_INDEX, 4_294_967_295); 140 | /// assert_eq!(D::MAX_GENERATION, 4_294_967_295); 141 | /// ``` 142 | /// Note that the maximum number of bits you can reserve for the index is 16. 143 | /// This is enforced with a static assertion: 144 | /// ```compile_fail 145 | /// # use coca::handle_type; 146 | /// handle_type!{ C: 20 / 32; } // attempt to compute `0_usize - 1_usize`, which would overflow 147 | /// ``` 148 | #[macro_export] 149 | macro_rules! handle_type { 150 | ( $(#[$attrs:meta])* $v:vis $name:ident: 64 ; $($rest:tt)* ) => { 151 | $(#[$attrs])* 152 | #[derive( 153 | core::marker::Copy, 154 | core::clone::Clone, 155 | core::fmt::Debug, 156 | core::hash::Hash, 157 | core::cmp::PartialEq, 158 | core::cmp::Eq, 159 | core::cmp::PartialOrd, 160 | core::cmp::Ord)] 161 | #[repr(transparent)] 162 | $v struct $name(core::num::NonZeroU64); 163 | 164 | impl $name { 165 | /// Returns a handle that is guaranteed to always be invalid. 
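///
/// This works because the maximum representable index is reserved as a
/// sentinel and is never used to refer to a live value.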
166 | #[allow(dead_code)] 167 | $v const fn null() -> Self { 168 | unsafe { $name(core::num::NonZeroU64::new_unchecked(0xFFFF_FFFF)) } 169 | } 170 | } 171 | 172 | unsafe impl $crate::collections::pool::Handle for $name { 173 | type Index = u32; 174 | const MAX_INDEX: usize = 0xFFFF_FFFF; 175 | const MAX_GENERATION: u32 = 0xFFFF_FFFF; 176 | unsafe fn new(index: usize, generation: u32) -> Self { 177 | debug_assert!(index <= Self::MAX_INDEX); 178 | debug_assert_eq!(generation % 2, 1); 179 | let assembled = index as u64 & 0xFFFF_FFFF | ((generation as u64) << 32); 180 | $name(core::num::NonZeroU64::new_unchecked(assembled)) 181 | } 182 | fn into_raw_parts(self) -> (usize, u32) { 183 | let raw: u64 = self.0.into(); 184 | let index = (raw & 0xFFFF_FFFF) as usize; 185 | let generation = (raw >> 32) as u32; 186 | (index, generation) 187 | } 188 | } 189 | 190 | handle_type!($($rest)*); 191 | }; 192 | ( $(#[$attrs:meta])* $v:vis $name:ident: $n:literal / 32 ; $($rest:tt)* ) => { 193 | #[allow(unknown_lints, eq_op)] 194 | const _: [(); 0 - !{ const ASSERT: bool = $n <= 16; ASSERT } as usize] = []; // static assertion 195 | 196 | $(#[$attrs])* 197 | #[derive( 198 | core::marker::Copy, 199 | core::clone::Clone, 200 | core::fmt::Debug, 201 | core::hash::Hash, 202 | core::cmp::PartialEq, 203 | core::cmp::Eq, 204 | core::cmp::PartialOrd, 205 | core::cmp::Ord)] 206 | #[repr(transparent)] 207 | $v struct $name(core::num::NonZeroU32); 208 | 209 | impl $name { 210 | /// Returns a handle that is guaranteed to always be invalid. 211 | #[allow(dead_code)] 212 | $v const fn null() -> Self { 213 | unsafe { $name(core::num::NonZeroU32::new_unchecked( 214 | ::MAX_INDEX as u32 215 | )) } 216 | } 217 | } 218 | 219 | unsafe impl $crate::collections::pool::Handle for $name { 220 | type Index = u16; 221 | const MAX_INDEX: usize = !(!0 << $n); 222 | const MAX_GENERATION: u32 = !(!0 << (32 - $n)) as u32; 223 | unsafe fn new(index: usize, generation: u32) -> Self { 224 | debug_assert!(index <= Self::MAX_INDEX); 225 | debug_assert!(generation <= Self::MAX_GENERATION); 226 | debug_assert_eq!(generation % 2, 1); 227 | let assembled = index as u32 & (Self::MAX_INDEX as u32) | ((generation as u32) << $n); 228 | $name(core::num::NonZeroU32::new_unchecked(assembled)) 229 | } 230 | fn into_raw_parts(self) -> (usize, u32) { 231 | let raw: u32 = self.0.into(); 232 | let index = (raw & (Self::MAX_INDEX as u32)) as usize; 233 | let generation = (raw >> $n) as u32; 234 | (index, generation) 235 | } 236 | } 237 | 238 | handle_type!($($rest)*); 239 | 240 | }; 241 | () => {} 242 | } 243 | 244 | handle_type! { 245 | /// The default pool handle type, with 32 bits each for the index and generation count. 246 | pub DefaultHandle: 64; 247 | } 248 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | #![doc(html_root_url = "https://docs.rs/coca/0.3.0")] 3 | #![cfg_attr(docs_rs, feature(doc_cfg))] 4 | #![cfg_attr(feature = "unstable", feature(unsize))] 5 | #![cfg_attr(feature = "unstable", feature(set_ptr_value))] 6 | #![warn(missing_docs)] 7 | #![warn(clippy::pedantic)] 8 | #![allow( 9 | clippy::inline_always, 10 | clippy::missing_errors_doc, 11 | clippy::module_name_repetitions, 12 | clippy::must_use_candidate, 13 | clippy::wildcard_imports 14 | )] 15 | 16 | //! The `coca` crate provides collection types and other facilities for managing 17 | //! 
memory without using the [`alloc` crate](https://doc.rust-lang.org/alloc/index.html). 18 | //! 19 | //! ## Data Structures with Constant Capacity 20 | //! 21 | //! Typical container implementations manage their own memory behind the scenes, 22 | //! calling the global allocator (or a user-provided one, using the as yet 23 | //! unstable [`Allocator` API](alloc::alloc::Allocator)) as needed when they 24 | //! are modified. 25 | //! 26 | //! This is convenient, but it does have some drawbacks to be aware of: 27 | //! 28 | //! * reallocation may be slow, especially if the data structure is large and 29 | //! a lot of data has to be moved around, 30 | //! * memory usage is essentially unbounded and must be carefully managed, should 31 | //! this pose a problem, 32 | //! * such implementations simply cannot be used when no allocator is available, 33 | //! as may be the case in embedded systems. 34 | //! 35 | //! In contrast, the data structures provided by `coca` do **not** work this way. 36 | //! They operate on a given block of memory, and never reallocate. This means 37 | //! that operations that grow a data structure may fail if the available space 38 | //! is insufficient. For all such operations, two methods are provided: one, 39 | //! returning a [`Result`](core::result::Result), has its name prefixed with 40 | //! `try_`, the other, without the name prefix, being a wrapper that panics in 41 | //! the error case. 42 | //! 43 | //! Client code, then, has to either guarantee that the given memory block's 44 | //! capacity will never be exceeded, or handle the failure case gracefully. 45 | //! 46 | //! ## The `Storage` Abstraction 47 | //! 48 | //! In `coca`, there is no single way of supplying a data structure with working 49 | //! memory. Instead, most containers have a generic type parameter `S` bound by 50 | //! the [`Storage` trait](storage::Storage), which is the type of the memory 51 | //! block to be used. `Storage`, in turn, has a type parameter `R`, bound by 52 | //! the [`LayoutSpec` trait](storage::LayoutSpec), which is used to describe 53 | //! the memory [`Layout`](core::alloc::Layout) required by that container. 54 | //! 55 | //! For instance, data structures built on an array (such as [`Vec`](collections::vec::Vec) 56 | //! or [`String`](string::String)) require `S: Storage>`, which is 57 | //! implemented for standard arrays and slices, among others, while more complex 58 | //! data structures (such as [`DirectPool`](collections::pool::direct::DirectPool)) 59 | //! have unique layout requirements (in this case `S: Storage>`) 60 | //! which are only fulfilled by purpose-built types. 61 | //! 62 | //! No matter the layout requirements, for each data structure, the following 63 | //! storage strategies are available: 64 | //! 65 | //! * `InlineStorage`, defined as a [partially initialized array](storage::InlineStorage) 66 | //! or as purpose-built `struct`s for non-array-like data structures (e.g. 67 | //! [`collections::pool::direct::InlineStorage`]), requires the capacity to 68 | //! be truly `const`, i.e. statically known at compile time; this allows 69 | //! storing the contents inline with the top-level `struct` type, with no 70 | //! indirection, and thus entirely on the stack, for example. 71 | //! * [`ArenaStorage`] is a (pointer, capacity) pair referencing an 72 | //! [arena-allocated](arena) block of memory, bounding the lifetime of the 73 | //! data structure to the lifetime of the `Arena`, but supports dynamically 74 | //! chosen capacities. 75 | //! 
* Likewise, [`AllocStorage`](storage::AllocStorage) references a block of 76 | //! memory from the global allocator, requiring the `alloc` crate. 77 | //! 78 | //! Note that, depending on the choice of storage type, available functionality 79 | //! may differ slightly. For example, the [`Clone`] trait is only implemented 80 | //! on data structures using `InlineStorage` or `AllocStorage`, but not 81 | //! `ArenaStorage`. 82 | //! 83 | //! Since concrete type names quickly become unwieldy in this scheme, `coca` 84 | //! provides type aliases such as [`InlineVec`](collections::InlineVec), 85 | //! [`ArenaVec`](collections::ArenaVec), and [`AllocVec`](collections::AllocVec) 86 | //! for all of its data structures. 87 | //! 88 | //! ## The `Capacity` Trait 89 | //! 90 | //! Compared to the standard implementations, most of `coca`'s data structures 91 | //! have one more additional type parameter, which is bound by the 92 | //! [`Capacity` trait](storage::Capacity). This type is used to index into the 93 | //! data structure and to represent its size at runtime. It generally defaults 94 | //! to `usize`, but `Capacity` is also implemented for `u8`, `u16`, `u32` and 95 | //! `u64`. 96 | //! 97 | //! This gives client code even more control over the exact size of the data 98 | //! structure, but it has one additional advantage: using the [`index_type`] 99 | //! macro, new types implementing the `Capacity` trait can be generated, which 100 | //! then allows using different index types with different collections, 101 | //! potentially preventing accidental use of the wrong index. 102 | //! 103 | //! ## Cargo Features 104 | //! 105 | //! - `alloc`: Enables an optional dependency on the `alloc` crate and adds 106 | //! the [`AllocStorage`](storage::AllocStorage) type, as well as other trait 107 | //! implementations and convenience functions for using the global allocator. 108 | //! - `unstable`: Adds the [`object`] module providing a statically-sized 109 | //! container for dynamically-sized types. This relies on the unstable 110 | //! `feature(unsize)` and `feature(set_ptr_value)` and thus requires a nightly 111 | //! compiler. 112 | //! - `profile`: Adds memory profiling in arena allocators. See the 113 | //! [module-level documentation](arena#memory-profiling) for details. 114 | //! 115 | //! None of these features are enabled by default. 116 | 117 | #[cfg(feature = "alloc")] 118 | #[doc(hidden)] 119 | pub extern crate alloc; 120 | 121 | pub mod arena; 122 | pub mod collections; 123 | pub mod storage; 124 | pub mod string; 125 | 126 | #[cfg(feature = "unstable")] 127 | #[cfg_attr(docs_rs, doc(cfg(feature = "unstable")))] 128 | pub mod object; 129 | 130 | #[cfg(feature = "unstable")] 131 | #[cfg_attr(docs_rs, doc(cfg(feature = "unstable")))] 132 | pub use crate::object::InlineObject; 133 | 134 | use crate::storage::{ArenaStorage, ArrayLayout, InlineStorage, SliceStorage}; 135 | use crate::string::String; 136 | 137 | /// A string using any mutable byte slice for storage. 138 | /// 139 | /// # Examples 140 | /// ``` 141 | /// let mut buf = [core::mem::MaybeUninit::::uninit(); 8]; 142 | /// let str = coca::SliceString::<'_, usize>::from(&mut buf[..6]); 143 | /// 144 | /// assert_eq!(str.capacity(), 6); 145 | /// ``` 146 | pub type SliceString<'a, I = usize> = String, I>; 147 | /// A string using an arena-allocated byte slice for storage. 
148 | /// 149 | /// # Examples 150 | /// ``` 151 | /// use coca::arena::Arena; 152 | /// use coca::ArenaString; 153 | /// use core::mem::MaybeUninit; 154 | /// 155 | /// let mut backing_region = [MaybeUninit::uninit(); 160]; 156 | /// let mut arena = Arena::from(&mut backing_region[..]); 157 | /// 158 | /// let s: ArenaString<'_, usize> = arena.try_with_capacity(100).unwrap(); 159 | /// assert!(arena.try_with_capacity::<_, ArenaString<'_, usize>>(100).is_none()); 160 | /// ``` 161 | pub type ArenaString<'a, I = usize> = String>, I>; 162 | 163 | #[cfg(feature = "alloc")] 164 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 165 | /// A string using a heap-allocated slice for storage. 166 | /// 167 | /// # Examples 168 | /// ``` 169 | /// let mut s = coca::AllocString::with_capacity(16usize); 170 | /// s.push_str("Hello, "); 171 | /// s.push_str("World!"); 172 | /// 173 | /// assert_eq!(s, "Hello, World!"); 174 | /// ``` 175 | pub type AllocString = String>, I>; 176 | 177 | /// A string using an inline array for storage. 178 | /// 179 | /// # Examples 180 | /// ``` 181 | /// let mut s = coca::InlineString::<255, u8>::new(); 182 | /// assert_eq!(s.capacity(), 255); 183 | /// assert!(s.is_empty()); 184 | /// ``` 185 | pub type InlineString = String, I>; 186 | 187 | /// The error type for many operations on data structures with constant capacity. 188 | /// 189 | /// When working with data structures of limited capacity, insertions may fail 190 | /// due to insufficient remaining space. In `coca`, insertion methods generally 191 | /// have a name starting with `try`, and return a [`Result`](core::result::Result). 192 | /// For convenience, panicking wrappers without the `try` prefix are also provided. 193 | /// 194 | /// In many cases, such as e.g. [`Vec::try_push`](crate::collections::vec::Vec::try_push), 195 | /// the value to be inserted is returned back to the caller when the operation fails; 196 | /// in some cases, this is unnecessary (e.g. when ownership is not transferred, as with 197 | /// [`Vec::try_extend_from_slice`](crate::collections::vec::Vec::try_extend_from_slice)) 198 | /// or would result in unwieldy type signatures. Such methods use this unit error type 199 | /// instead. 200 | #[derive(Copy, Clone, Debug, Default)] 201 | pub struct CapacityError; 202 | 203 | impl CapacityError { 204 | #[inline(always)] 205 | pub(crate) fn new() -> core::result::Result { 206 | Err(Self) 207 | } 208 | } 209 | 210 | /// A specialized [`Result`](core::result::Result) type for operations on data structures with constant capacity. 211 | /// 212 | /// This type is broadly used across `coca` for most operations which grow a data structure. 213 | /// 214 | /// This type definition is generally used to avoid writing out [`CapacityError`] directly and is otherwise a direct mapping to [`core::result::Result`]. 
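///
/// A brief usage sketch (the element type and inline capacity are arbitrary):
///
/// ```
/// let mut vec = coca::collections::InlineVec::<u32, 2, usize>::new();
/// assert!(vec.try_extend_from_slice(&[1, 2]).is_ok());
/// // The vector is now full, so further growth reports a `CapacityError`.
/// assert!(vec.try_extend_from_slice(&[3]).is_err());
/// ```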
215 | pub type Result = core::result::Result; 216 | 217 | #[cfg(test)] 218 | mod test_utils { 219 | use core::cell::Cell; 220 | 221 | #[cfg(target_pointer_width = "64")] 222 | pub(crate) const RNG_SEED: [u8; 32] = [ 223 | 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 224 | 0xf0, 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 225 | 0xde, 0xf0, 226 | ]; 227 | #[cfg(not(target_pointer_width = "64"))] 228 | pub(crate) const RNG_SEED: [u8; 16] = [ 229 | 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 230 | 0xf0, 231 | ]; 232 | 233 | #[derive(Debug)] 234 | pub(crate) struct DropCounter { 235 | drop_count: Cell, 236 | } 237 | 238 | impl DropCounter { 239 | pub(crate) fn new() -> Self { 240 | DropCounter { 241 | drop_count: Cell::new(0), 242 | } 243 | } 244 | 245 | pub(crate) fn new_droppable(&self, value: T) -> Droppable<'_, T> { 246 | Droppable { 247 | counter: self, 248 | value, 249 | } 250 | } 251 | 252 | pub(crate) fn dropped(&self) -> usize { 253 | self.drop_count.get() 254 | } 255 | } 256 | 257 | #[derive(Debug)] 258 | pub(crate) struct Droppable<'a, T = ()> { 259 | counter: &'a DropCounter, 260 | pub value: T, 261 | } 262 | 263 | impl<'a, T> Drop for Droppable<'a, T> { 264 | fn drop(&mut self) { 265 | let new_drop_count = self.counter.drop_count.get() + 1; 266 | self.counter.drop_count.set(new_drop_count); 267 | } 268 | } 269 | } 270 | -------------------------------------------------------------------------------- /src/object.rs: -------------------------------------------------------------------------------- 1 | //! Statically-sized containers for dynamically-sized types. 2 | //! 3 | //! This module uses unstable features and should be considered experimental. 4 | 5 | use core::alloc::{Layout, LayoutError}; 6 | use core::fmt::{self, Debug, Formatter}; 7 | use core::marker::Unsize; 8 | use core::mem::{align_of, size_of, ManuallyDrop, MaybeUninit}; 9 | use core::ops::{Deref, DerefMut}; 10 | use core::ptr::NonNull; 11 | 12 | use crate::storage::{LayoutSpec, Storage}; 13 | 14 | /// The [`LayoutSpec`] for [`Object`] storage. 15 | pub struct ObjectLayout(()); 16 | impl LayoutSpec for ObjectLayout { 17 | fn layout_with_capacity(bytes: usize) -> Result { 18 | Layout::from_size_align(bytes, 8) 19 | } 20 | } 21 | 22 | /// Unit struct that can be used to modify the alignment of [`InlineStorage`]. 23 | #[repr(align(8))] 24 | pub struct Align8; 25 | 26 | /// Unit struct that can be used to modify the alignment of [`InlineStorage`]. 27 | #[repr(align(16))] 28 | pub struct Align16; 29 | 30 | /// Unit struct that can be used to modify the alignment of [`InlineStorage`]. 31 | #[repr(align(32))] 32 | pub struct Align32; 33 | 34 | /// Unit struct that can be used to modify the alignment of [`InlineStorage`]. 35 | #[repr(align(64))] 36 | pub struct Align64; 37 | 38 | /// Unit struct that can be used to modify the alignment of [`InlineStorage`]. 39 | #[repr(align(128))] 40 | pub struct Align128; 41 | 42 | /// An `N`-wide, partially initialized byte array with the alignment of `A`. 
43 | /// 44 | /// # Examples 45 | /// Note that its size in memory is always a multiple of its alignment, so it 46 | /// is recommended to chose `N` accordingly: 47 | /// ``` 48 | /// use core::mem::{align_of, size_of}; 49 | /// use coca::object::{InlineStorage, Align16}; 50 | /// assert_eq!(align_of::>(), 16); 51 | /// 52 | /// assert_eq!(size_of::>(), 16); 53 | /// assert_eq!(size_of::>(), 16); 54 | /// assert_eq!(size_of::>(), 32); 55 | /// ``` 56 | #[repr(C)] 57 | pub union InlineStorage { 58 | force_alignment: ManuallyDrop, 59 | data: [MaybeUninit; N], 60 | } 61 | 62 | unsafe impl Storage for InlineStorage { 63 | fn capacity(&self) -> usize { 64 | N 65 | } 66 | fn get_ptr(&self) -> *const u8 { 67 | unsafe { self.data.as_ptr().cast() } 68 | } 69 | fn get_mut_ptr(&mut self) -> *mut u8 { 70 | unsafe { self.data.as_mut_ptr().cast() } 71 | } 72 | } 73 | 74 | /// A statically-sized container for dynamically-sized types. 75 | /// 76 | /// This is primarily intended for use with `dyn Trait` (see 77 | /// [`InlineObject::new`](Object::new) for example usage). 78 | /// 79 | /// [`Vec`](crate::collections::vec) should be preferred for dynamically sized arrays. 80 | /// 81 | /// While it is *possible* to store a [`Sized`] type in an `Object`, there are 82 | /// no benefits to doing so, and it adds at least one `usize` of storage overhead. 83 | pub struct Object> { 84 | meta: NonNull, 85 | buf: S, 86 | } 87 | 88 | /// An object that stores dynamically-sized values in an inline array. 89 | /// 90 | /// Note that this can only hold values with alignment less than or equal to 8. 91 | /// If you need to store values with higher alignment requirements, use 92 | /// [`InlineStorage`] explicitly; the required alignment can be specified with 93 | /// the first generic type parameter (using [`Align16`], [`Align32`], etc.). 94 | pub type InlineObject = Object>; 95 | 96 | impl> Deref for Object { 97 | type Target = T; 98 | fn deref(&self) -> &Self::Target { 99 | let dangling = self.meta.as_ptr() as *const T; 100 | let fat_ptr = dangling.set_ptr_value(self.buf.get_ptr()); 101 | unsafe { fat_ptr.as_ref().unwrap() } 102 | } 103 | } 104 | 105 | impl> DerefMut for Object { 106 | fn deref_mut(&mut self) -> &mut Self::Target { 107 | let dangling = self.meta.as_ptr() as *mut T; 108 | let fat_ptr = dangling.set_ptr_value(self.buf.get_mut_ptr()); 109 | unsafe { fat_ptr.as_mut().unwrap() } 110 | } 111 | } 112 | 113 | impl> Debug for Object { 114 | fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { 115 | self.deref().fmt(f) 116 | } 117 | } 118 | 119 | impl> Drop for Object { 120 | fn drop(&mut self) { 121 | unsafe { core::ptr::addr_of_mut!(**self).drop_in_place() }; 122 | } 123 | } 124 | 125 | impl Object> { 126 | #[inline] 127 | fn check_layout>() { 128 | #[inline(never)] 129 | #[cold] 130 | fn value_too_large(cap: usize, actual: usize) { 131 | panic!( 132 | "the object can only hold {} bytes, but the supplied value is {} bytes", 133 | cap, actual 134 | ); 135 | } 136 | 137 | #[inline(never)] 138 | #[cold] 139 | fn value_too_strictly_aligned(guaranteed: usize, required: usize) { 140 | panic!("the object can only guarantee an alignment of {}, but the supplied value requires {}", guaranteed, required); 141 | } 142 | 143 | if size_of::() > N { 144 | value_too_large(N, size_of::()); 145 | } 146 | if align_of::() > align_of::() { 147 | value_too_strictly_aligned(align_of::(), align_of::()); 148 | } 149 | } 150 | 151 | /// Constructs a new inline object containing the provided value. 
152 | /// 153 | /// # Panics 154 | /// Panics if `size_of::()` is greater than the inline object's capacity, 155 | /// or if `align_of::()` is greater than `align_of::()`. 156 | /// 157 | /// # Examples 158 | /// ``` 159 | /// use coca::InlineObject; 160 | /// 161 | /// trait Greeting { fn greet(&self) -> &'static str; } 162 | /// 163 | /// struct Sailors(u32); 164 | /// impl Greeting for Sailors { 165 | /// fn greet(&self) -> &'static str { 166 | /// if self.0 == 1 { 167 | /// "Hello, Sailor!" 168 | /// } else { 169 | /// "Hello, Crew!" 170 | /// } 171 | /// } 172 | /// } 173 | /// 174 | /// struct World; 175 | /// impl Greeting for World { 176 | /// fn greet(&self) -> &'static str { 177 | /// "Hello, World!" 178 | /// } 179 | /// } 180 | /// 181 | /// let mut obj: InlineObject = InlineObject::new(Sailors(1)); 182 | /// assert_eq!(obj.greet(), "Hello, Sailor!"); 183 | /// 184 | /// obj = InlineObject::new(World); 185 | /// assert_eq!(obj.greet(), "Hello, World!"); 186 | /// ``` 187 | pub fn new>(val: U) -> Self { 188 | Self::check_layout::(); 189 | let mut result = Object { 190 | meta: NonNull::::dangling() as NonNull, 191 | buf: InlineStorage { 192 | data: [MaybeUninit::uninit(); N], 193 | }, 194 | }; 195 | 196 | let ptr = result.buf.get_mut_ptr().cast::(); 197 | unsafe { ptr.write(val) }; 198 | 199 | result 200 | } 201 | 202 | /// Drops the value currently held by the object and stores the provided value. 203 | /// 204 | /// # Panics 205 | /// Panics if `size_of::()` is greater than the inline object's capacity, 206 | /// or if `align_of::()` is greater than `align_of::()`. 207 | /// 208 | /// # Examples 209 | /// ``` 210 | /// use coca::InlineObject; 211 | /// let mut obj: InlineObject<[i32], 16> = InlineObject::new([1, 2, 3]); 212 | /// assert_eq!(*obj, [1, 2, 3]); 213 | /// 214 | /// obj.set([4, 5, 6, 7]); 215 | /// assert_eq!(*obj, [4, 5, 6, 7]); 216 | /// ``` 217 | pub fn set>(&mut self, val: U) { 218 | Self::check_layout::(); 219 | unsafe { core::ptr::addr_of_mut!(**self).drop_in_place() }; 220 | 221 | self.meta = NonNull::::dangling() as NonNull; 222 | let ptr = self.buf.get_mut_ptr().cast::(); 223 | unsafe { ptr.write(val) }; 224 | } 225 | } 226 | 227 | #[cfg(test)] 228 | mod tests { 229 | use super::*; 230 | use core::mem::align_of; 231 | 232 | #[test] 233 | fn inline_storage_layout() { 234 | fn test_layout(align: usize) { 235 | assert_eq!(align_of::(), align); 236 | assert_eq!(align_of::>(), align); 237 | 238 | let expected_size = (B + (align - 1)) & !(align - 1); 239 | assert_eq!(size_of::>(), expected_size); 240 | } 241 | 242 | test_layout::(8); 243 | test_layout::(8); 244 | test_layout::(8); 245 | 246 | test_layout::(16); 247 | test_layout::(16); 248 | 249 | test_layout::(32); 250 | test_layout::(32); 251 | 252 | test_layout::(64); 253 | test_layout::(64); 254 | 255 | test_layout::(128); 256 | test_layout::(128); 257 | } 258 | 259 | #[test] 260 | fn drops_correctly() { 261 | use crate::test_utils::*; 262 | 263 | let drop_count = DropCounter::new(); 264 | { 265 | let mut obj: InlineObject = 266 | InlineObject::new(drop_count.new_droppable(())); 267 | obj.set(drop_count.new_droppable(())); 268 | assert_eq!(drop_count.dropped(), 1); 269 | } 270 | assert_eq!(drop_count.dropped(), 2); 271 | } 272 | } 273 | -------------------------------------------------------------------------------- /src/storage.rs: -------------------------------------------------------------------------------- 1 | //! Traits abstracting over storage strategies and index types. 
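//!
//! Index types are modeled by the [`Capacity`] trait; custom ones are most
//! conveniently generated with the [`index_type!`] macro. A minimal round-trip
//! sketch (the type name `NodeId` is only an example):
//!
//! ```
//! use coca::index_type;
//! use coca::storage::Capacity;
//!
//! index_type! { NodeId: u16; }
//!
//! let id = NodeId::from_usize(42);
//! assert_eq!(id.as_usize(), 42);
//! ```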
2 | 3 | use core::alloc::{Layout, LayoutError}; 4 | use core::convert::{TryFrom, TryInto}; 5 | use core::fmt::Debug; 6 | use core::hash::Hash; 7 | use core::marker::PhantomData; 8 | use core::mem::MaybeUninit; 9 | use core::ops::{Range, RangeBounds}; 10 | use core::ptr::NonNull; 11 | 12 | /// Types that can be used for indexing into array-like data structures. 13 | /// 14 | /// # Safety 15 | /// Implementors must ensure that `Capacity::from_usize(i).as_usize() == i` for 16 | /// all values `i <= MAX_REPRESENTABLE`. Otherwise, this expression must panic 17 | /// or evaluate to `false`. 18 | /// 19 | /// Using [`index_type!`] should be preferred over implementing this manually. 20 | pub unsafe trait Capacity: Copy + Debug + Eq + Hash + Ord { 21 | /// The maximum `usize` value that can safely be represented by this type. 22 | const MAX_REPRESENTABLE: usize; 23 | /// Convert a `usize` into `Self`. 24 | fn from_usize(i: usize) -> Self; 25 | /// Convert `self` into `usize`. 26 | fn as_usize(&self) -> usize; 27 | } 28 | 29 | #[cold] 30 | #[inline(never)] 31 | #[track_caller] 32 | pub(crate) fn buffer_too_large_for_index_type() { 33 | panic!( 34 | "provided storage block cannot be fully indexed by type {}", 35 | core::any::type_name::() 36 | ); 37 | } 38 | 39 | pub(crate) fn normalize_range>( 40 | range: R, 41 | max_end: usize, 42 | ) -> Range { 43 | use core::ops::Bound; 44 | let start = match range.start_bound() { 45 | Bound::Included(x) => x.as_usize(), 46 | Bound::Excluded(x) => x.as_usize().saturating_add(1), 47 | Bound::Unbounded => 0, 48 | }; 49 | let end = match range.end_bound() { 50 | Bound::Included(x) => x.as_usize().saturating_add(1), 51 | Bound::Excluded(x) => x.as_usize(), 52 | Bound::Unbounded => max_end, 53 | }; 54 | 55 | assert!( 56 | end <= max_end, 57 | "invalid range specifier: end (is {:?}) is greater than {:?}", 58 | end, 59 | max_end 60 | ); 61 | assert!( 62 | start <= end, 63 | "invalid range specifier: start (is {:?}) is greater than end (is {:?})", 64 | start, 65 | end 66 | ); 67 | 68 | Range { start, end } 69 | } 70 | 71 | #[allow(clippy::cast_possible_truncation)] 72 | unsafe impl Capacity for u8 { 73 | const MAX_REPRESENTABLE: usize = 0xFF; 74 | #[inline] 75 | fn from_usize(i: usize) -> Self { 76 | debug_assert!(>::try_into(i).is_ok()); 77 | i as u8 78 | } 79 | 80 | #[inline] 81 | fn as_usize(&self) -> usize { 82 | *self as usize 83 | } 84 | } 85 | 86 | #[allow(clippy::cast_possible_truncation)] 87 | unsafe impl Capacity for u16 { 88 | const MAX_REPRESENTABLE: usize = 0xFFFF; 89 | #[inline] 90 | fn from_usize(i: usize) -> Self { 91 | debug_assert!(>::try_into(i).is_ok()); 92 | i as u16 93 | } 94 | 95 | #[inline] 96 | fn as_usize(&self) -> usize { 97 | *self as usize 98 | } 99 | } 100 | 101 | #[allow(clippy::cast_possible_truncation)] 102 | unsafe impl Capacity for u32 { 103 | const MAX_REPRESENTABLE: usize = 0xFFFF_FFFF; 104 | #[inline] 105 | fn from_usize(i: usize) -> Self { 106 | debug_assert!(>::try_into(i).is_ok()); 107 | i as u32 108 | } 109 | 110 | #[inline] 111 | fn as_usize(&self) -> usize { 112 | debug_assert!(>::try_from(*self).is_ok()); 113 | *self as usize 114 | } 115 | } 116 | 117 | #[allow(clippy::cast_possible_truncation)] 118 | unsafe impl Capacity for u64 { 119 | const MAX_REPRESENTABLE: usize = usize::max_value(); 120 | #[inline] 121 | fn from_usize(i: usize) -> Self { 122 | debug_assert!(>::try_into(i).is_ok()); 123 | i as u64 124 | } 125 | 126 | #[inline] 127 | fn as_usize(&self) -> usize { 128 | debug_assert!(>::try_from(*self).is_ok()); 129 | 
*self as usize 130 | } 131 | } 132 | 133 | unsafe impl Capacity for usize { 134 | const MAX_REPRESENTABLE: usize = usize::max_value(); 135 | #[inline] 136 | fn from_usize(i: usize) -> Self { 137 | i 138 | } 139 | 140 | #[inline] 141 | fn as_usize(&self) -> usize { 142 | *self 143 | } 144 | } 145 | 146 | /// Generates one or more new types wrapping an implementor of [`Capacity`]. 147 | /// 148 | /// This can help in avoiding use of the wrong index with a [`Vec`](crate::collections::vec::Vec). 149 | /// 150 | /// # Examples 151 | /// ```compile_fail 152 | /// use coca::{index_type, vec::Vec}; 153 | /// use core::mem::MaybeUninit; 154 | /// 155 | /// index_type! { 156 | /// pub IndexA: u8; 157 | /// IndexB: u8; 158 | /// }; 159 | /// 160 | /// let mut backing_a = [MaybeUninit::::uninit(); 20]; 161 | /// let mut backing_b = [MaybeUninit::::uninit(); 30]; 162 | /// 163 | /// let mut vec_a = Vec::<_, _, IndexA>::from(&mut backing_a[..]); 164 | /// for i in 0..20 { vec_a.push(i); } 165 | /// 166 | /// let mut vec_b = Vec::<_, _, IndexB>::from(&mut backing_b[..]); 167 | /// for i in 0..30 { vec_b.push(i * 2); } 168 | /// 169 | /// let a = vec_a[IndexA(10)]; 170 | /// let b = vec_b[IndexB(15)]; 171 | /// let c = vec_a[IndexB(25)]; 172 | /// // ^^^^^^^^^^^^^^^^^ `coca::Vec<...>` cannot be indexed by `IndexB` 173 | /// ``` 174 | #[macro_export] 175 | macro_rules! index_type { 176 | ( $(#[$attrs:meta])* $v:vis $name:ident: $repr:ty ) => { 177 | $(#[$attrs])* 178 | #[derive( 179 | core::marker::Copy, 180 | core::clone::Clone, 181 | core::default::Default, 182 | core::fmt::Debug, 183 | core::hash::Hash, 184 | core::cmp::PartialEq, 185 | core::cmp::Eq, 186 | core::cmp::PartialOrd, 187 | core::cmp::Ord)] 188 | $v struct $name($repr); 189 | 190 | unsafe impl $crate::storage::Capacity for $name { 191 | const MAX_REPRESENTABLE: usize = <$repr as $crate::storage::Capacity>::MAX_REPRESENTABLE; 192 | #[inline] 193 | #[track_caller] 194 | fn from_usize(i: usize) -> Self { 195 | Self(<$repr as $crate::storage::Capacity>::from_usize(i)) 196 | } 197 | 198 | #[inline] 199 | #[track_caller] 200 | fn as_usize(&self) -> usize { 201 | <$repr as $crate::storage::Capacity>::as_usize(&self.0) 202 | } 203 | } 204 | }; 205 | 206 | ( $(#[$attrs:meta])* $v:vis $name:ident: $repr:ty ; $($rest:tt)* ) => { 207 | $crate::index_type!($(#[$attrs])* $v $name: $repr); 208 | $crate::index_type!($($rest)*); 209 | }; 210 | 211 | () => {} 212 | } 213 | 214 | /// Types that specify a data structure's storage layout requirements. 215 | pub trait LayoutSpec { 216 | /// Constructs a [`Layout`] for a memory block capable of holding the 217 | /// specified number of items. 218 | fn layout_with_capacity(items: usize) -> Result; 219 | } 220 | 221 | /// Inconstructible type to mark data structures that require an array-like storage layout. 222 | pub struct ArrayLayout(PhantomData); 223 | impl LayoutSpec for ArrayLayout { 224 | fn layout_with_capacity(items: usize) -> Result { 225 | Layout::array::(items) 226 | } 227 | } 228 | 229 | /// An interface to a contiguous memory block for use by data structures. 230 | #[allow(clippy::missing_safety_doc)] // individual methods _do_ have safety docs! 231 | pub unsafe trait Storage: Sized { 232 | /// Extracts a pointer to the beginning of the memory block. 233 | /// 234 | /// # Safety 235 | /// Implementors must ensure the same pointer is returned every time this 236 | /// method is called throughout the block's lifetime. 
237 | fn get_ptr(&self) -> *const u8; 238 | /// Extracts a mutable pointer to the beginning of the memory block. 239 | /// 240 | /// # Safety 241 | /// Implementors must ensure the same pointer is returned every time this 242 | /// method is called throughout the block's lifetime. 243 | fn get_mut_ptr(&mut self) -> *mut u8; 244 | /// Returns the maximum number of items the memory block can hold. 245 | /// 246 | /// # Safety 247 | /// What exactly constitutes an item depends on the argument type `R`. 248 | /// When called on a memory block with a layout matching 249 | /// `R::layout_with_capacity(n)`, this must return at most `n`. 250 | /// 251 | /// Implementors must ensure the same value is returned every time this 252 | /// method is called throughout the block's lifetime. 253 | fn capacity(&self) -> usize; 254 | } 255 | 256 | #[inline(always)] 257 | pub(crate) fn ptr_at_index>>(storage: &S, index: usize) -> *const T { 258 | debug_assert!(index <= storage.capacity()); 259 | let ptr = storage.get_ptr().cast::(); 260 | ptr.wrapping_add(index) 261 | } 262 | 263 | #[inline(always)] 264 | pub(crate) fn mut_ptr_at_index>>( 265 | storage: &mut S, 266 | index: usize, 267 | ) -> *mut T { 268 | debug_assert!(index <= storage.capacity()); 269 | let ptr = storage.get_mut_ptr().cast::(); 270 | ptr.wrapping_add(index) 271 | } 272 | 273 | /// Shorthand for `&'a mut [MaybeUninit]` for use with generic data structures. 274 | pub type SliceStorage<'a, T> = &'a mut [MaybeUninit]; 275 | unsafe impl Storage> for &mut [MaybeUninit] { 276 | #[inline] 277 | fn get_ptr(&self) -> *const u8 { 278 | self.as_ptr().cast() 279 | } 280 | #[inline] 281 | fn get_mut_ptr(&mut self) -> *mut u8 { 282 | self.as_mut_ptr().cast() 283 | } 284 | #[inline] 285 | fn capacity(&self) -> usize { 286 | self.len() 287 | } 288 | } 289 | 290 | /// A fat pointer to an arena-allocated storage block conforming to a [`LayoutSpec`]. 291 | pub struct ArenaStorage<'src, R: LayoutSpec> { 292 | ptr: NonNull, 293 | cap: usize, 294 | spec: PhantomData, 295 | src: PhantomData<&'src ()>, 296 | } 297 | 298 | impl ArenaStorage<'_, R> { 299 | pub(crate) unsafe fn from_raw_parts(ptr: *mut u8, cap: usize) -> Option { 300 | let ptr = NonNull::new(ptr)?; 301 | Some(ArenaStorage { 302 | ptr, 303 | cap, 304 | spec: PhantomData, 305 | src: PhantomData, 306 | }) 307 | } 308 | } 309 | 310 | unsafe impl Storage for ArenaStorage<'_, R> { 311 | fn get_ptr(&self) -> *const u8 { 312 | self.ptr.as_ptr() as _ 313 | } 314 | 315 | fn get_mut_ptr(&mut self) -> *mut u8 { 316 | self.ptr.as_ptr() 317 | } 318 | 319 | fn capacity(&self) -> usize { 320 | self.cap 321 | } 322 | } 323 | 324 | unsafe impl> Storage for crate::arena::Box<'_, S> { 325 | #[inline] 326 | fn get_ptr(&self) -> *const u8 { 327 | (**self).get_ptr() 328 | } 329 | #[inline] 330 | fn get_mut_ptr(&mut self) -> *mut u8 { 331 | (**self).get_mut_ptr() 332 | } 333 | #[inline] 334 | fn capacity(&self) -> usize { 335 | (**self).capacity() 336 | } 337 | } 338 | 339 | unsafe impl> Storage for &mut S { 340 | fn get_ptr(&self) -> *const u8 { 341 | (**self).get_ptr() 342 | } 343 | fn get_mut_ptr(&mut self) -> *mut u8 { 344 | (**self).get_mut_ptr() 345 | } 346 | fn capacity(&self) -> usize { 347 | (**self).capacity() 348 | } 349 | } 350 | 351 | /// A fat pointer to a heap-allocated storage block conforming to a [`LayoutSpec`]. 
352 | #[cfg(feature = "alloc")] 353 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 354 | pub struct AllocStorage { 355 | ptr: NonNull, 356 | cap: usize, 357 | spec: PhantomData, 358 | } 359 | 360 | #[cfg(feature = "alloc")] 361 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 362 | impl AllocStorage { 363 | /// Allocates a new storage block with the specified capacity with the 364 | /// global allocator. 365 | /// 366 | /// # Panics 367 | /// Panics if `capacity` is large enough to cause a layout error, or if 368 | /// allocation fails. 369 | pub fn with_capacity(capacity: usize) -> Self { 370 | let layout = 371 | R::layout_with_capacity(capacity).expect("layout error in AllocStorage::with_capacity"); 372 | let ptr = unsafe { alloc::alloc::alloc(layout) }; 373 | let ptr = NonNull::new(ptr).expect("allocation failure in AllocStorage::with_capacity"); 374 | AllocStorage { 375 | ptr, 376 | cap: capacity, 377 | spec: PhantomData, 378 | } 379 | } 380 | } 381 | 382 | #[cfg(feature = "alloc")] 383 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 384 | impl Drop for AllocStorage { 385 | fn drop(&mut self) { 386 | let layout = R::layout_with_capacity(self.cap).expect("dropped an invalid AllocStorage"); 387 | unsafe { alloc::alloc::dealloc(self.ptr.as_ptr(), layout) }; 388 | } 389 | } 390 | 391 | #[cfg(feature = "alloc")] 392 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 393 | unsafe impl Storage for AllocStorage { 394 | fn get_ptr(&self) -> *const u8 { 395 | self.ptr.as_ptr() as _ 396 | } 397 | fn get_mut_ptr(&mut self) -> *mut u8 { 398 | self.ptr.as_ptr() 399 | } 400 | fn capacity(&self) -> usize { 401 | self.cap 402 | } 403 | } 404 | 405 | #[cfg(feature = "alloc")] 406 | #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))] 407 | unsafe impl> Storage for alloc::boxed::Box { 408 | fn get_ptr(&self) -> *const u8 { 409 | (**self).get_ptr() 410 | } 411 | fn get_mut_ptr(&mut self) -> *mut u8 { 412 | (**self).get_mut_ptr() 413 | } 414 | fn capacity(&self) -> usize { 415 | (**self).capacity() 416 | } 417 | } 418 | 419 | /// Shorthand for `[MaybeUninit; C]` for use with generic data structures. 420 | pub type InlineStorage = [MaybeUninit; C]; 421 | 422 | unsafe impl Storage> for InlineStorage { 423 | fn get_ptr(&self) -> *const u8 { 424 | self.as_ptr().cast() 425 | } 426 | fn get_mut_ptr(&mut self) -> *mut u8 { 427 | self.as_mut_ptr().cast() 428 | } 429 | fn capacity(&self) -> usize { 430 | C 431 | } 432 | } 433 | --------------------------------------------------------------------------------
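The `object` module above also supports payloads with stricter alignment than the `InlineObject` default of 8 by naming the storage explicitly. A rough sketch of that, assuming a nightly toolchain with the crate's `unstable` feature (the module leans on nightly-only pointer APIs such as `set_ptr_value`); the `Simd` type and `AsF32s` trait here are hypothetical stand-ins:

```
use coca::object::{Align16, InlineStorage, Object};

// A 16-byte-aligned payload; the default `InlineObject` alias only
// guarantees 8-byte alignment, so it could not hold this value.
#[repr(align(16))]
struct Simd([f32; 4]);

trait AsF32s {
    fn values(&self) -> &[f32];
}

impl AsF32s for Simd {
    fn values(&self) -> &[f32] {
        &self.0
    }
}

// Spell out the storage so that the guaranteed alignment is 16 bytes,
// as described in the `InlineObject` documentation above.
let obj: Object<dyn AsF32s, InlineStorage<Align16, 16>> = Object::new(Simd([1.0; 4]));
assert_eq!(obj.values(), &[1.0; 4]);
```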
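For contrast with the `compile_fail` example on `index_type!` in `src/storage.rs`, here is a working sketch of the same pattern. It reuses the imports shown there (treating the `coca::vec::Vec` path as given) and assumes `i32` elements for the backing arrays, omitting only the cross-typed indexing that the original flags as a compile error:

```
use coca::{index_type, vec::Vec};
use core::mem::MaybeUninit;

index_type! {
    pub IndexA: u8;
    IndexB: u8;
}

let mut backing_a = [MaybeUninit::<i32>::uninit(); 20];
let mut backing_b = [MaybeUninit::<i32>::uninit(); 30];

let mut vec_a = Vec::<_, _, IndexA>::from(&mut backing_a[..]);
for i in 0..20 { vec_a.push(i); }

let mut vec_b = Vec::<_, _, IndexB>::from(&mut backing_b[..]);
for i in 0..30 { vec_b.push(i * 2); }

// Each vector only accepts its own index type, so mixing up indices
// between the two buffers is caught at compile time.
assert_eq!(vec_a[IndexA(10)], 10);
assert_eq!(vec_b[IndexB(15)], 30);
```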
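Finally, the borrowed-slice and heap-allocated storage flavors answer `capacity` through the same `Storage` interface, regardless of where the memory lives. A small sketch, assuming the crate's `alloc` feature is enabled for the `AllocStorage` half:

```
use core::mem::MaybeUninit;
use coca::storage::{AllocStorage, ArrayLayout, Storage};

// Caller-provided slice storage: the capacity is simply the slice length.
let mut backing = [MaybeUninit::<u64>::uninit(); 12];
let slice = &mut backing[..];
assert_eq!(Storage::<ArrayLayout<u64>>::capacity(&slice), 12);

// Heap-backed storage (requires the `alloc` feature): the capacity is
// whatever was requested from the global allocator.
let heap = AllocStorage::<ArrayLayout<u64>>::with_capacity(32);
assert_eq!(heap.capacity(), 32);
```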