├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .vscode └── settings.json ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE ├── README.md ├── benches ├── Cargo.toml ├── README.md └── benches.rs └── src ├── lib.rs └── test.rs /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - master 9 | 10 | env: 11 | RUST_BACKTRACE: short 12 | CARGO_INCREMENTAL: 0 13 | CARGO_NET_RETRY: 10 14 | RUSTUP_MAX_RETRIES: 10 15 | RUSTFLAGS: -Dwarnings 16 | RUSTDOCFLAGS: -Dwarnings 17 | 18 | jobs: 19 | # Test on a few targets, stable, beta 20 | test: 21 | name: Tests - ${{ matrix.name }} 22 | runs-on: ${{ matrix.os }} 23 | strategy: 24 | fail-fast: false 25 | matrix: 26 | name: 27 | - linux 28 | - macos 29 | - win64 30 | - win32 31 | - beta 32 | - nightly 33 | - msrv 34 | include: 35 | - name: linux 36 | os: ubuntu-latest 37 | toolchain: stable 38 | - name: macos 39 | os: macos-latest 40 | toolchain: stable 41 | - name: win64 42 | os: windows-latest 43 | toolchain: stable 44 | - name: win32 45 | os: windows-latest 46 | toolchain: stable-i686-msvc 47 | - name: beta 48 | os: ubuntu-latest 49 | toolchain: beta 50 | - name: nightly 51 | os: ubuntu-latest 52 | toolchain: nightly 53 | - name: msrv 54 | os: ubuntu-latest 55 | toolchain: '1.42.0' 56 | steps: 57 | - uses: actions/checkout@v3 58 | - uses: dtolnay/rust-toolchain@master 59 | with: 60 | toolchain: ${{ matrix.toolchain }} 61 | - run: cargo check --verbose 62 | - run: cargo test --verbose 63 | - run: cargo build --no-default-features 64 | 65 | # Check formatting 66 | rustfmt: 67 | name: Check formatting 68 | runs-on: ubuntu-latest 69 | steps: 70 | - uses: actions/checkout@v3 71 | - uses: dtolnay/rust-toolchain@stable 72 | with: 73 | components: rustfmt 74 | - run: cargo fmt --all -- --check 75 | - run: cargo fmt --all -- --check 76 | working-directory: ./benches 77 | 78 | # linkcheck 
docs (we have `-Dwarnings` in RUSTFLAGS and RUSTDOCFLAGS above) 79 | doc: 80 | name: Check documentation 81 | runs-on: ubuntu-latest 82 | steps: 83 | - uses: actions/checkout@v3 84 | - uses: dtolnay/rust-toolchain@stable 85 | - run: cargo doc --all-features 86 | 87 | miri: 88 | name: Miri 89 | runs-on: ubuntu-latest 90 | steps: 91 | - uses: actions/checkout@v3 92 | - uses: dtolnay/rust-toolchain@nightly 93 | with: 94 | components: miri, rust-src 95 | - run: cargo miri test --all-features 96 | 97 | benches: 98 | name: Benchmarks 99 | runs-on: ubuntu-latest 100 | steps: 101 | - uses: actions/checkout@v3 102 | - uses: dtolnay/rust-toolchain@stable 103 | # Use a cache for this both because the builds are slow, and because it 104 | # allows criterion to take a (very low quality) guess about perf changes. 105 | - name: Cache 106 | uses: actions/cache@v3 107 | with: 108 | path: | 109 | benches/target 110 | ~/.cargo/registry 111 | ~/.cargo/git 112 | key: ${{ runner.os }}-benches-${{ hashFiles('**/Cargo.toml') }} 113 | restore-keys: | 114 | ${{ runner.os }}-benches-${{ hashFiles('**/Cargo.toml') }} 115 | ${{ runner.os }}-benches- 116 | ${{ runner.os }}- 117 | - name: Build benchmarks 118 | run: cargo bench --no-run 119 | working-directory: ./benches 120 | - name: Run benchmarks 121 | run: cargo bench 122 | working-directory: ./benches 123 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | // Benchmarks are in a separate workspace for reasons documented in 3 | // `benches/README.md`, so `rust-analyzer` needs some help finding it. 
4 | "rust-analyzer.linkedProjects": ["Cargo.toml", "benches/Cargo.toml"] 5 | } -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## Unreleased 2 | 3 | Released YYYY/MM/DD. 4 | 5 | ### Added 6 | 7 | * TODO (or remove section if none) 8 | 9 | ### Changed 10 | 11 | * TODO (or remove section if none) 12 | 13 | ### Deprecated 14 | 15 | * TODO (or remove section if none) 16 | 17 | ### Removed 18 | 19 | * TODO (or remove section if none) 20 | 21 | ### Fixed 22 | 23 | * TODO (or remove section if none) 24 | 25 | ### Security 26 | 27 | * TODO (or remove section if none) 28 | 29 | -------------------------------------------------------------------------------- 30 | 31 | ## 2.0.2 32 | 33 | Released 2023/01/08. 34 | 35 | ### Fixed 36 | 37 | * Various fixes for use under Miri (soundness and provenance fixes). 38 | 39 | ## 2.0.1 40 | 41 | Released 2020/01/10. 42 | 43 | ### Fixed 44 | 45 | * Support `#![no_std]` on stable Rust. 46 | 47 | -------------------------------------------------------------------------------- 48 | 49 | ## 2.0.0 50 | 51 | Released 2019/12/03. 52 | 53 | ### Fixed 54 | 55 | * Fixed some intra-documentation URLs. 56 | 57 | -------------------------------------------------------------------------------- 58 | 59 | ## 2.0.0-rc1 60 | 61 | Released 2019/11/26. 62 | 63 | Unless any issues are discovered or raised, we will release version 2.0.0 soon. 64 | 65 | ### Added 66 | 67 | * Added `alloc_str` to `Arena`, to be able to allocate string slices. 68 | 69 | ### Changed 70 | 71 | * The minimum supported Rust version is now 1.36.0. 72 | * `alloc_uninitialized` returns `&mut [MaybeUninit<T>]` instead of `*mut [T]`, 73 | which is less prone to undefined behavior. 74 | 75 | -------------------------------------------------------------------------------- 76 | 77 | ## 1.7.0 78 | 79 | Released 2019/10/31. 
*Spooky!* 80 | 81 | ### Added 82 | 83 | * Added a `len` method to count how many items are in an arena. 84 | 85 | ### Fixed 86 | 87 | * Fixed some theoretical overflows. 88 | 89 | -------------------------------------------------------------------------------- 90 | 91 | ## 1.6.1 92 | 93 | Released 2019/09/17. 94 | 95 | ### Fixed 96 | 97 | * Now compiles on old stable Rust versions again, instead of just new stable 98 | Rust versions. From here on out, we'll promise that 1.X will continue to 99 | compile on rustc versions >= 1.32. 100 | 101 | -------------------------------------------------------------------------------- 102 | 103 | ## 1.6.0 104 | 105 | Released 2019/09/09. 106 | 107 | ### Added 108 | 109 | * Added the `Arena::iter_mut` method for mutably iterating over an arena's 110 | contents. [See #29 for 111 | details.](https://github.com/thomcc/rust-typed-arena/pull/29) 112 | 113 | -------------------------------------------------------------------------------- 114 | 115 | ## 1.5.0 116 | 117 | Released 2019/08/02. 118 | 119 | ### Added 120 | 121 | * `Arena` now implements `Default`. 122 | 123 | ### Fixed 124 | 125 | * Introduced an internal fast path for allocation, improving performance. 126 | * Tests now run cleanly on Miri. There was previously a technicality where 127 | the stacked borrow rules were not being followed. 128 | 129 | -------------------------------------------------------------------------------- 130 | 131 | ## 1.4.1 132 | 133 | Released 2018/06/29. 134 | 135 | ### Added 136 | 137 | * Added more documentation comments and examples. 138 | 139 | -------------------------------------------------------------------------------- 140 | 141 | ## 1.4.0 142 | 143 | Released 2018/06/21. 144 | 145 | ### Added 146 | 147 | * Added a new, on-by-default feature named "std". Disabling this feature allows 148 | the crate to be used in `#![no_std]` environments. 
[#12][] [#15][] 149 | 150 | [#12]: https://github.com/thomcc/rust-typed-arena/pull/12 151 | [#15]: https://github.com/thomcc/rust-typed-arena/pull/15 152 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "typed-arena" 3 | version = "2.0.2" 4 | authors = ["The typed-arena developers"] 5 | license = "MIT" 6 | description = "The arena, a fast but limited type of allocator." 7 | documentation = "https://docs.rs/typed-arena" 8 | repository = "https://github.com/thomcc/rust-typed-arena" 9 | categories = ["memory-management", "no-std"] 10 | keywords = ["arena"] 11 | readme = "./README.md" 12 | exclude = ["benchmarks"] 13 | autobenches = false 14 | 15 | [lib] 16 | name = "typed_arena" 17 | path = "src/lib.rs" 18 | 19 | [features] 20 | default = ["std"] 21 | std = [] 22 | 23 | [workspace] 24 | exclude = ["benches"] 25 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 The typed-arena developers 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `typed-arena` 2 | 3 | [![](https://docs.rs/typed-arena/badge.svg)](https://docs.rs/typed-arena) 4 | [![](https://img.shields.io/crates/v/typed-arena.svg)](https://crates.io/crates/typed-arena) 5 | [![](https://img.shields.io/crates/d/typed-arena.svg)](https://crates.io/crates/typed-arena) 6 | [![GitHub Actions Build Status](https://github.com/thomcc/rust-typed-arena/workflows/CI/badge.svg)](https://github.com/thomcc/rust-typed-arena/actions) 7 | 8 | **A fast (but limited) allocation arena for values of a single type.** 9 | 10 | Allocated objects are destroyed all at once, when the arena itself is destroyed. 11 | There is no deallocation of individual objects while the arena itself is still 12 | alive. The flipside is that allocation is fast: typically just a vector push. 13 | 14 | There is also a method `into_vec()` to recover ownership of allocated objects 15 | when the arena is no longer required, instead of destroying everything. 
16 | 17 | ## Example 18 | 19 | ```rust 20 | use typed_arena::Arena; 21 | 22 | struct Monster { 23 | level: u32, 24 | } 25 | 26 | let monsters = Arena::new(); 27 | 28 | let goku = monsters.alloc(Monster { level: 9001 }); 29 | assert!(goku.level > 9000); 30 | ``` 31 | 32 | ## Safe Cycles 33 | 34 | All allocated objects get the same lifetime, so you can safely create cycles 35 | between them. This can be useful for certain data structures, such as graphs and 36 | trees with parent pointers. 37 | 38 | ```rust 39 | use std::cell::Cell; 40 | use typed_arena::Arena; 41 | 42 | struct CycleParticipant<'a> { 43 | other: Cell<Option<&'a CycleParticipant<'a>>>, 44 | } 45 | 46 | let arena = Arena::new(); 47 | 48 | let a = arena.alloc(CycleParticipant { other: Cell::new(None) }); 49 | let b = arena.alloc(CycleParticipant { other: Cell::new(None) }); 50 | 51 | a.other.set(Some(b)); 52 | b.other.set(Some(a)); 53 | ``` 54 | 55 | ## Alternatives 56 | 57 | ### Need to allocate many different types of values? 58 | 59 | Use multiple arenas if you have only a couple different types or try 60 | [`bumpalo`](https://crates.io/crates/bumpalo), which is a bump-allocation arena 61 | that can allocate heterogeneous types of values. 62 | 63 | ### Want allocation to return identifiers instead of references and dealing with references and lifetimes everywhere? 64 | 65 | Check out [`id-arena`](https://crates.io/crates/id-arena) or 66 | [`generational-arena`](https://crates.io/crates/generational-arena). 67 | 68 | ### Need to deallocate individual objects at a time? 69 | 70 | Check out [`generational-arena`](https://crates.io/crates/generational-arena) 71 | for an arena-style crate or look for a more traditional allocator. 
72 | -------------------------------------------------------------------------------- /benches/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "typed-arena-benchmarks" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | criterion = "0.4.0" 9 | typed-arena = { path = ".." } 10 | 11 | [[bench]] 12 | name = "benches" 13 | path = "benches.rs" 14 | harness = false 15 | -------------------------------------------------------------------------------- /benches/README.md: -------------------------------------------------------------------------------- 1 | # `typed-arena-benchmarks` 2 | 3 | ## Why is this a separate workspace? 4 | 5 | This is in a separate workspace to avoid issues with having criterion as a 6 | dev-dependency. Specifically: 7 | 8 | 1. Criterion and its transitive dependencies have a much higher MSRV. Specifically, high enough that cargo fails to parse their manifest toml when building, so we can't even build anything that requires dev-dependencies, such as tests. 9 | 10 | 2. Criterion is slow to build. Having it as a dev-dependency means we need to build it in order to run tests. Some of the CI runners are very slow, and this dominates their time. It also slows down local builds for little benefit. 11 | 12 | In exchange, the repository setup is slightly weirder, and so users may not realize there are two things to check. 
13 | -------------------------------------------------------------------------------- /benches/benches.rs: -------------------------------------------------------------------------------- 1 | use criterion::{BenchmarkId, Criterion}; 2 | 3 | #[derive(Default)] 4 | struct Small(usize); 5 | 6 | #[derive(Default)] 7 | struct Big([usize; 32]); 8 | 9 | fn allocate(n: usize) { 10 | let arena = typed_arena::Arena::new(); 11 | for _ in 0..n { 12 | let val: &mut T = arena.alloc(Default::default()); 13 | criterion::black_box(val); 14 | } 15 | } 16 | 17 | fn criterion_benchmark(c: &mut Criterion) { 18 | let mut group = c.benchmark_group("allocate"); 19 | for n in 1..5 { 20 | let n = n * 1000; 21 | group.throughput(criterion::Throughput::Elements(n as u64)); 22 | group.bench_with_input(BenchmarkId::new("allocate-small", n), &n, |b, &n| { 23 | b.iter(|| allocate::(n)) 24 | }); 25 | group.bench_with_input(BenchmarkId::new("allocate-big", n), &n, |b, &n| { 26 | b.iter(|| allocate::(n)) 27 | }); 28 | } 29 | } 30 | 31 | criterion::criterion_group!(benches, criterion_benchmark); 32 | criterion::criterion_main!(benches); 33 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! The arena, a fast but limited type of allocator. 2 | //! 3 | //! **A fast (but limited) allocation arena for values of a single type.** 4 | //! 5 | //! Allocated objects are destroyed all at once, when the arena itself is 6 | //! destroyed. There is no deallocation of individual objects while the arena 7 | //! itself is still alive. The flipside is that allocation is fast: typically 8 | //! just a vector push. 9 | //! 10 | //! There is also a method `into_vec()` to recover ownership of allocated 11 | //! objects when the arena is no longer required, instead of destroying 12 | //! everything. 13 | //! 14 | //! ## Example 15 | //! 16 | //! ``` 17 | //! use typed_arena::Arena; 18 | //! 
19 | //! struct Monster { 20 | //! level: u32, 21 | //! } 22 | //! 23 | //! let monsters = Arena::new(); 24 | //! 25 | //! let goku = monsters.alloc(Monster { level: 9001 }); 26 | //! assert!(goku.level > 9000); 27 | //! ``` 28 | //! 29 | //! ## Safe Cycles 30 | //! 31 | //! All allocated objects get the same lifetime, so you can safely create cycles 32 | //! between them. This can be useful for certain data structures, such as graphs 33 | //! and trees with parent pointers. 34 | //! 35 | //! ``` 36 | //! use std::cell::Cell; 37 | //! use typed_arena::Arena; 38 | //! 39 | //! struct CycleParticipant<'a> { 40 | //! other: Cell>>, 41 | //! } 42 | //! 43 | //! let arena = Arena::new(); 44 | //! 45 | //! let a = arena.alloc(CycleParticipant { other: Cell::new(None) }); 46 | //! let b = arena.alloc(CycleParticipant { other: Cell::new(None) }); 47 | //! 48 | //! a.other.set(Some(b)); 49 | //! b.other.set(Some(a)); 50 | //! ``` 51 | 52 | // Potential optimizations: 53 | // 1) add and stabilize a method for in-place reallocation of vecs. 54 | // 2) add and stabilize placement new. 55 | // 3) use an iterator. This may add far too much unsafe code. 56 | 57 | #![deny(missing_docs)] 58 | #![cfg_attr(not(any(feature = "std", test)), no_std)] 59 | 60 | #[cfg(not(feature = "std"))] 61 | extern crate alloc; 62 | 63 | #[cfg(any(feature = "std", test))] 64 | extern crate core; 65 | 66 | #[cfg(not(feature = "std"))] 67 | use alloc::vec::Vec; 68 | 69 | use core::cell::RefCell; 70 | use core::cmp; 71 | use core::iter; 72 | use core::mem; 73 | use core::ptr; 74 | use core::slice; 75 | use core::str; 76 | 77 | use mem::MaybeUninit; 78 | 79 | #[cfg(test)] 80 | mod test; 81 | 82 | // Initial size in bytes. 83 | const INITIAL_SIZE: usize = 1024; 84 | // Minimum capacity. Must be larger than 0. 85 | const MIN_CAPACITY: usize = 1; 86 | 87 | /// An arena of objects of type `T`. 
88 | /// 89 | /// ## Example 90 | /// 91 | /// ``` 92 | /// use typed_arena::Arena; 93 | /// 94 | /// struct Monster { 95 | /// level: u32, 96 | /// } 97 | /// 98 | /// let monsters = Arena::new(); 99 | /// 100 | /// let vegeta = monsters.alloc(Monster { level: 9001 }); 101 | /// assert!(vegeta.level > 9000); 102 | /// ``` 103 | pub struct Arena { 104 | chunks: RefCell>, 105 | } 106 | 107 | struct ChunkList { 108 | current: Vec, 109 | rest: Vec>, 110 | } 111 | 112 | impl Arena { 113 | /// Construct a new arena. 114 | /// 115 | /// ## Example 116 | /// 117 | /// ``` 118 | /// use typed_arena::Arena; 119 | /// 120 | /// let arena = Arena::new(); 121 | /// # arena.alloc(1); 122 | /// ``` 123 | pub fn new() -> Arena { 124 | let size = cmp::max(1, mem::size_of::()); 125 | Arena::with_capacity(INITIAL_SIZE / size) 126 | } 127 | 128 | /// Construct a new arena with capacity for `n` values pre-allocated. 129 | /// 130 | /// ## Example 131 | /// 132 | /// ``` 133 | /// use typed_arena::Arena; 134 | /// 135 | /// let arena = Arena::with_capacity(1337); 136 | /// # arena.alloc(1); 137 | /// ``` 138 | pub fn with_capacity(n: usize) -> Arena { 139 | let n = cmp::max(MIN_CAPACITY, n); 140 | Arena { 141 | chunks: RefCell::new(ChunkList { 142 | current: Vec::with_capacity(n), 143 | rest: Vec::new(), 144 | }), 145 | } 146 | } 147 | 148 | /// Return the size of the arena 149 | /// 150 | /// This is useful for using the size of previous typed arenas to build new typed arenas with large enough spaces. 
151 | /// 152 | /// ## Example 153 | /// 154 | /// ``` 155 | /// use typed_arena::Arena; 156 | /// 157 | /// let arena = Arena::with_capacity(0); 158 | /// let a = arena.alloc(1); 159 | /// let b = arena.alloc(2); 160 | /// 161 | /// assert_eq!(arena.len(), 2); 162 | /// ``` 163 | pub fn len(&self) -> usize { 164 | let chunks = self.chunks.borrow(); 165 | 166 | let mut res = 0; 167 | for vec in chunks.rest.iter() { 168 | res += vec.len() 169 | } 170 | 171 | res + chunks.current.len() 172 | } 173 | 174 | /// Allocates a value in the arena, and returns a mutable reference 175 | /// to that value. 176 | /// 177 | /// ## Example 178 | /// 179 | /// ``` 180 | /// use typed_arena::Arena; 181 | /// 182 | /// let arena = Arena::new(); 183 | /// let x = arena.alloc(42); 184 | /// assert_eq!(*x, 42); 185 | /// ``` 186 | #[inline] 187 | pub fn alloc(&self, value: T) -> &mut T { 188 | self.alloc_fast_path(value) 189 | .unwrap_or_else(|value| self.alloc_slow_path(value)) 190 | } 191 | 192 | #[inline] 193 | fn alloc_fast_path(&self, value: T) -> Result<&mut T, T> { 194 | let mut chunks = self.chunks.borrow_mut(); 195 | let len = chunks.current.len(); 196 | if len < chunks.current.capacity() { 197 | chunks.current.push(value); 198 | // Avoid going through `Vec::deref_mut`, which overlaps 199 | // other references we have already handed out! 200 | debug_assert!(len < chunks.current.len()); // bounds check 201 | Ok(unsafe { &mut *chunks.current.as_mut_ptr().add(len) }) 202 | } else { 203 | Err(value) 204 | } 205 | } 206 | 207 | fn alloc_slow_path(&self, value: T) -> &mut T { 208 | &mut self.alloc_extend(iter::once(value))[0] 209 | } 210 | 211 | /// Uses the contents of an iterator to allocate values in the arena. 212 | /// Returns a mutable slice that contains these values. 
213 | /// 214 | /// ## Example 215 | /// 216 | /// ``` 217 | /// use typed_arena::Arena; 218 | /// 219 | /// let arena = Arena::new(); 220 | /// let abc = arena.alloc_extend("abcdefg".chars().take(3)); 221 | /// assert_eq!(abc, ['a', 'b', 'c']); 222 | /// ``` 223 | pub fn alloc_extend(&self, iterable: I) -> &mut [T] 224 | where 225 | I: IntoIterator, 226 | { 227 | let mut iter = iterable.into_iter(); 228 | 229 | let mut chunks = self.chunks.borrow_mut(); 230 | 231 | let iter_min_len = iter.size_hint().0; 232 | let mut next_item_index; 233 | debug_assert!( 234 | chunks.current.capacity() >= chunks.current.len(), 235 | "capacity is always greater than or equal to len, so we don't need to worry about underflow" 236 | ); 237 | if iter_min_len > chunks.current.capacity() - chunks.current.len() { 238 | chunks.reserve(iter_min_len); 239 | chunks.current.extend(iter); 240 | next_item_index = 0; 241 | } else { 242 | next_item_index = chunks.current.len(); 243 | let mut i = 0; 244 | while let Some(elem) = iter.next() { 245 | if chunks.current.len() == chunks.current.capacity() { 246 | // The iterator was larger than we could fit into the current chunk. 247 | let chunks = &mut *chunks; 248 | // Create a new chunk into which we can freely push the entire iterator into 249 | chunks.reserve(i + 1); 250 | let previous_chunk = chunks.rest.last_mut().unwrap(); 251 | let previous_chunk_len = previous_chunk.len(); 252 | // Move any elements we put into the previous chunk into this new chunk 253 | chunks 254 | .current 255 | .extend(previous_chunk.drain(previous_chunk_len - i..)); 256 | chunks.current.push(elem); 257 | // And the remaining elements in the iterator 258 | chunks.current.extend(iter); 259 | next_item_index = 0; 260 | break; 261 | } else { 262 | chunks.current.push(elem); 263 | } 264 | i += 1; 265 | } 266 | } 267 | 268 | // Extend the lifetime from that of `chunks_borrow` to that of `self`. 
269 | // This is OK because we’re careful to never move items 270 | // by never pushing to inner `Vec`s beyond their initial capacity. 271 | // The returned reference is unique (`&mut`): 272 | // the `Arena` never gives away references to existing items. 273 | unsafe { 274 | let new_len = chunks.current.len() - next_item_index; 275 | slice::from_raw_parts_mut(chunks.current.as_mut_ptr().add(next_item_index), new_len) 276 | } 277 | } 278 | 279 | /// Allocates space for a given number of values, but doesn't initialize it. 280 | /// 281 | /// ## Safety 282 | /// 283 | /// After calling this method, the arena considers the elements initialized. If you fail to 284 | /// initialize them (which includes because of panicking during the initialization), the arena 285 | /// will run destructors on the uninitialized memory. Therefore, you must initialize them. 286 | /// 287 | /// Considering how easy it is to cause undefined behaviour using this, you're advised to 288 | /// prefer the other (safe) methods, like [`alloc_extend`][Arena::alloc_extend]. 289 | /// 290 | /// ## Example 291 | /// 292 | /// ```rust 293 | /// use std::mem::{self, MaybeUninit}; 294 | /// use std::ptr; 295 | /// use typed_arena::Arena; 296 | /// 297 | /// // Transmute from MaybeUninit slice to slice of initialized T. 298 | /// // It is a separate function to preserve the lifetime of the reference. 
299 | /// unsafe fn transmute_uninit(r: &mut [MaybeUninit]) -> &mut [A] { 300 | /// mem::transmute(r) 301 | /// } 302 | /// 303 | /// let arena: Arena = Arena::new(); 304 | /// let slice: &mut [bool]; 305 | /// unsafe { 306 | /// let uninitialized = arena.alloc_uninitialized(10); 307 | /// for elem in uninitialized.iter_mut() { 308 | /// ptr::write(elem.as_mut_ptr(), true); 309 | /// } 310 | /// slice = transmute_uninit(uninitialized); 311 | /// } 312 | /// ``` 313 | /// 314 | /// ## Alternative allocation pattern 315 | /// 316 | /// To avoid the problem of dropping assumed to be initialized elements on panic, it is also 317 | /// possible to combine the [`reserve_extend`][Arena::reserve_extend] with 318 | /// [`uninitialized_array`][Arena::uninitialized_array], initialize the elements and confirm 319 | /// them by this method. In such case, when there's a panic during initialization, the already 320 | /// initialized elements would leak but it wouldn't cause UB. 321 | /// 322 | /// ```rust 323 | /// use std::mem::{self, MaybeUninit}; 324 | /// use std::ptr; 325 | /// use typed_arena::Arena; 326 | /// 327 | /// unsafe fn transmute_uninit(r: &mut [MaybeUninit]) -> &mut [A] { 328 | /// mem::transmute(r) 329 | /// } 330 | /// 331 | /// const COUNT: usize = 2; 332 | /// 333 | /// let arena: Arena = Arena::new(); 334 | /// 335 | /// arena.reserve_extend(COUNT); 336 | /// let slice: &mut [String]; 337 | /// unsafe { 338 | /// // Perform initialization before we claim the memory. 339 | /// let uninitialized = arena.uninitialized_array(); 340 | /// assert!((*uninitialized).len() >= COUNT); // Ensured by the reserve_extend 341 | /// for elem in &mut (*uninitialized)[..COUNT] { 342 | /// ptr::write(elem.as_mut_ptr(), "Hello".to_owned()); 343 | /// } 344 | /// let addr = (*uninitialized).as_ptr() as usize; 345 | /// 346 | /// // The alloc_uninitialized returns the same memory, but "confirms" its allocation. 
347 | /// slice = transmute_uninit(arena.alloc_uninitialized(COUNT)); 348 | /// assert_eq!(addr, slice.as_ptr() as usize); 349 | /// assert_eq!(slice, &["Hello".to_owned(), "Hello".to_owned()]); 350 | /// } 351 | /// ``` 352 | pub unsafe fn alloc_uninitialized(&self, num: usize) -> &mut [MaybeUninit] { 353 | let mut chunks = self.chunks.borrow_mut(); 354 | 355 | debug_assert!( 356 | chunks.current.capacity() >= chunks.current.len(), 357 | "capacity is always greater than or equal to len, so we don't need to worry about underflow" 358 | ); 359 | if num > chunks.current.capacity() - chunks.current.len() { 360 | chunks.reserve(num); 361 | } 362 | 363 | // At this point, the current chunk must have free capacity. 364 | let next_item_index = chunks.current.len(); 365 | chunks.current.set_len(next_item_index + num); 366 | 367 | // Go through pointers, to make sure we never create a reference to uninitialized T. 368 | let start = chunks.current.as_mut_ptr().offset(next_item_index as isize); 369 | let start_uninit = start as *mut MaybeUninit; 370 | slice::from_raw_parts_mut(start_uninit, num) 371 | } 372 | 373 | /// Makes sure there's enough continuous space for at least `num` elements. 374 | /// 375 | /// This may save some work if called before [`alloc_extend`][Arena::alloc_extend]. It also 376 | /// allows somewhat safer use pattern of [`alloc_uninitialized`][Arena::alloc_uninitialized]. 377 | /// On the other hand this might waste up to `n - 1` elements of space. In case new allocation 378 | /// is needed, the unused ones in current chunk are never used. 
379 | pub fn reserve_extend(&self, num: usize) { 380 | let mut chunks = self.chunks.borrow_mut(); 381 | 382 | debug_assert!( 383 | chunks.current.capacity() >= chunks.current.len(), 384 | "capacity is always greater than or equal to len, so we don't need to worry about underflow" 385 | ); 386 | if num > chunks.current.capacity() - chunks.current.len() { 387 | chunks.reserve(num); 388 | } 389 | } 390 | 391 | /// Returns unused space. 392 | /// 393 | /// *This unused space is still not considered "allocated".* Therefore, it 394 | /// won't be dropped unless there are further calls to `alloc`, 395 | /// [`alloc_uninitialized`][Arena::alloc_uninitialized], or 396 | /// [`alloc_extend`][Arena::alloc_extend] which is why the method is safe. 397 | /// 398 | /// It returns a raw pointer to avoid creating multiple mutable references to the same place. 399 | /// It is up to the caller not to dereference it after any of the `alloc_` methods are called. 400 | pub fn uninitialized_array(&self) -> *mut [MaybeUninit] { 401 | let mut chunks = self.chunks.borrow_mut(); 402 | let len = chunks.current.capacity() - chunks.current.len(); 403 | let next_item_index = chunks.current.len(); 404 | 405 | unsafe { 406 | // Go through pointers, to make sure we never create a reference to uninitialized T. 407 | let start = chunks.current.as_mut_ptr().offset(next_item_index as isize); 408 | let start_uninit = start as *mut MaybeUninit; 409 | ptr::slice_from_raw_parts_mut(start_uninit, len) 410 | } 411 | } 412 | 413 | /// Convert this `Arena` into a `Vec`. 414 | /// 415 | /// Items in the resulting `Vec` appear in the order that they were 416 | /// allocated in. 
417 | /// 418 | /// ## Example 419 | /// 420 | /// ``` 421 | /// use typed_arena::Arena; 422 | /// 423 | /// let arena = Arena::new(); 424 | /// 425 | /// arena.alloc("a"); 426 | /// arena.alloc("b"); 427 | /// arena.alloc("c"); 428 | /// 429 | /// let easy_as_123 = arena.into_vec(); 430 | /// 431 | /// assert_eq!(easy_as_123, vec!["a", "b", "c"]); 432 | /// ``` 433 | pub fn into_vec(self) -> Vec { 434 | let mut chunks = self.chunks.into_inner(); 435 | // keep order of allocation in the resulting Vec 436 | let n = chunks 437 | .rest 438 | .iter() 439 | .fold(chunks.current.len(), |a, v| a + v.len()); 440 | let mut result = Vec::with_capacity(n); 441 | for mut vec in chunks.rest { 442 | result.append(&mut vec); 443 | } 444 | result.append(&mut chunks.current); 445 | result 446 | } 447 | 448 | /// Returns an iterator that allows modifying each value. 449 | /// 450 | /// Items are yielded in the order that they were allocated. 451 | /// 452 | /// ## Example 453 | /// 454 | /// ``` 455 | /// use typed_arena::Arena; 456 | /// 457 | /// #[derive(Debug, PartialEq, Eq)] 458 | /// struct Point { x: i32, y: i32 }; 459 | /// 460 | /// let mut arena = Arena::new(); 461 | /// 462 | /// arena.alloc(Point { x: 0, y: 0 }); 463 | /// arena.alloc(Point { x: 1, y: 1 }); 464 | /// 465 | /// for point in arena.iter_mut() { 466 | /// point.x += 10; 467 | /// } 468 | /// 469 | /// let points = arena.into_vec(); 470 | /// 471 | /// assert_eq!(points, vec![Point { x: 10, y: 0 }, Point { x: 11, y: 1 }]); 472 | /// 473 | /// ``` 474 | /// 475 | /// ## Immutable Iteration 476 | /// 477 | /// Note that there is no corresponding `iter` method. Access to the arena's contents 478 | /// requries mutable access to the arena itself. 479 | /// 480 | /// ```compile_fail 481 | /// use typed_arena::Arena; 482 | /// 483 | /// let mut arena = Arena::new(); 484 | /// let x = arena.alloc(1); 485 | /// 486 | /// // borrow error! 
487 | /// for i in arena.iter_mut() { 488 | /// println!("i: {}", i); 489 | /// } 490 | /// 491 | /// // borrow error! 492 | /// *x = 2; 493 | /// ``` 494 | #[inline] 495 | pub fn iter_mut(&mut self) -> IterMut { 496 | let chunks = self.chunks.get_mut(); 497 | let position = if !chunks.rest.is_empty() { 498 | let index = 0; 499 | let inner_iter = chunks.rest[index].iter_mut(); 500 | // Extend the lifetime of the individual elements to that of the arena. 501 | // This is OK because we borrow the arena mutably to prevent new allocations 502 | // and we take care here to never move items inside the arena while the 503 | // iterator is alive. 504 | let inner_iter = unsafe { mem::transmute(inner_iter) }; 505 | IterMutState::ChunkListRest { index, inner_iter } 506 | } else { 507 | // Extend the lifetime of the individual elements to that of the arena. 508 | let iter = unsafe { mem::transmute(chunks.current.iter_mut()) }; 509 | IterMutState::ChunkListCurrent { iter } 510 | }; 511 | IterMut { 512 | chunks, 513 | state: position, 514 | } 515 | } 516 | } 517 | 518 | impl Arena { 519 | /// Allocates a string slice and returns a mutable reference to it. 520 | /// 521 | /// This is on `Arena`, because string slices use byte slices (`[u8]`) as their backing 522 | /// storage. 
523 | /// 524 | /// # Example 525 | /// 526 | /// ``` 527 | /// use typed_arena::Arena; 528 | /// 529 | /// let arena: Arena = Arena::new(); 530 | /// let hello = arena.alloc_str("Hello world"); 531 | /// assert_eq!("Hello world", hello); 532 | /// ``` 533 | #[inline] 534 | pub fn alloc_str(&self, s: &str) -> &mut str { 535 | let buffer = self.alloc_extend(s.bytes()); 536 | // Can't fail the utf8 validation, it already came in as utf8 537 | unsafe { str::from_utf8_unchecked_mut(buffer) } 538 | } 539 | } 540 | 541 | impl Default for Arena { 542 | fn default() -> Self { 543 | Self::new() 544 | } 545 | } 546 | 547 | impl ChunkList { 548 | #[inline(never)] 549 | #[cold] 550 | fn reserve(&mut self, additional: usize) { 551 | let double_cap = self 552 | .current 553 | .capacity() 554 | .checked_mul(2) 555 | .expect("capacity overflow"); 556 | let required_cap = additional 557 | .checked_next_power_of_two() 558 | .expect("capacity overflow"); 559 | let new_capacity = cmp::max(double_cap, required_cap); 560 | let chunk = mem::replace(&mut self.current, Vec::with_capacity(new_capacity)); 561 | self.rest.push(chunk); 562 | } 563 | } 564 | 565 | enum IterMutState<'a, T> { 566 | ChunkListRest { 567 | index: usize, 568 | inner_iter: slice::IterMut<'a, T>, 569 | }, 570 | ChunkListCurrent { 571 | iter: slice::IterMut<'a, T>, 572 | }, 573 | } 574 | 575 | /// Mutable arena iterator. 576 | /// 577 | /// This struct is created by the [`iter_mut`](struct.Arena.html#method.iter_mut) method on [Arenas](struct.Arena.html). 
578 | pub struct IterMut<'a, T: 'a> { 579 | chunks: &'a mut ChunkList, 580 | state: IterMutState<'a, T>, 581 | } 582 | 583 | impl<'a, T> Iterator for IterMut<'a, T> { 584 | type Item = &'a mut T; 585 | fn next(&mut self) -> Option<&'a mut T> { 586 | loop { 587 | self.state = match self.state { 588 | IterMutState::ChunkListRest { 589 | mut index, 590 | ref mut inner_iter, 591 | } => { 592 | match inner_iter.next() { 593 | Some(item) => return Some(item), 594 | None => { 595 | index += 1; 596 | if index < self.chunks.rest.len() { 597 | let inner_iter = self.chunks.rest[index].iter_mut(); 598 | // Extend the lifetime of the individual elements to that of the arena. 599 | let inner_iter = unsafe { mem::transmute(inner_iter) }; 600 | IterMutState::ChunkListRest { index, inner_iter } 601 | } else { 602 | let iter = self.chunks.current.iter_mut(); 603 | // Extend the lifetime of the individual elements to that of the arena. 604 | let iter = unsafe { mem::transmute(iter) }; 605 | IterMutState::ChunkListCurrent { iter } 606 | } 607 | } 608 | } 609 | } 610 | IterMutState::ChunkListCurrent { ref mut iter } => return iter.next(), 611 | }; 612 | } 613 | } 614 | 615 | fn size_hint(&self) -> (usize, Option) { 616 | let current_len = self.chunks.current.len(); 617 | let current_cap = self.chunks.current.capacity(); 618 | if self.chunks.rest.is_empty() { 619 | (current_len, Some(current_len)) 620 | } else { 621 | let rest_len = self.chunks.rest.len(); 622 | let last_chunk_len = self 623 | .chunks 624 | .rest 625 | .last() 626 | .map(|chunk| chunk.len()) 627 | .unwrap_or(0); 628 | 629 | let min = current_len + last_chunk_len; 630 | let max = min + (rest_len * current_cap / rest_len); 631 | 632 | (min, Some(max)) 633 | } 634 | } 635 | } 636 | -------------------------------------------------------------------------------- /src/test.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use std::cell::Cell; 3 | use std::mem; 4 | use 
use std::panic::{self, AssertUnwindSafe};
use std::ptr;

/// Bumps the shared counter on drop, so tests can assert exactly *when* arena
/// contents are destroyed (only when the arena itself is dropped).
struct DropTracker<'a>(&'a Cell<u32>);
impl<'a> Drop for DropTracker<'a> {
    fn drop(&mut self) {
        self.0.set(self.0.get() + 1);
    }
}

/// A linked node that references an earlier arena allocation — checks that
/// references into the arena stay valid across further allocations.
struct Node<'a, 'b: 'a>(Option<&'a Node<'a, 'b>>, u32, DropTracker<'b>);

#[test]
fn arena_as_intended() {
    let drop_counter = Cell::new(0);
    {
        let arena = Arena::with_capacity(2);

        let mut node: &Node = arena.alloc(Node(None, 1, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 0);

        node = arena.alloc(Node(Some(node), 2, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 0);

        node = arena.alloc(Node(Some(node), 3, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        node = arena.alloc(Node(Some(node), 4, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        assert_eq!(node.1, 4);
        assert_eq!(node.0.unwrap().1, 3);
        assert_eq!(node.0.unwrap().0.unwrap().1, 2);
        assert_eq!(node.0.unwrap().0.unwrap().0.unwrap().1, 1);
        assert!(node.0.unwrap().0.unwrap().0.unwrap().0.is_none());

        assert_eq!(arena.len(), 4);

        mem::drop(node);
        // Dropping the reference must not drop the arena's contents.
        assert_eq!(drop_counter.get(), 0);

        let mut node: &Node = arena.alloc(Node(None, 5, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        node = arena.alloc(Node(Some(node), 6, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 1);

        node = arena.alloc(Node(Some(node), 7, DropTracker(&drop_counter)));
        assert_eq!(arena.chunks.borrow().rest.len(), 2);

        assert_eq!(drop_counter.get(), 0);

        assert_eq!(node.1, 7);
        assert_eq!(node.0.unwrap().1, 6);
        assert_eq!(node.0.unwrap().0.unwrap().1, 5);
        assert!(node.0.unwrap().0.unwrap().0.is_none());

        assert_eq!(drop_counter.get(), 0);
    }
    // Everything is dropped exactly once, when the arena goes out of scope.
    assert_eq!(drop_counter.get(), 7);
}

#[test]
fn ensure_into_vec_maintains_order_of_allocation() {
    let arena = Arena::with_capacity(1); // force multiple inner vecs
    for &s in &["t", "e", "s", "t"] {
        arena.alloc(String::from(s));
    }
    let vec = arena.into_vec();
    assert_eq!(vec, vec!["t", "e", "s", "t"]);
}

#[test]
fn test_zero_cap() {
    let arena = Arena::with_capacity(0);
    let a = arena.alloc(1);
    let b = arena.alloc(2);
    assert_eq!(*a, 1);
    assert_eq!(*b, 2);
    assert_eq!(arena.len(), 2);
}

#[test]
fn test_alloc_extend() {
    let arena = Arena::with_capacity(2);
    for i in 0..15 {
        let slice = arena.alloc_extend(0..i);
        for (j, &elem) in slice.iter().enumerate() {
            assert_eq!(j, elem);
        }
    }
}

#[test]
fn test_alloc_uninitialized() {
    const LIMIT: usize = 15;
    let drop_counter = Cell::new(0);
    unsafe {
        let arena: Arena<Node> = Arena::with_capacity(4);
        for i in 0..LIMIT {
            let slice = arena.alloc_uninitialized(i);
            for (j, elem) in slice.iter_mut().enumerate() {
                ptr::write(
                    elem.as_mut_ptr(),
                    Node(None, j as u32, DropTracker(&drop_counter)),
                );
            }
            assert_eq!(drop_counter.get(), 0);
        }
    }
    // Each inner loop wrote `i` nodes; all drop together with the arena.
    assert_eq!(drop_counter.get(), (0..LIMIT).fold(0, |a, e| a + e) as u32);
}

#[test]
fn test_alloc_extend_with_drop_counter() {
    let drop_counter = Cell::new(0);
    {
        let arena = Arena::with_capacity(2);
        let iter = (0..100).map(|j| Node(None, j as u32, DropTracker(&drop_counter)));
        let older_ref = Some(&arena.alloc_extend(iter)[0]);
        assert_eq!(drop_counter.get(), 0);
        let iter = (0..100).map(|j| Node(older_ref, j as u32, DropTracker(&drop_counter)));
        arena.alloc_extend(iter);
        assert_eq!(drop_counter.get(), 0);
    }
    assert_eq!(drop_counter.get(), 200);
}
| 132 | /// Test with bools. 133 | /// 134 | /// Bools, unlike integers, have invalid bit patterns. Therefore, ever having an uninitialized bool 135 | /// is insta-UB. Make sure miri doesn't find any such thing. 136 | #[test] 137 | fn test_alloc_uninitialized_bools() { 138 | const LEN: usize = 20; 139 | unsafe { 140 | let arena: Arena = Arena::with_capacity(2); 141 | let slice = arena.alloc_uninitialized(LEN); 142 | for elem in slice.iter_mut() { 143 | ptr::write(elem.as_mut_ptr(), true); 144 | } 145 | // Now it is fully initialized, we can safely transmute the slice. 146 | let slice: &mut [bool] = mem::transmute(slice); 147 | assert_eq!(&[true; LEN], slice); 148 | } 149 | } 150 | 151 | /// Check nothing bad happens by panicking during initialization of borrowed slice. 152 | #[test] 153 | fn alloc_uninitialized_with_panic() { 154 | struct Dropper(bool); 155 | 156 | impl Drop for Dropper { 157 | fn drop(&mut self) { 158 | // Just make sure we touch the value, to make sure miri would bite if it was 159 | // unitialized 160 | if self.0 { 161 | panic!(); 162 | } 163 | } 164 | } 165 | let mut reached_first_init = false; 166 | panic::catch_unwind(AssertUnwindSafe(|| unsafe { 167 | let arena: Arena = Arena::new(); 168 | arena.reserve_extend(2); 169 | let uninitialized = arena.uninitialized_array(); 170 | assert!((*uninitialized).len() >= 2); 171 | ptr::write((*uninitialized)[0].as_mut_ptr(), Dropper(false)); 172 | reached_first_init = true; 173 | panic!("To drop the arena"); 174 | // If it didn't panic, we would continue by initializing the second one and confirming by 175 | // .alloc_uninitialized(); 176 | })) 177 | .unwrap_err(); 178 | assert!(reached_first_init); 179 | } 180 | 181 | #[test] 182 | fn test_uninitialized_array() { 183 | let arena = Arena::with_capacity(2); 184 | let uninit = arena.uninitialized_array(); 185 | arena.alloc_extend(0..2); 186 | unsafe { 187 | for (&a, b) in (&*uninit).iter().zip(0..2) { 188 | assert_eq!(a.assume_init(), b); 189 | } 190 | 
assert!((&*arena.uninitialized_array()).as_ptr() != (&*uninit).as_ptr()); 191 | arena.alloc(0); 192 | let uninit = arena.uninitialized_array(); 193 | assert_eq!((&*uninit).len(), 3); 194 | } 195 | } 196 | 197 | #[test] 198 | fn dont_trust_the_iterator_size() { 199 | use std::iter::repeat; 200 | 201 | struct WrongSizeIter(I); 202 | impl Iterator for WrongSizeIter 203 | where 204 | I: Iterator, 205 | { 206 | type Item = I::Item; 207 | 208 | fn next(&mut self) -> Option { 209 | self.0.next() 210 | } 211 | 212 | fn size_hint(&self) -> (usize, Option) { 213 | (0, Some(0)) 214 | } 215 | } 216 | 217 | impl ExactSizeIterator for WrongSizeIter where I: Iterator {} 218 | 219 | let arena = Arena::with_capacity(2); 220 | arena.alloc(0); 221 | let slice = arena.alloc_extend(WrongSizeIter(repeat(1).take(1_000))); 222 | // Allocation of 1000 elements should have created a new chunk 223 | assert_eq!(arena.chunks.borrow().rest.len(), 1); 224 | assert_eq!(slice.len(), 1000); 225 | } 226 | 227 | #[test] 228 | fn arena_is_send() { 229 | fn assert_is_send(_: T) {} 230 | 231 | // If `T` is `Send`, ... 232 | assert_is_send(42_u32); 233 | 234 | // Then `Arena` is also `Send`. 
235 | let arena: Arena = Arena::new(); 236 | assert_is_send(arena); 237 | } 238 | 239 | #[test] 240 | fn iter_mut_low_capacity() { 241 | #[derive(Debug, PartialEq, Eq)] 242 | struct NonCopy(usize); 243 | 244 | const MAX: usize = 1_000; 245 | const CAP: usize = 16; 246 | 247 | let mut arena = Arena::with_capacity(CAP); 248 | for i in 1..MAX { 249 | arena.alloc(NonCopy(i)); 250 | } 251 | 252 | assert!( 253 | arena.chunks.borrow().rest.len() > 1, 254 | "expected multiple chunks" 255 | ); 256 | 257 | let mut iter = arena.iter_mut(); 258 | for i in 1..MAX { 259 | assert_eq!(Some(&mut NonCopy(i)), iter.next()); 260 | } 261 | 262 | assert_eq!(None, iter.next()); 263 | } 264 | 265 | #[test] 266 | fn iter_mut_high_capacity() { 267 | #[derive(Debug, PartialEq, Eq)] 268 | struct NonCopy(usize); 269 | 270 | const MAX: usize = 1_000; 271 | const CAP: usize = 8192; 272 | 273 | let mut arena = Arena::with_capacity(CAP); 274 | for i in 1..MAX { 275 | arena.alloc(NonCopy(i)); 276 | } 277 | 278 | assert!( 279 | arena.chunks.borrow().rest.is_empty(), 280 | "expected single chunk" 281 | ); 282 | 283 | let mut iter = arena.iter_mut(); 284 | for i in 1..MAX { 285 | assert_eq!(Some(&mut NonCopy(i)), iter.next()); 286 | } 287 | 288 | assert_eq!(None, iter.next()); 289 | } 290 | 291 | fn assert_size_hint(arena_len: usize, iter: IterMut<'_, T>) { 292 | let (min, max) = iter.size_hint(); 293 | 294 | assert!(max.is_some()); 295 | let max = max.unwrap(); 296 | 297 | // Check that the actual arena length lies between the estimated min and max 298 | assert!(min <= arena_len); 299 | assert!(max >= arena_len); 300 | 301 | // Check that the min and max estimates are within a factor of 3 302 | assert!(min >= arena_len / 3); 303 | assert!(max <= arena_len * 3); 304 | } 305 | 306 | #[test] 307 | fn size_hint() { 308 | #[derive(Debug, PartialEq, Eq)] 309 | struct NonCopy(usize); 310 | 311 | const MAX: usize = 32; 312 | const CAP: usize = 0; 313 | 314 | for cap in CAP..(CAP + 16/* check some 
non-power-of-two capacities */) { 315 | let mut arena = Arena::with_capacity(cap); 316 | for i in 1..MAX { 317 | arena.alloc(NonCopy(i)); 318 | let iter = arena.iter_mut(); 319 | assert_size_hint(i, iter); 320 | } 321 | } 322 | } 323 | 324 | // Ensure that `alloc_extend` doesn't violate provenance of 325 | // existing references. (Note: This test is pointless except 326 | // under miri). 327 | #[test] 328 | fn check_extend_provenance() { 329 | let arena = Arena::new(); 330 | let a = arena.alloc(0); 331 | arena.alloc_extend(core::iter::once(1)); 332 | *a = 1; 333 | } 334 | 335 | #[test] 336 | fn size_hint_low_initial_capacities() { 337 | #[derive(Debug, PartialEq, Eq)] 338 | struct NonCopy(usize); 339 | #[cfg(miri)] 340 | const MAX: usize = 100; 341 | #[cfg(not(miri))] 342 | const MAX: usize = 25_000; 343 | const CAP: usize = 0; 344 | 345 | for cap in CAP..(CAP + 128/* check some non-power-of-two capacities */) { 346 | let mut arena = Arena::with_capacity(cap); 347 | for i in 1..MAX { 348 | arena.alloc(NonCopy(i)); 349 | let iter = arena.iter_mut(); 350 | assert_size_hint(i, iter); 351 | } 352 | } 353 | } 354 | 355 | #[test] 356 | fn size_hint_high_initial_capacities() { 357 | #[derive(Debug, PartialEq, Eq)] 358 | struct NonCopy(usize); 359 | 360 | #[cfg(miri)] 361 | const MAX: usize = 100; 362 | #[cfg(not(miri))] 363 | const MAX: usize = 25_000; 364 | const CAP: usize = 8164; 365 | 366 | for cap in CAP..(CAP + 128/* check some non-power-of-two capacities */) { 367 | let mut arena = Arena::with_capacity(cap); 368 | for i in 1..MAX { 369 | arena.alloc(NonCopy(i)); 370 | let iter = arena.iter_mut(); 371 | assert_size_hint(i, iter); 372 | } 373 | } 374 | } 375 | 376 | #[test] 377 | fn size_hint_many_items() { 378 | #[derive(Debug, PartialEq, Eq)] 379 | struct NonCopy(usize); 380 | 381 | #[cfg(miri)] 382 | const MAX: usize = 500; 383 | #[cfg(not(miri))] 384 | const MAX: usize = 5_000_000; 385 | const CAP: usize = 16; 386 | 387 | let mut arena = 
Arena::with_capacity(CAP); 388 | for i in 1..MAX { 389 | arena.alloc(NonCopy(i)); 390 | let iter = arena.iter_mut(); 391 | assert_size_hint(i, iter); 392 | } 393 | } 394 | --------------------------------------------------------------------------------