├── .github └── workflows │ ├── badge.yml │ ├── check-platforms.yml │ ├── check-targets.yml │ ├── check-toolchains.yml │ ├── fast-pr-check.yml │ ├── lints.yml │ └── security.yml ├── .gitignore ├── CHANGELOG.md ├── COPYING ├── Cargo.toml ├── README.md ├── benches └── bench.rs ├── examples ├── basic.rs └── ring.rs ├── license ├── APACHE └── MIT └── src ├── allocator_api.rs ├── bucket.rs ├── drop.rs └── lib.rs /.github/workflows/badge.yml: -------------------------------------------------------------------------------- 1 | name: badge 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | test: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Install nightly toolchain 16 | uses: actions-rs/toolchain@v1 17 | with: 18 | profile: minimal 19 | toolchain: nightly 20 | - name: Run cargo test 21 | run: cargo +nightly test --all-features 22 | -------------------------------------------------------------------------------- /.github/workflows/check-platforms.yml: -------------------------------------------------------------------------------- 1 | name: Check multiple platforms 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | check-targets: 12 | if: ${{ github.event.label.name == 'ready-to-merge' }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest, windows-latest, macOS-latest] 16 | runs-on: ${{ matrix.os }} 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Install stable toolchain 20 | uses: actions-rs/toolchain@v1 21 | with: 22 | profile: minimal 23 | toolchain: stable 24 | - name: Run cargo check 25 | uses: actions-rs/cargo@v1 26 | with: 27 | command: check 28 | args: --all 29 | -------------------------------------------------------------------------------- /.github/workflows/check-targets.yml: -------------------------------------------------------------------------------- 1 | name: Check multiple targets 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | check-targets: 12 | if: ${{ github.event.label.name == 'ready-to-merge' }} 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | target: 17 | - i686-pc-windows-gnu 18 | - i686-pc-windows-msvc 19 | - i686-unknown-linux-gnu 20 | - x86_64-apple-darwin 21 | - x86_64-pc-windows-gnu 22 | - x86_64-pc-windows-msvc 23 | - x86_64-unknown-linux-gnu 24 | - wasm32-unknown-unknown 25 | steps: 26 | - uses: actions/checkout@v2 27 | - name: Install stable toolchain 28 | uses: actions-rs/toolchain@v1 29 | with: 30 | profile: minimal 31 | toolchain: stable 32 | target: ${{ matrix.target }} 33 | - name: Run cargo check 34 | uses: actions-rs/cargo@v1 35 | with: 36 | command: check 37 | args: --all 38 | -------------------------------------------------------------------------------- /.github/workflows/check-toolchains.yml: -------------------------------------------------------------------------------- 1 | name: Check multiple toolchains 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | check-toolchains: 12 | if: ${{ github.event.label.name == 'ready-to-merge' }} 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | rust-toolchain: [stable, nightly] 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Install ${{ matrix.rust-toolchain }} toolchain 20 | uses: actions-rs/toolchain@v1 21 | with: 22 | profile: minimal 23 | toolchain: ${{ matrix.rust-toolchain }} 
24 |       - name: Run cargo check
25 |         if: matrix.rust-toolchain != 'nightly'
26 |         run: cargo check --all
27 |       - name: Run cargo check with allocator_api
28 |         if: matrix.rust-toolchain == 'nightly'
29 |         run: cargo +nightly check --all --features="allocator_api"
30 | 
31 | 
--------------------------------------------------------------------------------
/.github/workflows/fast-pr-check.yml:
--------------------------------------------------------------------------------
1 | name: Fast PR check
2 | 
3 | on:
4 |   pull_request:
5 |     types: [ opened, edited ]
6 |     branches: [ master ]
7 |     paths:
8 |       - '**.rs'
9 |       - '**/Cargo.toml'
10 | 
11 | env:
12 |   CARGO_TERM_COLOR: always
13 | 
14 | jobs:
15 |   test:
16 |     runs-on: ubuntu-latest
17 |     steps:
18 |       - uses: actions/checkout@v2
19 |       - name: Install stable toolchain
20 |         uses: actions-rs/toolchain@v1
21 |         with:
22 |           profile: minimal
23 |           toolchain: stable
24 |       - name: Run cargo test
25 |         uses: actions-rs/cargo@v1
26 |         with:
27 |           command: test
28 |           args: --all
29 | 
--------------------------------------------------------------------------------
/.github/workflows/lints.yml:
--------------------------------------------------------------------------------
1 | name: Lints
2 | 
3 | on:
4 |   pull_request:
5 |     types: [ opened, edited ]
6 |     branches: [ master ]
7 |     paths:
8 |       - '**.rs'
9 |       - '**/Cargo.toml'
10 | 
11 | env:
12 |   CARGO_TERM_COLOR: always
13 | 
14 | jobs:
15 |   fmt:
16 |     runs-on: ubuntu-latest
17 |     steps:
18 |       - uses: actions/checkout@v2
19 |       - name: Install nightly toolchain with rustfmt available
20 |         uses: actions-rs/toolchain@v1
21 |         with:
22 |           profile: minimal
23 |           toolchain: nightly
24 |           components: rustfmt
25 |       - name: Run cargo fmt
26 |         uses: actions-rs/cargo@v1
27 |         with:
28 |           command: fmt
29 |           args: --all -- --check
30 | 
31 |   clippy:
32 |     runs-on: ubuntu-latest
33 |     steps:
34 |       - uses: actions/checkout@v2
35 |       - name: Install nightly toolchain with clippy available
36 |         uses: actions-rs/toolchain@v1
37 |         with:
38 |           profile: minimal
39 |           toolchain: nightly
40 |           components: clippy
41 |       - name: Run cargo clippy
42 |         uses: actions-rs/clippy-check@v1
43 |         with:
44 |           token: ${{ secrets.GITHUB_TOKEN }}
45 |           args: --all --all-features -- -D warnings
46 | 
--------------------------------------------------------------------------------
/.github/workflows/security.yml:
--------------------------------------------------------------------------------
1 | name: Security audit
2 | 
3 | on:
4 |   pull_request:
5 |     types: [ labeled ]
6 |     branches: [ master ]
7 |     paths:
8 |       - '**/Cargo.toml'
9 | 
10 | env:
11 |   CARGO_TERM_COLOR: always
12 | 
13 | jobs:
14 |   security_audit:
15 |     if: ${{ github.event.label.name == 'ready-to-merge' }}
16 |     runs-on: ubuntu-latest
17 |     steps:
18 |       - uses: actions/checkout@v2
19 |       - uses: actions-rs/audit-check@v1
20 |         with:
21 |           token: ${{ secrets.GITHUB_TOKEN }}
22 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | **/*.rs.bk
3 | Cargo.lock
4 | 
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | All notable changes to this project will be documented in this file.
3 | 
4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6 | 7 | ## [Unreleased] 8 | 9 | ## [0.3.0] - 2021-08-24 10 | 11 | ### Changed 12 | - Switched direction of memory block consumption 13 | - Scope can allocate more memory than requested due to alignment 14 | 15 | ## [0.2.1] - 2021-08-22 16 | 17 | ### Added 18 | - Methods to move closure returns to scope 19 | 20 | ## [0.2.0] - 2021-08-20 21 | 22 | ### Changed 23 | - "nightly" feature renamed to "allocator_api" to reflect rust feature it uses. 24 | - `Arena` type is removed. `Scope` gets methods of the `Arena`. 25 | 26 | ## [0.1.1] - 2021-08-19 27 | 28 | ### Added 29 | - Reset method for scopes 30 | - Ring example 31 | 32 | ### Fixed 33 | - Debug output 34 | 35 | ## [0.1.0] - 2021-08-19 36 | ### Added 37 | - Re-exports of allocator-api types and traits when "nightly" feature is enabled. 38 | - Copies of allocator-api types and traits when "nightly" feature is disabled 39 | - Arena allocator that allocates with simple increment and deallocates all memory at once on reset 40 | - Scope wrapper for Arena that allows moving value onto scope and drops all values on scope drop. Also frees any memory allocated within the scope. 41 | - Methods to move whole iterators onto scope returning slices. 42 | - Basic usage example. -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | Copyright 2021 The Scoped-Arena Project Developers 2 | 3 | Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be 6 | copied, modified, or distributed except according to those terms. 7 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "scoped-arena" 3 | version = "0.4.1" 4 | edition = "2018" 5 | authors = ["Zakarum "] 6 | license = "MIT OR Apache-2.0" 7 | documentation = "https://docs.rs/scoped-arena" 8 | homepage = "https://github.com/zakarumych/scoped-arena" 9 | repository = "https://github.com/zakarumych/scoped-arena" 10 | readme = "README.md" 11 | description = "Arena allocator with optional scopes" 12 | categories = ["memory-management", "rust-patterns", "no-std"] 13 | keywords = ["allocator", "alloc", "arena", "no-std"] 14 | 15 | [features] 16 | allocator_api = [] 17 | alloc = [] 18 | default = ["alloc"] 19 | 20 | [dev-dependencies] 21 | quickcheck = "1.0" 22 | criterion = "0.3.0" 23 | rand = "0.8" 24 | lazy_static = "1.0" 25 | bumpalo = { version = "3.7", features = ["allocator_api"] } 26 | 27 | [[bench]] 28 | name = "benches" 29 | path = "benches/bench.rs" 30 | harness = false 31 | required-features = ["allocator_api"] 32 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Scoped-Arena 2 | 3 | [![crates](https://img.shields.io/crates/v/scoped-arena.svg?style=for-the-badge&label=scoped-arena)](https://crates.io/crates/scoped-arena) 4 | [![docs](https://img.shields.io/badge/docs.rs-scoped--arena-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white)](https://docs.rs/scoped-arena) 5 | [![actions](https://img.shields.io/github/workflow/status/zakarumych/scoped-arena/badge/master?style=for-the-badge)](https://github.com/zakarumych/scoped-arena/actions?query=workflow%3ARust) 6 | 
[![MIT/Apache](https://img.shields.io/badge/license-MIT%2FApache-blue.svg?style=for-the-badge)](COPYING)
7 | ![loc](https://img.shields.io/tokei/lines/github/zakarumych/scoped-arena?style=for-the-badge)
8 | 
9 | Scoped-Arena provides an arena allocator with explicit scopes.
10 | 
11 | ## Arena allocation
12 | 
13 | Arena allocators are simple and provide ludicrously fast allocation.\
14 | An allocation only bumps an internal pointer in the current memory block: first to the alignment of the allocated object, then by its size, and that's it.\
15 | When the memory block is exhausted the arena allocates a new, bigger memory block.\
16 | Once all allocated objects are no longer used the arena can be reset, keeping only the last memory block for reuse.\
17 | After several warmup iterations that single memory block is large enough to handle all allocations until the next reset.
18 | 
19 | 
20 | ### Example
21 | 
22 | ```rust
23 | use scoped_arena::Scope;
24 | 
25 | struct Cat {
26 |     name: String,
27 |     hungry: bool,
28 | }
29 | 
30 | // Create a new arena with the `Global` allocator.
31 | let mut scope = Scope::new();
32 | 
33 | // Construct a cat and move it to the scope.
34 | let cat: &mut Cat = scope.to_scope(Cat {
35 |     name: "Fluffy".to_owned(),
36 |     hungry: true,
37 | });
38 | 
39 | // Now `cat` is a mutable reference bound to the scope's borrow lifetime.
40 | 
41 | assert_eq!(&cat.name, "Fluffy");
42 | assert!(cat.hungry);
43 | 
44 | cat.hungry = false;
45 | 
46 | // This cat instance on the scope will be automatically dropped when `scope` is dropped or reset.
47 | // It is impossible to reset the scope before the last use of `cat`.
48 | 
49 | // The next line will drop the cat value and free the memory occupied by it.
50 | scope.reset();
51 | 
52 | // If there were more cats or any other objects put on the scope, they would all be dropped and their memory freed.
53 | ```
54 | 
55 | ## Scopes
56 | 
57 | To reuse memory earlier, this crate provides `Scope` with methods to create sub-`Scope`s.\
58 | When a sub-`Scope` is reset or dropped it drops all stored values, frees the memory allocated by the sub-scope and flushes the last of the newly allocated memory blocks into the parent.\
59 | Objects allocated with the parent `Scope` are unchanged and still valid.
60 | 
61 | Well-placed scopes can significantly reduce memory consumption.\
62 | For example, if a few function calls use a lot of dynamic memory but don't need it to be available in the caller,\
63 | they can be provided with a sub-scope.\
64 | At the same time, any memory allocated in the parent scope stays allocated.
65 | 
66 | Creating a sub-scope is cheap, and allocating within a sub-scope is as fast as allocating in the parent scope.
67 | 
68 | ### Example
69 | 
70 | ```rust
71 | use scoped_arena::{Scope, ScopeProxy};
72 | 
73 | 
74 | fn heavy_on_memory(mut scope: Scope<'_>, foobar: &String) {
75 |     for _ in 0 .. 42 {
76 |         let foobar: &mut String = scope.to_scope(foobar.clone());
77 |     }
78 | 
79 |     // The new `scope` is dropped here; it drops all allocated strings and frees their memory.
80 | }
81 | 
82 | let mut scope = Scope::new();
83 | 
84 | // A proxy is required to be friends with the borrow checker.
85 | // Creating a sub-scope must lock the parent `Scope` from being used, which requires a mutable borrow, but any allocation borrows `Scope`.
86 | // `ScopeProxy` relaxes this a bit: it borrows `Scope` mutably and ties allocated objects' lifetimes to the scope's borrow lifetime.
87 | // So a sub-scope can borrow the proxy mutably while there are objects allocated from it.
88 | let mut proxy = scope.proxy();
89 | 
90 | let foobar: &mut String = proxy.to_scope("foobar".to_owned());
91 | 
92 | // Make a sub-scope for the call.
93 | heavy_on_memory(proxy.scope(), &*foobar);
94 | 
95 | // If `heavy_on_memory` didn't trigger allocation of a new memory block in the scope,
96 | // the sub-scope drop rewinds the scope's internals to exactly the same state.
97 | // Otherwise the last of the new blocks becomes the current block in the parent scope.
98 | //
99 | // Note that `foobar` is still alive.
100 | 
101 | heavy_on_memory(proxy.scope(), &*foobar);
102 | heavy_on_memory(proxy.scope(), &*foobar);
103 | heavy_on_memory(proxy.scope(), &*foobar);
104 | heavy_on_memory(proxy.scope(), &*foobar);
105 | 
106 | // Once peak memory consumption is reached, any number of `heavy_on_memory` calls will not require new memory blocks to be allocated.
107 | // Even `loop { heavy_on_memory(proxy.scope(), &*foobar) }` will settle on some big enough block.
108 | ```
109 | 
110 | ## Dropping
111 | 
112 | The `to_scope` and `try_to_scope` methods store drop-glue for values for which `needs_drop` is true.
113 | On reset or drop the scope iterates over and properly drops all those values.
114 | No drop-glue is added for types that don't need dropping: `Scope` allocates enough memory and writes the value there, with no bookkeeping overhead.
115 | 
116 | ## Iterator collecting
117 | 
118 | The `to_scope_from_iter` method acts like `to_scope` but works on iterators and returns slices.
119 | The limitation is that `to_scope_from_iter` needs to allocate enough memory for the upper bound of what the iterator can yield.
120 | If the upper bound is too large or the iterator is unbounded it will always fail.
121 | One can use `try_to_scope_from_iter` so that failure is an `Err` rather than a panic.
122 | It is safe for the iterator to yield more items than the upper bound it reports; `to_scope_from_iter` will not iterate past the upper bound.
123 | On success it returns a mutable reference to a slice with the iterator's items in order.
124 | All values will be dropped on scope reset or drop, same as with `to_scope`.
125 | 
126 | This method is especially useful for dealing with APIs that require slices (*glares at FFI*); collecting into a temporary `Vec` would cost much more.
127 | 
128 | ## #![no_std] Support
129 | 
130 | Scoped-Arena is a no_std crate. It depends only on the `core` crate and, optionally, on the `alloc` crate.
131 | 
132 | ## Nightly Rust feature(allocator_api) Support
133 | 
134 | Scoped-Arena uses a copy of the `allocator_api` traits and types for the underlying allocator.\
135 | On the nightly channel it is possible to enable the `allocator_api` feature for this crate. Then the actual `allocator_api` traits and types will be used instead,
136 | enabling use of any compatible allocator.
137 | Additionally, `&Scope` and `ScopeProxy` will implement `core::alloc::Allocator`, making them suitable for standard Rust collections.
138 | 
139 | Note that as Rust's `allocator_api` feature is unstable, this crate's `allocator_api` feature is also considered unstable and may not follow semver.
140 | That is, changes that track `allocator_api` modifications in Rust can be published in a patch version, although they must not break code that does not use the feature.
141 | 
142 | ## License
143 | 
144 | Licensed under either of
145 | 
146 | * Apache License, Version 2.0, ([license/APACHE](license/APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
147 | * MIT license ([license/MIT](license/MIT) or http://opensource.org/licenses/MIT)
148 | 
149 | at your option.
150 | 151 | ## Contributions 152 | 153 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 154 | -------------------------------------------------------------------------------- /benches/bench.rs: -------------------------------------------------------------------------------- 1 | #![feature(allocator_api)] 2 | 3 | use bumpalo::Bump; 4 | use criterion::*; 5 | use scoped_arena::Scope; 6 | use std::alloc::{Allocator, Global}; 7 | 8 | #[derive(Default)] 9 | struct Dummy(u128); 10 | 11 | const VEC_COUNT: usize = 100; 12 | 13 | fn vec_alloc(alloc: A, count: usize) { 14 | let mut vec = Vec::new_in(alloc); 15 | vec.resize_with(count, T::default); 16 | std::mem::forget(black_box(vec)); 17 | } 18 | 19 | fn from_iter(alloc: &Scope<'static>, count: usize) { 20 | let slice = alloc.to_scope_from_iter((0..count).map(|_| T::default())); 21 | black_box(slice); 22 | } 23 | 24 | fn many(alloc: &Scope<'static>, count: usize) { 25 | let slice = alloc.to_scope_many_with(count, T::default); 26 | black_box(slice); 27 | } 28 | 29 | fn vec_full(alloc: A, count: usize) { 30 | let mut vec = Vec::new_in(alloc); 31 | vec.resize_with(count, T::default); 32 | drop(black_box(vec)); 33 | } 34 | 35 | fn bench_vec_alloc(c: &mut Criterion) { 36 | let mut group = c.benchmark_group("alloc"); 37 | group.throughput(Throughput::Elements(VEC_COUNT as u64)); 38 | group.bench_function("global", |b| { 39 | b.iter(|| vec_alloc::(Global, VEC_COUNT)) 40 | }); 41 | 42 | let mut scope: Scope = Scope::with_capacity(1 << 10); 43 | let mut bump = Bump::new(); 44 | 45 | bump.alloc([0u8; 1024]); 46 | bump.reset(); 47 | 48 | group.bench_function("bump", |b| { 49 | b.iter(|| { 50 | vec_alloc::(&bump, VEC_COUNT); 51 | bump.reset(); 52 | }) 53 | }); 54 | 55 | group.bench_function("scope", |b| { 56 | b.iter(|| { 57 | vec_alloc::(&scope, VEC_COUNT); 58 | scope.reset(); 59 | }) 60 | }); 61 | 62 | group.bench_function("scope iter", |b| { 63 | b.iter(|| { 64 | from_iter::(&scope, VEC_COUNT); 65 | scope.reset(); 66 | }) 67 | }); 68 | 69 | group.bench_function("scope many", |b| { 70 | b.iter(|| { 71 | many::(&scope, VEC_COUNT); 72 | scope.reset(); 73 | }) 74 | }); 75 | } 76 | 77 | fn bench_vec_full(c: &mut Criterion) { 78 | let mut group = c.benchmark_group("full"); 79 | group.throughput(Throughput::Elements(VEC_COUNT as u64)); 80 | group.bench_function("global", |b| { 81 | b.iter(|| vec_full::(Global, VEC_COUNT)) 82 | }); 83 | 84 | let mut scope = scoped_arena::Scope::with_capacity(1 << 10); 85 | let mut bump = Bump::new(); 86 | 87 | bump.alloc([0u8; 1024]); 88 | bump.reset(); 89 | 90 | group.bench_function("bump", |b| { 91 | b.iter(|| { 92 | vec_full::(&bump, VEC_COUNT); 93 | bump.reset(); 94 | }) 95 | }); 96 | 97 | group.bench_function("scope", |b| { 98 | b.iter(|| { 99 | vec_full::(&scope, VEC_COUNT); 100 | scope.reset(); 101 | }) 102 | }); 103 | } 104 | 105 | criterion_group!(benches, bench_vec_alloc, bench_vec_full); 106 | criterion_main!(benches); 107 | -------------------------------------------------------------------------------- /examples/basic.rs: -------------------------------------------------------------------------------- 1 | use scoped_arena::Scope; 2 | 3 | #[derive(Debug, PartialEq, Eq)] 4 | struct Foo(u32); 5 | 6 | impl Drop for Foo { 7 | fn drop(&mut self) { 8 | println!("Drop Foo at {:p} with {}", self, self.0); 9 | } 10 | } 11 | 12 | fn main() { 13 | // 
Creating root scope. 14 | let mut scope = Scope::new(); 15 | let mut proxy = scope.proxy(); 16 | 17 | let value_in_root_scope = proxy.to_scope(Foo(42)); 18 | 19 | for i in 0..3 { 20 | // Creating sub-scope that inherits arena and uses same bucket allocator, 21 | // but drops values moved to the sub-scope, 22 | // and frees memory allocated for them, 23 | // keeping values on parent scope intact. 24 | let scope = proxy.scope(); 25 | 26 | // Move value with `Drop` onto scope. 27 | let value_in_scope = scope.to_scope(Foo(i)); 28 | 29 | // Ensure that value is moved properly. 30 | assert_eq!(value_in_scope.0, i); 31 | 32 | // Ensure that value in root scope is not affected. 33 | assert_eq!(value_in_root_scope.0, 42); 34 | } 35 | 36 | drop(proxy); 37 | let slice = scope.to_scope_from_iter([Foo(100), Foo(101)]); 38 | 39 | assert_eq!(slice, &[Foo(100), Foo(101)]); 40 | 41 | scope.reset(); 42 | println!("Total memory usage: {} bytes", scope.total_memory_usage()); 43 | } 44 | -------------------------------------------------------------------------------- /examples/ring.rs: -------------------------------------------------------------------------------- 1 | use scoped_arena::Scope; 2 | 3 | struct Foo(&'static str); 4 | 5 | impl Drop for Foo { 6 | fn drop(&mut self) { 7 | print!("{}", self.0); 8 | } 9 | } 10 | 11 | fn use_scopes(a: &mut Scope, b: &mut Scope, c: &mut Scope) { 12 | a.to_scope(Foo("a")); 13 | b.to_scope(Foo("b")); 14 | c.to_scope(Foo("c")); 15 | } 16 | 17 | fn main() { 18 | let mut scope_a = Scope::new(); 19 | let mut scope_b = Scope::new(); 20 | let mut scope_c = Scope::new(); 21 | 22 | for i in 0..10 { 23 | match i % 3 { 24 | 0 => { 25 | scope_a.reset(); 26 | use_scopes(&mut scope_a, &mut scope_b, &mut scope_c) 27 | } 28 | 1 => { 29 | scope_b.reset(); 30 | use_scopes(&mut scope_b, &mut scope_c, &mut scope_a) 31 | } 32 | 2 => { 33 | scope_c.reset(); 34 | use_scopes(&mut scope_c, &mut scope_a, &mut scope_b) 35 | } 36 | _ => {} 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /license/APACHE: -------------------------------------------------------------------------------- 1 | Copyright 2021 The Scoped-Arena project developers 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
-------------------------------------------------------------------------------- /license/MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2021 The Scoped-Arena project developers 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/allocator_api.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "allocator_api")] 2 | pub use core::alloc::{AllocError, Allocator}; 3 | 4 | #[cfg(not(feature = "allocator_api"))] 5 | use core::{ 6 | alloc::Layout, 7 | fmt::{self, Display}, 8 | ptr::NonNull, 9 | }; 10 | 11 | #[cfg(all(feature = "allocator_api", feature = "alloc"))] 12 | pub use alloc::alloc::Global; 13 | 14 | /// Same as [`core::alloc::AllocError`], but stable. 15 | /// When nightly feature is enabled, this is re-export of [`core::alloc::AllocError`] instead. 16 | #[cfg(not(feature = "allocator_api"))] 17 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 18 | pub struct AllocError; 19 | 20 | #[cfg(not(feature = "allocator_api"))] 21 | impl Display for AllocError { 22 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 23 | f.write_str("memory allocation failed") 24 | } 25 | } 26 | 27 | /// Same as [`core::alloc::Allocator`], but stable. 28 | /// When nightly feature is enabled, this is re-export of [`core::alloc::Allocator`] instead. 29 | #[cfg(not(feature = "allocator_api"))] 30 | pub unsafe trait Allocator { 31 | /// See [`core::alloc::Allocator::allocate`]. 32 | fn allocate(&self, layout: Layout) -> Result, AllocError>; 33 | 34 | /// See [`core::alloc::Allocator::deallocate`]. 35 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout); 36 | } 37 | 38 | #[cfg(not(feature = "allocator_api"))] 39 | unsafe impl Allocator for &'_ A 40 | where 41 | A: Allocator, 42 | { 43 | fn allocate(&self, layout: Layout) -> Result, AllocError> { 44 | A::allocate(*self, layout) 45 | } 46 | 47 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { 48 | A::deallocate(*self, ptr, layout) 49 | } 50 | } 51 | 52 | /// Same as [`alloc::alloc::Global`], but stable. 53 | /// When nightly feature is enabled, this is re-export of [`alloc::alloc::Global`] instead. 
54 | #[cfg(all(not(feature = "allocator_api"), feature = "alloc"))] 55 | #[derive(Clone, Copy, Debug, Default)] 56 | pub struct Global; 57 | 58 | #[cfg(all(not(feature = "allocator_api"), feature = "alloc"))] 59 | unsafe impl Allocator for Global { 60 | fn allocate(&self, layout: Layout) -> Result, AllocError> { 61 | if layout.size() == 0 { 62 | Ok(NonNull::from(&mut [])) 63 | } else { 64 | let ptr = unsafe { alloc::alloc::alloc(layout) }; 65 | if ptr.is_null() { 66 | Err(AllocError) 67 | } else { 68 | Ok(unsafe { 69 | NonNull::new_unchecked(core::slice::from_raw_parts_mut(ptr, layout.size())) 70 | }) 71 | } 72 | } 73 | } 74 | 75 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { 76 | alloc::alloc::dealloc(ptr.as_ptr(), layout) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/bucket.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | alloc::Layout, 3 | cell::Cell, 4 | fmt::{self, Debug}, 5 | mem::{align_of, size_of, MaybeUninit}, 6 | ptr::{write, NonNull}, 7 | }; 8 | 9 | use crate::allocator_api::{AllocError, Allocator}; 10 | 11 | struct BucketFooter { 12 | prev: Option>, 13 | start: NonNull, 14 | free_end: usize, 15 | size: usize, 16 | } 17 | 18 | impl Debug for BucketFooter { 19 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 20 | f.debug_struct("Bucket") 21 | .field("start", &self.start) 22 | .field("size", &self.size) 23 | .field("free_end", &(self.free_end as *mut u8)) 24 | .field("free", &(self.free_end - self.start.as_ptr() as usize)) 25 | .finish() 26 | } 27 | } 28 | 29 | impl BucketFooter { 30 | unsafe fn init<'a>(ptr: NonNull<[u8]>, list: &Buckets) -> &'a mut Self { 31 | let slice = &mut *(ptr.as_ptr() as *mut [MaybeUninit]); 32 | let size = slice.len(); 33 | 34 | let start = NonNull::new_unchecked(slice.as_mut_ptr().cast::()); 35 | 36 | let cap = (size - size_of::()) & !(align_of::() - 1); 37 | let end = start.as_ptr() as usize + cap; 38 | 39 | let footer_ptr = start.as_ptr().add(cap).cast::(); 40 | write( 41 | footer_ptr, 42 | BucketFooter { 43 | prev: list.tail.get(), 44 | start, 45 | free_end: end, 46 | size, 47 | }, 48 | ); 49 | 50 | list.tail.set(Some(NonNull::new_unchecked(footer_ptr))); 51 | list.buckets_added.set(list.buckets_added.get() + 1); 52 | 53 | list.last_bucket_size.set(size); 54 | list.total_memory_usage 55 | .set(list.total_memory_usage.get() + size); 56 | 57 | &mut *footer_ptr 58 | } 59 | 60 | #[inline(always)] 61 | fn allocate(&mut self, layout: Layout) -> Option> { 62 | let aligned = self.free_end.checked_sub(layout.size())? & !(layout.align() - 1); 63 | 64 | if aligned >= self.start.as_ptr() as usize { 65 | let aligned_ptr = aligned as *mut u8; 66 | let slice = core::ptr::slice_from_raw_parts_mut(aligned_ptr, self.free_end - aligned); 67 | self.free_end = aligned; 68 | 69 | Some(unsafe { NonNull::new_unchecked(slice) }) 70 | } else { 71 | None 72 | } 73 | } 74 | 75 | #[inline(always)] 76 | fn reset(&mut self) { 77 | let cap = (self.size - size_of::()) & !(align_of::() - 1); 78 | self.free_end = self.start.as_ptr() as usize + cap; 79 | } 80 | } 81 | 82 | pub struct Buckets<'a> { 83 | tail: Cell>>, 84 | buckets_added: Cell, 85 | last_bucket_size: Cell, 86 | total_memory_usage: Cell, 87 | parent: Option<&'a Buckets<'a>>, 88 | parent_tail_free_end: usize, 89 | } 90 | 91 | /// This type does not automatically implement `Send` because of `NonNull` pointer and reference to Self which is not Sync. 
92 | /// NonNull pointer is tail of the list of allocated buckets owned by the bucket and parent. 93 | /// When buckets instance with parent is constructed parent is borrowed mutably, so parent cannot be used from another thread. 94 | /// This also grants exclusive access to the buckets list. 95 | unsafe impl Send for Buckets<'_> {} 96 | 97 | #[cfg(debug_assertions)] 98 | impl Drop for Buckets<'_> { 99 | fn drop(&mut self) { 100 | if self.parent.is_none() { 101 | assert!(self.tail.get().is_none()); 102 | } 103 | } 104 | } 105 | 106 | impl Debug for Buckets<'_> { 107 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 108 | let mut f = f.debug_struct("BucketAlloc"); 109 | f.field("buckets_added", &self.buckets_added.get()) 110 | .field("last_bucket_size", &self.last_bucket_size.get()) 111 | .field("total_memory_usage", &self.total_memory_usage.get()); 112 | 113 | if let Some(parent) = &self.parent { 114 | f.field("parent", &parent) 115 | .field("parent_tail_used", &self.parent_tail_free_end); 116 | } 117 | 118 | let mut tail = self.tail.get(); 119 | while let Some(ptr) = tail { 120 | let b = unsafe { ptr.as_ref() }; 121 | f.field("elem", b); 122 | tail = b.prev; 123 | } 124 | 125 | f.finish() 126 | } 127 | } 128 | 129 | impl Buckets<'static> { 130 | pub fn new(capacity: usize, alloc: &A) -> Result 131 | where 132 | A: Allocator, 133 | { 134 | let buckets = Buckets { 135 | tail: Cell::new(None), 136 | buckets_added: Cell::new(0), 137 | last_bucket_size: Cell::new(0), 138 | total_memory_usage: Cell::new(0), 139 | parent: None, 140 | parent_tail_free_end: 0, 141 | }; 142 | 143 | if capacity != 0 { 144 | let bucket_layout = layout_with_capacity(capacity).ok_or(AllocError)?; 145 | let ptr = alloc.allocate(bucket_layout)?; 146 | 147 | unsafe { BucketFooter::init(ptr, &buckets) }; 148 | } 149 | 150 | Ok(buckets) 151 | } 152 | } 153 | 154 | impl<'a> Buckets<'a> { 155 | /// # Safety 156 | /// 157 | /// At least one clone of `alloc` must live for `'a` 158 | #[inline(always)] 159 | pub unsafe fn allocate( 160 | &self, 161 | layout: Layout, 162 | alloc: &A, 163 | ) -> Result<&'a mut [MaybeUninit], AllocError> 164 | where 165 | A: Allocator, 166 | { 167 | if layout.size() == 0 { 168 | return Ok(core::slice::from_raw_parts_mut( 169 | layout.align() as *mut MaybeUninit, 170 | 0, 171 | )); 172 | } 173 | 174 | if let Some(bucket) = self.tail() { 175 | if let Some(ptr) = bucket.allocate(layout) { 176 | let mut ptr = NonNull::new_unchecked(ptr.as_ptr() as *mut [MaybeUninit]); 177 | let slice = ptr.as_mut(); 178 | return Ok(slice); 179 | } 180 | } 181 | 182 | // Allocate new bucket. 
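// `next_layout` grows geometrically: the new bucket is at least as large as
// the previous bucket plus room for this allocation and the `BucketFooter`,
// so after a few grows a single bucket can serve a whole allocation cycle.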
183 | let bucket_layout = next_layout(self.last_bucket_size.get(), layout).ok_or(AllocError)?; 184 | let ptr = alloc.allocate(bucket_layout)?; 185 | self.last_bucket_size.set(bucket_layout.size()); 186 | let bucket = BucketFooter::init(ptr, self); 187 | 188 | let ptr = bucket 189 | .allocate(layout) 190 | .expect("Allocation from new bucket must succeed"); 191 | 192 | let mut ptr = NonNull::new_unchecked(ptr.as_ptr() as *mut [MaybeUninit]); 193 | let slice = ptr.as_mut(); 194 | 195 | Ok(slice) 196 | } 197 | 198 | unsafe fn tail(&self) -> Option<&mut BucketFooter> { 199 | let ptr = self.tail.get()?; 200 | Some(&mut *ptr.as_ptr()) 201 | } 202 | 203 | #[inline(always)] 204 | pub fn fork(&mut self) -> Buckets<'_> { 205 | Buckets { 206 | tail: Cell::new(self.tail.get()), 207 | buckets_added: Cell::new(0), 208 | last_bucket_size: Cell::new(self.last_bucket_size.get()), 209 | total_memory_usage: Cell::new(self.total_memory_usage.get()), 210 | parent_tail_free_end: unsafe { self.tail().map_or(0, |tail| tail.free_end) }, 211 | parent: Some(self), 212 | } 213 | } 214 | 215 | // Resets buckets added to the fork 216 | #[inline(always)] 217 | pub unsafe fn reset(&mut self, alloc: &A, keep_tail: bool) 218 | where 219 | A: Allocator, 220 | { 221 | use core::hint::unreachable_unchecked; 222 | 223 | match &self.parent { 224 | None => { 225 | // Resetting root. 226 | let mut tail = self.tail.get(); 227 | let pre_reset_total_memory_usage = self.total_memory_usage.get(); 228 | 229 | if keep_tail { 230 | if let Some(mut ptr) = tail { 231 | let prev = ptr.as_ref().prev; 232 | ptr.as_mut().prev = None; 233 | ptr.as_mut().reset(); 234 | tail = prev; 235 | 236 | self.total_memory_usage.set(ptr.as_ref().size); 237 | } else { 238 | debug_assert_eq!(self.total_memory_usage.get(), 0); 239 | } 240 | } else { 241 | self.tail.set(None); 242 | self.total_memory_usage.set(0); 243 | } 244 | 245 | let post_reset_total_memory_usage = self.total_memory_usage.get(); 246 | let mut memory_freed = 0; 247 | 248 | while let Some(ptr) = tail { 249 | let bucket = ptr.as_ref(); 250 | let layout = 251 | Layout::from_size_align_unchecked(bucket.size, align_of::()); 252 | tail = bucket.prev; 253 | alloc.deallocate(bucket.start, layout); 254 | memory_freed += layout.size(); 255 | } 256 | 257 | debug_assert_eq!( 258 | post_reset_total_memory_usage + memory_freed, 259 | pre_reset_total_memory_usage 260 | ); 261 | } 262 | Some(parent) => { 263 | // Resetting scoped arena 264 | match self.buckets_added.get() { 265 | 0 => { 266 | if let Some(tail) = parent.tail() { 267 | tail.free_end = self.parent_tail_free_end; 268 | } 269 | } 270 | _ => { 271 | match self.tail() { 272 | None => unreachable_unchecked(), 273 | Some(tail) => { 274 | tail.reset(); 275 | let mut excess_bucket = tail.prev; 276 | 277 | let mut memory_freed = 0; 278 | 279 | // Drop all added buckets except tail. 
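// The tail added by this scope is the newest (and largest) bucket; it is kept
// and handed back to the parent below so the grown block keeps being reused,
// while the older buckets added by this scope are returned to the allocator.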
280 | for _ in 1..self.buckets_added.get() { 281 | match excess_bucket { 282 | None => unreachable_unchecked(), 283 | Some(ptr) => { 284 | let bucket = ptr.as_ref(); 285 | let layout = Layout::from_size_align_unchecked( 286 | bucket.size, 287 | align_of::(), 288 | ); 289 | excess_bucket = bucket.prev; 290 | alloc.deallocate(bucket.start, layout); 291 | memory_freed += layout.size(); 292 | } 293 | } 294 | } 295 | 296 | tail.prev = excess_bucket; 297 | let tail_free_end = tail.free_end; 298 | 299 | let total_memory_usage = 300 | parent.total_memory_usage.get() + tail.size; 301 | 302 | debug_assert_eq!( 303 | total_memory_usage + memory_freed, 304 | self.total_memory_usage.get() 305 | ); 306 | 307 | parent.total_memory_usage.set(total_memory_usage); 308 | parent.buckets_added.set(parent.buckets_added.get() + 1); 309 | parent.last_bucket_size.set(self.last_bucket_size.get()); 310 | parent.tail.set(Some(NonNull::from(tail))); 311 | 312 | self.total_memory_usage.set(total_memory_usage); 313 | self.buckets_added.set(0); 314 | self.parent_tail_free_end = tail_free_end; 315 | } 316 | } 317 | } 318 | } 319 | } 320 | } 321 | } 322 | 323 | // Flushes buckets added to the fork. 324 | #[inline(always)] 325 | pub unsafe fn flush_fork(&mut self) { 326 | use core::hint::unreachable_unchecked; 327 | 328 | debug_assert!( 329 | self.parent.is_some(), 330 | "Must be called only on non-root bucket list owned by `Scope`" 331 | ); 332 | 333 | match &self.parent { 334 | None => unreachable_unchecked(), 335 | Some(parent) => { 336 | parent.tail.set(self.tail.get()); 337 | parent 338 | .buckets_added 339 | .set(parent.buckets_added.get() + self.buckets_added.get()); 340 | parent.last_bucket_size.set(self.last_bucket_size.get()); 341 | parent.total_memory_usage.set(self.total_memory_usage.get()); 342 | } 343 | } 344 | 345 | self.tail.set(None); 346 | } 347 | 348 | #[inline(always)] 349 | pub fn total_memory_usage(&self) -> usize { 350 | self.total_memory_usage.get() 351 | } 352 | } 353 | 354 | fn layout_with_capacity(capacity: usize) -> Option { 355 | let (layout, _) = Layout::new::() 356 | .extend(Layout::array::(capacity).ok()?) 357 | .ok()?; 358 | Some(layout) 359 | } 360 | 361 | fn next_layout(last_size: usize, item_layout: Layout) -> Option { 362 | const ALIGN: usize = 1 + ((align_of::() - 1) | 7); 363 | const BIG_ALIGN: usize = 1 + (((1 << 12) - 1) | (ALIGN - 1)); 364 | const FOOTER_OVERHEAD: usize = size_of::() + ALIGN; 365 | const MIN_CAP: usize = 32; 366 | 367 | let min_grow = (item_layout.size() + item_layout.align() - 1) 368 | .max(MIN_CAP) 369 | .checked_add(FOOTER_OVERHEAD)?; 370 | let grow = last_size.max(min_grow); 371 | let size = last_size.checked_add(grow)?; 372 | 373 | let aligned_size = if size > BIG_ALIGN { 374 | size.checked_add(BIG_ALIGN - 1)? & !(BIG_ALIGN - 1) 375 | } else { 376 | size.checked_add(ALIGN - 1)? 
& !(ALIGN - 1) 377 | }; 378 | 379 | Layout::from_size_align(aligned_size, ALIGN).ok() 380 | } 381 | -------------------------------------------------------------------------------- /src/drop.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | alloc::Layout, 3 | cell::Cell, 4 | mem::{needs_drop, size_of, ManuallyDrop, MaybeUninit}, 5 | ptr::{drop_in_place, NonNull}, 6 | }; 7 | 8 | use crate::{cast_buf, cast_buf_array}; 9 | 10 | #[repr(C)] 11 | pub struct WithDrop { 12 | to_drop: ToDrop, 13 | value: ManuallyDrop, 14 | } 15 | 16 | impl WithDrop { 17 | pub unsafe fn init<'a>( 18 | uninit: &'a mut [MaybeUninit], 19 | value: T, 20 | drop_list: &DropList, 21 | ) -> &'a mut T { 22 | debug_assert!(needs_drop::()); 23 | let uninit = cast_buf::>(&mut uninit[..size_of::()]); 24 | 25 | uninit.write(WithDrop { 26 | to_drop: ToDrop { 27 | prev: drop_list.tail.get(), 28 | count: 1, 29 | drop: |_, _| {}, 30 | }, 31 | value: ManuallyDrop::new(value), 32 | }); 33 | 34 | // Now initialized. 35 | let with_drop = uninit.assume_init_mut(); 36 | 37 | // Setup drop glue. 38 | with_drop.to_drop.drop = drop_glue::; 39 | 40 | drop_list 41 | .tail 42 | .set(Some(NonNull::from(&mut with_drop.to_drop))); 43 | 44 | &mut *with_drop.value 45 | } 46 | 47 | pub fn array_layout(count: usize) -> Option { 48 | Some( 49 | Layout::new::() 50 | .extend(Layout::array::(count).ok()?) 51 | .ok()? 52 | .0, 53 | ) 54 | } 55 | 56 | pub unsafe fn init_iter<'a>( 57 | uninit: &'a mut [MaybeUninit], 58 | iter: impl Iterator, 59 | drop_list: &DropList, 60 | ) -> &'a mut [T] { 61 | debug_assert!(needs_drop::()); 62 | 63 | let (_, values_offset) = Layout::new::().extend(Layout::new::()).unwrap(); 64 | let (to_drop_uninit, values_uninit) = uninit.split_at_mut(values_offset); 65 | 66 | let to_drop_uninit = cast_buf::(&mut to_drop_uninit[..size_of::()]); 67 | let to_drop = to_drop_uninit.write(ToDrop { 68 | prev: drop_list.tail.get(), 69 | count: 0, 70 | drop: |_, _| {}, 71 | }); 72 | 73 | let (values_uninit, _) = cast_buf_array::(values_uninit); 74 | 75 | let item_count = iter.take(values_uninit.len()).fold(0, |idx, item| { 76 | values_uninit[idx].write(item); 77 | idx + 1 78 | }); 79 | 80 | // Setup drop glue. 81 | to_drop.drop = drop_glue::; 82 | to_drop.count = item_count; 83 | 84 | drop_list.tail.set(Some(NonNull::from(to_drop))); 85 | 86 | core::slice::from_raw_parts_mut(values_uninit.as_mut_ptr() as *mut T, item_count) 87 | } 88 | 89 | pub unsafe fn init_many<'a>( 90 | uninit: &'a mut [MaybeUninit], 91 | count: usize, 92 | mut f: impl FnMut() -> T, 93 | drop_list: &DropList, 94 | ) -> &'a mut [T] { 95 | debug_assert!(needs_drop::()); 96 | 97 | let (_, values_offset) = Layout::new::().extend(Layout::new::()).unwrap(); 98 | let (to_drop_uninit, values_uninit) = uninit.split_at_mut(values_offset); 99 | 100 | let to_drop_uninit = cast_buf::(&mut to_drop_uninit[..size_of::()]); 101 | let to_drop = to_drop_uninit.write(ToDrop { 102 | prev: drop_list.tail.get(), 103 | count: 0, 104 | drop: |_, _| {}, 105 | }); 106 | 107 | let (values_uninit, _) = cast_buf_array::(values_uninit); 108 | let values_uninit = &mut values_uninit[..count]; 109 | 110 | for i in 0..count { 111 | values_uninit[i].write(f()); 112 | } 113 | 114 | // Setup drop glue. 
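// The real drop fn and item count are written only after all `count` slots are
// initialized, and the node is linked into the drop list after that; if `f()`
// panics midway, the already-written items are leaked rather than risking a
// drop of uninitialized slots.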
115 | to_drop.drop = drop_glue::; 116 | to_drop.count = count; 117 | 118 | drop_list.tail.set(Some(NonNull::from(to_drop))); 119 | 120 | core::slice::from_raw_parts_mut(values_uninit.as_mut_ptr() as *mut T, count) 121 | } 122 | } 123 | 124 | pub struct DropList<'a> { 125 | tail: Cell>>, 126 | parent: Option<&'a Self>, 127 | } 128 | 129 | #[cfg(debug_assertions)] 130 | impl Drop for DropList<'_> { 131 | fn drop(&mut self) { 132 | assert!(self.tail.get().is_none()); 133 | } 134 | } 135 | 136 | impl DropList<'static> { 137 | #[inline(always)] 138 | pub const fn new() -> Self { 139 | DropList { 140 | tail: Cell::new(None), 141 | parent: None, 142 | } 143 | } 144 | 145 | #[inline(always)] 146 | pub fn fork<'a>(&'a mut self) -> DropList<'a> { 147 | DropList { 148 | tail: Cell::new(self.tail.get()), 149 | parent: Some(self), 150 | } 151 | } 152 | 153 | #[inline(always)] 154 | pub unsafe fn reset(&mut self) { 155 | debug_assert!(self.parent.is_none()); 156 | 157 | let mut tail = self.tail.get(); 158 | while let Some(ptr) = tail.take() { 159 | let to_drop = ptr.as_ref(); 160 | (to_drop.drop)(ptr, to_drop.count); 161 | tail = to_drop.prev; 162 | } 163 | 164 | self.tail.set(None); 165 | } 166 | } 167 | 168 | impl DropList<'_> { 169 | // Flushes drops added to the fork. 170 | #[inline(always)] 171 | pub unsafe fn flush_fork(&mut self) { 172 | use core::hint::unreachable_unchecked; 173 | 174 | debug_assert!( 175 | self.parent.is_some(), 176 | "Must be called only on non-root bucket list owned by `Scope`" 177 | ); 178 | 179 | match self.parent { 180 | None => unreachable_unchecked(), 181 | Some(parent) => { 182 | parent.tail.set(self.tail.get()); 183 | } 184 | } 185 | 186 | self.tail.set(None); 187 | } 188 | } 189 | 190 | struct ToDrop { 191 | prev: Option>, 192 | count: usize, 193 | drop: unsafe fn(NonNull, usize), 194 | } 195 | 196 | unsafe fn drop_glue(ptr: NonNull, count: usize) { 197 | // `ptr` is `ToDrop` field of `WithDrop` 198 | // `value` field is next to `ToDrop` field. 199 | 200 | let offset = Layout::new::() 201 | .extend(Layout::new::()) 202 | .unwrap() 203 | .1; 204 | 205 | drop_in_place(core::slice::from_raw_parts_mut( 206 | ptr.as_ptr().cast::().add(offset).cast::(), 207 | count, 208 | )) 209 | } 210 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! Scoped-Arena provides arena allocator with explicit scopes. 3 | //! 4 | //! ## Arena allocation 5 | //! 6 | //! Arena allocators are simple and provides ludicrously fast allocation.\ 7 | //! Basically allocation requires only increment of internal pointer in the memory block to alignment of allocated object and then to size of allocated object and that's it.\ 8 | //! When memory block is exhausted arena will allocate new bigger memory block.\ 9 | //! Then arena can be reset after all allocated objects are not used anymore, keeping only last memory block and reuse it.\ 10 | //! After several warmup iterations the only memory block is large enough to handle all allocations until next reset. 11 | //! 12 | //! 13 | //! ### Example 14 | //! 15 | //! ```rust 16 | //! use scoped_arena::Scope; 17 | //! 18 | //! struct Cat { 19 | //! name: String, 20 | //! hungry: bool, 21 | //! } 22 | //! 23 | //! /// Create new arena with `Global` allocator. 24 | //! let mut scope = Scope::new(); 25 | //! 26 | //! /// Construct a cat and move it to the scope. 27 | //! let cat: &mut Cat = scope.to_scope(Cat { 28 | //! 
name: "Fluffy".to_owned(), 29 | //! hungry: true, 30 | //! }); 31 | //! 32 | //! // Now `cat` is a mutable reference bound to scope borrow lifetime. 33 | //! 34 | //! assert_eq!(&cat.name, "Fluffy"); 35 | //! assert!(cat.hungry); 36 | //! 37 | //! cat.hungry = false; 38 | //! 39 | //! // This cat instance on scope will be automatically dropped when `scope` is dropped or reset. 40 | //! // It is impossible to reset before last usage of `cat`. 41 | //! 42 | //! // Next line will drop cat value and free memory occupied by it. 43 | //! scope.reset(); 44 | //! 45 | //! // If there were more cats or any other objects put on scope they all would be dropped and memory freed. 46 | //! ``` 47 | //! 48 | //! ## Scopes 49 | //! 50 | //! To reuse memory earlier this crates provides `Scope` with methods to create sub-`Scope`s.\ 51 | //! When sub-`Scope` is reset or dropped it will `Drop` all stored values and free memory allocated by the scope and flush last of new allocated memory block into parent.\ 52 | //! While objects allocated with parent `Scope` are unchanged and still valid. 53 | //! 54 | //! Well placed scopes can significantly reduce memory consumption.\ 55 | //! For example if few function calls use a lot of dynamic memory but don't need it to be available in caller\ 56 | //! they can be provided with sub-scope.\ 57 | //! At the same time any memory allocated in parent scope stays allocated. 58 | //! 59 | //! Creating sub-scope is cheap and allocating within sub-scope is as fast as allocating in parent scope.\ 60 | //! 61 | //! ### Example 62 | //! 63 | //! ```rust 64 | //! use scoped_arena::{Scope, ScopeProxy}; 65 | //! 66 | //! 67 | //! fn heavy_on_memory(mut scope: Scope<'_>, foobar: &String) { 68 | //! for _ in 0 .. 42 { 69 | //! let foobar: &mut String = scope.to_scope(foobar.clone()); 70 | //! } 71 | //! 72 | //! // new `scope` is dropped here and drops all allocated strings and frees memory. 73 | //! } 74 | //! 75 | //! let mut scope = Scope::new(); 76 | //! 77 | //! // Proxy is required to be friends with borrow checker. 78 | //! // Creating sub-scope must lock parent `Scope` from being used, which requires mutable borrow, but any allocation borrows `Scope`. 79 | //! // `Proxy` relaxes this a bit. `Proxy` borrows `Scope` mutably and tie allocated objects lifetime to scopes' borrow lifetime. 80 | //! // So sub-scope can borrow proxy mutably while there are objects allocated from it. 81 | //! let mut proxy = scope.proxy(); 82 | //! 83 | //! let foobar: &mut String = proxy.to_scope("foobar".to_owned()); 84 | //! 85 | //! // Make sub-scope for the call. 86 | //! heavy_on_memory(proxy.scope(), &*foobar); 87 | //! 88 | //! // If `heavy_on_memory` didn't trigger new memory object allocation in the scope, 89 | //! // sub-scope drop would rewind scope's internals to exactly the same state. 90 | //! // Otherwise last of new blocks will become current block in parent scope. 91 | //! // 92 | //! // Note that `foobar` is still alive. 93 | //! 94 | //! heavy_on_memory(proxy.scope(), &*foobar); 95 | //! heavy_on_memory(proxy.scope(), &*foobar); 96 | //! heavy_on_memory(proxy.scope(), &*foobar); 97 | //! heavy_on_memory(proxy.scope(), &*foobar); 98 | //! 99 | //! // Once peak memory consumption is reached, any number of `heavy_on_memory` calls would not require new memory blocks to be allocated. 100 | //! // Even `loop { heavy_on_memory(proxy.scope(), &*foobar) }` will settle on some big enough block. 101 | //! ``` 102 | //! 103 | //! ## Dropping 104 | //! 105 | //! 
`to_scope` and `try_to_scope` methods store drop-glue for values that `needs_drop`. 106 | //! On reset or drop scope iterates and properly drops all values. 107 | //! No drop-glue is added for types that doesn't need drop. `Scope` allocates enough memory and writes value there, no bookkeeping overhead. 108 | //! 109 | //! ## Iterator collecting 110 | //! 111 | //! `to_scope_from_iter` method acts as `to_scope` but works on iterators and returns slices. 112 | //! The limitation is that `to_scope_from_iter` need to allocate memory enough for upper bound of what iterator can yield. 113 | //! If upper bound is too large or iterator is unbounded it will always fail. 114 | //! One can use `try_to_scope_from_iter` so fail is `Err` and not panic. 115 | //! It is safe for iterator to yield more items then upper bound it reports, `to_scope_from_iter` would not iterate past upper bound. 116 | //! On success it returns mutable reference to slice with items from iterator in order. 117 | //! All values will be dropped on scope reset or drop, same as with `to_scope`. 118 | //! 119 | //! This method is especially useful to deal with API that requires slices (*glares at FFI*), collecting into temporary `Vec` would cost much more. 120 | //! 121 | 122 | #![no_std] 123 | #![cfg(any(feature = "allocator_api", feature = "alloc"))] 124 | #![cfg_attr(feature = "allocator_api", feature(allocator_api))] 125 | 126 | #[cfg(feature = "alloc")] 127 | extern crate alloc; 128 | 129 | mod allocator_api; 130 | mod bucket; 131 | mod drop; 132 | 133 | use core::{ 134 | alloc::Layout, 135 | fmt::{self, Debug}, 136 | iter::IntoIterator, 137 | mem::{align_of, needs_drop, MaybeUninit}, 138 | ptr::{self, write, NonNull}, 139 | slice, 140 | }; 141 | 142 | #[cfg(all(not(no_global_oom_handling), feature = "alloc"))] 143 | use alloc::alloc::handle_alloc_error; 144 | 145 | use self::{ 146 | bucket::Buckets, 147 | drop::{DropList, WithDrop}, 148 | }; 149 | 150 | use self::allocator_api::{AllocError, Allocator}; 151 | 152 | #[cfg(feature = "alloc")] 153 | use self::allocator_api::Global; 154 | 155 | /// Scope associated with `Scope` allocator. 156 | /// Allows placing values on the scope returning reference bound to scope borrow. 157 | /// On drop scope drops all values placed onto it. 158 | /// On drop scope frees all memory allocated from it. 159 | #[cfg(not(feature = "alloc"))] 160 | pub struct Scope<'arena, A: Allocator> { 161 | buckets: Buckets<'arena>, 162 | alloc: &'arena A, 163 | drop_list: DropList<'static>, 164 | } 165 | 166 | /// Scope associated with `Scope` allocator. 167 | /// Allows placing values on the scope returning reference bound to scope borrow. 168 | /// On drop scope drops all values placed onto it. 169 | /// On drop scope frees all memory allocated from it. 
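///
/// # Example
///
/// A minimal usage sketch with the default `Global` allocator (requires the
/// default `alloc` feature):
///
/// ```
/// use scoped_arena::Scope;
///
/// let mut scope = Scope::new();
///
/// // Move a value onto the scope; the returned reference borrows the scope.
/// let value: &mut u32 = scope.to_scope(42);
/// assert_eq!(*value, 42);
///
/// // Resetting (or dropping) the scope drops stored values and reuses the memory.
/// scope.reset();
/// ```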
170 | #[cfg(feature = "alloc")] 171 | pub struct Scope<'arena, A: Allocator = Global> { 172 | buckets: Buckets<'arena>, 173 | alloc: A, 174 | drop_list: DropList<'static>, 175 | } 176 | 177 | impl Debug for Scope<'_, A> 178 | where 179 | A: Allocator, 180 | { 181 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 182 | f.debug_struct("Scope") 183 | .field("buckets", &self.buckets) 184 | .finish_non_exhaustive() 185 | } 186 | } 187 | 188 | impl Drop for Scope<'_, A> 189 | where 190 | A: Allocator, 191 | { 192 | #[inline(always)] 193 | fn drop(&mut self) { 194 | unsafe { 195 | self.drop_list.reset(); 196 | self.buckets.reset(&self.alloc, false); 197 | } 198 | } 199 | } 200 | 201 | #[cfg(feature = "alloc")] 202 | impl Scope<'_, Global> { 203 | /// Returns new instance of arena allocator based on [`Global`] allocator. 204 | #[inline(always)] 205 | pub fn new() -> Self { 206 | Scope::new_in(Global) 207 | } 208 | 209 | /// Returns new instance of arena allocator based on [`Global`] allocator 210 | /// with preallocated capacity in bytes. 211 | #[inline(always)] 212 | pub fn with_capacity(capacity: usize) -> Self { 213 | Scope::with_capacity_in(capacity, Global) 214 | } 215 | } 216 | 217 | impl Scope<'_, A> 218 | where 219 | A: Allocator, 220 | { 221 | /// Returns new instance of arena allocator based on provided allocator. 222 | #[inline(always)] 223 | pub fn new_in(alloc: A) -> Self { 224 | Scope::with_capacity_in(0, alloc) 225 | } 226 | 227 | /// Returns new instance of arena allocator based on provided allocator 228 | /// with preallocated capacity in bytes. 229 | #[inline(always)] 230 | pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { 231 | Scope { 232 | buckets: Buckets::new(capacity, &alloc).expect(ALLOCATOR_CAPACITY_OVERFLOW), 233 | alloc, 234 | drop_list: DropList::new(), 235 | } 236 | } 237 | } 238 | 239 | impl Scope<'_, A> 240 | where 241 | A: Allocator, 242 | { 243 | #[inline(always)] 244 | pub fn reset(&mut self) { 245 | unsafe { 246 | self.drop_list.reset(); 247 | self.buckets.reset(&self.alloc, true); 248 | } 249 | } 250 | 251 | /// Allocates a block of memory. 252 | /// Returns a [`&mut [MaybeUninit]`] meeting the size and alignment guarantees of layout. 253 | /// Actual size of the returned size MAY be larger than requested. 254 | /// The returned block should be initialized before use. 255 | /// 256 | /// Returned block will be deallocated when scope is dropped. 257 | #[cfg(all(not(no_global_oom_handling), feature = "alloc"))] 258 | #[inline(always)] 259 | pub fn alloc(&self, layout: Layout) -> &mut [MaybeUninit] { 260 | match self.try_alloc(layout) { 261 | Ok(buf) => buf, 262 | Err(_) => handle_alloc_error(layout), 263 | } 264 | } 265 | 266 | /// Attempts to allocate a block of memory. 267 | /// On success, returns a [`&mut [MaybeUninit]`] meeting the size and alignment guarantees of layout. 268 | /// Actual size of the returned size MAY be larger than requested. 269 | /// The returned block should be initialized before use. 270 | /// 271 | /// Returned block will be deallocated when scope is dropped. 272 | /// 273 | /// # Errors 274 | /// 275 | /// Returning `Err` indicates that memory is exhausted. 276 | #[inline(always)] 277 | pub fn try_alloc(&self, layout: Layout) -> Result<&mut [MaybeUninit], AllocError> { 278 | unsafe { self.buckets.allocate(layout, &self.alloc) } 279 | } 280 | 281 | /// Allocates a block of memory. 282 | /// Returns a [`&mut [u8]`] meeting the size and alignment guarantees of layout. 
283 | /// Actual size of the returned size MAY be larger than requested. 284 | /// The returned block contents is zero-initialized. 285 | /// 286 | /// Returned block will be deallocated when scope is dropped. 287 | #[cfg(all(not(no_global_oom_handling), feature = "alloc"))] 288 | #[inline(always)] 289 | pub fn alloc_zeroed(&self, layout: Layout) -> &mut [u8] { 290 | match self.try_alloc_zeroed(layout) { 291 | Ok(buf) => buf, 292 | Err(_) => handle_alloc_error(layout), 293 | } 294 | } 295 | 296 | /// Attempts to allocate a block of memory. 297 | /// On success, returns a [`&mut [u8]`] meeting the size and alignment guarantees of layout. 298 | /// Actual size of the returned size MAY be larger than requested. 299 | /// The returned block contents is zero-initialized. 300 | /// 301 | /// Returned block will be deallocated when scope is dropped. 302 | /// 303 | /// # Errors 304 | /// 305 | /// Returning `Err` indicates that memory is exhausted. 306 | #[inline(always)] 307 | pub fn try_alloc_zeroed(&self, layout: Layout) -> Result<&mut [u8], AllocError> { 308 | let buf = unsafe { self.buckets.allocate(layout, &self.alloc) }?; 309 | 310 | let buf = unsafe { 311 | // Zeroing bytes buffer should be safe. 312 | ptr::write_bytes(buf.as_mut_ptr(), 0, buf.len()); 313 | 314 | // Zero-initialized. 315 | slice::from_raw_parts_mut(buf.as_mut_ptr() as *mut u8, buf.len()) 316 | }; 317 | 318 | Ok(buf) 319 | } 320 | 321 | /// Move value onto the scope. 322 | /// Returns mutable reference to value with lifetime equal to scope borrow lifetime. 323 | /// Value on scope will be dropped when scope is dropped. 324 | /// 325 | /// This method is as cheap as allocation if value does not needs dropping as reported by [`core::mem::needs_drop`]. 326 | #[cfg(all(not(no_global_oom_handling), feature = "alloc"))] 327 | #[inline(always)] 328 | pub fn to_scope(&self, value: T) -> &mut T { 329 | self.to_scope_with(|| value) 330 | } 331 | 332 | /// Places value returned from function onto the scope. 333 | /// Returns mutable reference to value with lifetime equal to scope borrow lifetime. 334 | /// Value on scope will be dropped when scope is dropped. 335 | /// 336 | /// This method is as cheap as allocation if value does not needs dropping as reported by [`core::mem::needs_drop`]. 337 | #[cfg(all(not(no_global_oom_handling), feature = "alloc"))] 338 | #[inline(always)] 339 | pub fn to_scope_with(&self, f: F) -> &mut T 340 | where 341 | F: FnOnce() -> T, 342 | { 343 | match self.try_to_scope_with(f) { 344 | Ok(value) => value, 345 | Err(_) => handle_alloc_error(Layout::new::()), 346 | } 347 | } 348 | 349 | /// Tries to move value onto the scope. 350 | /// On success, returns mutable reference to value with lifetime equal to scope borrow lifetime. 351 | /// Value on scope will be dropped when scope is dropped. 352 | /// 353 | /// This method is as cheap as allocation if value does not needs dropping as reported by [`core::mem::needs_drop`]. 354 | /// 355 | /// # Errors 356 | /// 357 | /// Returning `Err` indicates that memory is exhausted. 358 | /// Returning `Err` contains original value. 359 | #[inline(always)] 360 | pub fn try_to_scope(&self, value: T) -> Result<&mut T, (AllocError, T)> { 361 | self.try_to_scope_with(|| value) 362 | .map_err(|(err, f)| (err, f())) 363 | } 364 | 365 | /// Tries to place value return from function onto the scope. 366 | /// On success, returns mutable reference to value with lifetime equal to scope borrow lifetime. 367 | /// Value on scope will be dropped when scope is dropped. 
    #[inline(always)]
    pub fn try_to_scope<T>(&self, value: T) -> Result<&mut T, (AllocError, T)> {
        self.try_to_scope_with(|| value)
            .map_err(|(err, f)| (err, f()))
    }

    /// Tries to place value returned from function onto the scope.
    /// On success, returns mutable reference to value with lifetime equal to scope borrow lifetime.
    /// Value on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    /// The returned `Err` contains the original closure.
    #[inline(always)]
    pub fn try_to_scope_with<F, T>(&self, f: F) -> Result<&mut T, (AllocError, F)>
    where
        F: FnOnce() -> T,
    {
        try_to_scope_with(|layout| self.try_alloc(layout), &self.drop_list, f)
    }

    /// Moves values from iterator onto the scope.
    /// Returns mutable reference to slice with lifetime equal to scope borrow lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if values do not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// This method allocates memory to hold iterator's upper bound number of items. See [`core::iter::Iterator::size_hint`].
    /// It will not consume more items.
    /// This method will always fail for iterators without an upper bound.
    #[cfg(all(not(no_global_oom_handling), feature = "alloc"))]
    #[inline(always)]
    pub fn to_scope_from_iter<T, I>(&self, iter: I) -> &mut [T]
    where
        I: IntoIterator<Item = T>,
    {
        let too_large_layout = unsafe {
            Layout::from_size_align_unchecked(usize::MAX - align_of::<T>(), align_of::<T>())
        };
        let iter = iter.into_iter();
        let upper_bound = iter
            .size_hint()
            .1
            .unwrap_or_else(|| handle_alloc_error(too_large_layout));

        match self.try_to_scope_from_iter(iter) {
            Ok(slice) => slice,
            Err(_) => {
                handle_alloc_error(Layout::array::<T>(upper_bound).unwrap_or(too_large_layout))
            }
        }
    }

    /// Tries to move values from iterator onto the scope.
    /// On success, returns mutable reference to slice with lifetime equal to scope borrow lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if values do not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// This method allocates memory to hold iterator's upper bound number of items. See [`core::iter::Iterator::size_hint`].
    /// It will not consume more items.
    /// This method will always fail for iterators without an upper bound.
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    /// The returned `Err` contains the original iterator.
    #[inline(always)]
    pub fn try_to_scope_from_iter<T, I>(
        &self,
        iter: I,
    ) -> Result<&mut [T], (AllocError, I::IntoIter)>
    where
        I: IntoIterator<Item = T>,
    {
        try_to_scope_from_iter(|layout| self.try_alloc(layout), &self.drop_list, iter)
    }

    /// Puts multiple clones of the value onto the scope.
    /// Returns mutable reference to slice with lifetime equal to scope borrow lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
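    ///
    /// # Example
    ///
    /// Illustrative sketch (assumes the `alloc` feature is enabled):
    ///
    /// ```rust
    /// use scoped_arena::Scope;
    ///
    /// let scope = Scope::new();
    /// let fives: &[u32] = scope.to_scope_many(3, 5u32);
    /// assert_eq!(fives, &[5, 5, 5]);
    /// ```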
    #[inline(always)]
    pub fn to_scope_many<T>(&self, count: usize, value: T) -> &mut [T]
    where
        T: Clone,
    {
        let too_large_layout = unsafe {
            Layout::from_size_align_unchecked(usize::MAX - align_of::<T>(), align_of::<T>())
        };
        match self.try_to_scope_many(count, value) {
            Ok(slice) => slice,
            Err(_) => handle_alloc_error(Layout::array::<T>(count).unwrap_or(too_large_layout)),
        }
    }

    /// Tries to put multiple clones of the value onto the scope.
    /// On success, returns mutable reference to slice with lifetime equal to scope borrow lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    #[inline(always)]
    pub fn try_to_scope_many<T>(&self, count: usize, value: T) -> Result<&mut [T], AllocError>
    where
        T: Clone,
    {
        self.try_to_scope_many_with(count, || value.clone())
    }

    /// Puts multiple values created by calls to the specified function onto the scope.
    /// Returns mutable reference to slice with lifetime equal to scope borrow lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    #[inline(always)]
    pub fn to_scope_many_with<T, F>(&self, count: usize, f: F) -> &mut [T]
    where
        F: FnMut() -> T,
    {
        let too_large_layout = unsafe {
            Layout::from_size_align_unchecked(usize::MAX - align_of::<T>(), align_of::<T>())
        };
        match self.try_to_scope_many_with(count, f) {
            Ok(slice) => slice,
            Err(_) => handle_alloc_error(Layout::array::<T>(count).unwrap_or(too_large_layout)),
        }
    }

    /// Tries to put multiple values created by calls to the specified function onto the scope.
    /// On success, returns mutable reference to slice with lifetime equal to scope borrow lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    #[inline(always)]
    pub fn try_to_scope_many_with<T, F>(&self, count: usize, f: F) -> Result<&mut [T], AllocError>
    where
        F: FnMut() -> T,
    {
        try_to_scope_many_with(|layout| self.try_alloc(layout), &self.drop_list, count, f)
    }

    /// Reports total memory allocated from the underlying allocator by the associated arena.
    #[inline(always)]
    pub fn total_memory_usage(&self) -> usize {
        self.buckets.total_memory_usage()
    }

    /// Creates scope proxy bound to the scope.
    /// Any objects allocated through the proxy will be attached to the scope.
    /// Returned proxy will use a reference to the underlying allocator.
    #[inline(always)]
    pub fn proxy_ref<'a>(&'a mut self) -> ScopeProxy<'a, &'a A> {
        ScopeProxy {
            buckets: self.buckets.fork(),
            alloc: &self.alloc,
            drop_list: self.drop_list.fork(),
        }
    }
}

impl<A> Scope<'_, A>
where
    A: Allocator + Clone,
{
    /// Creates scope proxy bound to the scope.
    /// Any objects allocated through the proxy will be attached to the scope.
    /// Returned proxy will use a clone of the underlying allocator.
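    ///
    /// # Example
    ///
    /// A sketch of the intended pattern (assumes the `alloc` feature is enabled):
    ///
    /// ```rust
    /// use scoped_arena::Scope;
    ///
    /// let mut scope = Scope::new();
    /// let mut proxy = scope.proxy();
    ///
    /// // Values allocated through the proxy live as long as the proxy's scope.
    /// let kept: &u32 = proxy.to_scope(1u32);
    ///
    /// {
    ///     // A sub-scope created from the proxy drops its own values when it is dropped,
    ///     // while values already attached to the parent scope stay alive.
    ///     let sub = proxy.scope();
    ///     let tmp: &u32 = sub.to_scope(2u32);
    ///     assert_eq!(*tmp, 2);
    /// }
    ///
    /// assert_eq!(*kept, 1);
    /// ```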
    #[inline(always)]
    pub fn proxy<'a>(&'a mut self) -> ScopeProxy<'a, A> {
        ScopeProxy {
            buckets: self.buckets.fork(),
            alloc: self.alloc.clone(),
            drop_list: self.drop_list.fork(),
        }
    }
}

/// Proxy for `Scope` which allocates memory bound to the scope lifetime and not to the proxy itself.
/// This allows creating sub-scopes while keeping references to scoped values.
/// Does not free memory and does not drop values moved onto the scope when dropped.
/// The parent `Scope` will do this.
#[cfg(not(feature = "alloc"))]
pub struct ScopeProxy<'scope, A: Allocator> {
    buckets: Buckets<'scope>,
    alloc: &'scope A,
    drop_list: DropList<'scope>,
}

/// Proxy for `Scope` which allocates memory bound to the scope lifetime and not to the proxy itself.
/// This allows creating sub-scopes while keeping references to scoped values.
/// Does not free memory and does not drop values moved onto the scope when dropped.
/// The parent `Scope` will do this.
#[cfg(feature = "alloc")]
pub struct ScopeProxy<'scope, A: Allocator = Global> {
    buckets: Buckets<'scope>,
    alloc: A,
    drop_list: DropList<'scope>,
}

impl<A> Debug for ScopeProxy<'_, A>
where
    A: Allocator,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ScopeProxy")
            .field("buckets", &self.buckets)
            .finish_non_exhaustive()
    }
}

impl<A> Drop for ScopeProxy<'_, A>
where
    A: Allocator,
{
    #[inline(always)]
    fn drop(&mut self) {
        unsafe {
            self.drop_list.flush_fork();
            self.buckets.flush_fork();
        }
    }
}

impl<'scope, A> ScopeProxy<'scope, A>
where
    A: Allocator,
{
    /// Allocates a block of memory.
    /// Returns a [`&mut [MaybeUninit<u8>]`] meeting the size and alignment guarantees of layout.
    /// The returned block should be initialized before use.
    ///
    /// Returned block will be deallocated when scope is dropped.
    #[cfg(all(not(no_global_oom_handling), feature = "alloc"))]
    #[inline(always)]
    pub fn alloc(&self, layout: Layout) -> &'scope mut [MaybeUninit<u8>] {
        match self.try_alloc(layout) {
            Ok(buf) => buf,
            Err(_) => handle_alloc_error(layout),
        }
    }

    /// Attempts to allocate a block of memory.
    /// On success, returns a [`&mut [MaybeUninit<u8>]`] meeting the size and alignment guarantees of layout.
    /// The returned block should be initialized before use.
    ///
    /// Returned block will be deallocated when scope is dropped.
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
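    ///
    /// # Example
    ///
    /// Illustrative sketch of fallible allocation through a proxy (assumes the `alloc` feature is enabled):
    ///
    /// ```rust
    /// use core::alloc::Layout;
    /// use scoped_arena::Scope;
    ///
    /// let mut scope = Scope::new();
    /// let proxy = scope.proxy();
    ///
    /// if let Ok(buf) = proxy.try_alloc(Layout::new::<u64>()) {
    ///     assert!(buf.len() >= core::mem::size_of::<u64>());
    /// }
    /// ```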
    #[inline(always)]
    pub fn try_alloc(&self, layout: Layout) -> Result<&'scope mut [MaybeUninit<u8>], AllocError> {
        unsafe { self.buckets.allocate(layout, &self.alloc) }
    }

    /// Allocates a block of memory.
    /// Returns a [`&mut [u8]`] meeting the size and alignment guarantees of layout.
    /// The returned block's contents are zero-initialized.
    ///
    /// Returned block will be deallocated when scope is dropped.
    #[cfg(all(not(no_global_oom_handling), feature = "alloc"))]
    #[inline(always)]
    pub fn alloc_zeroed(&self, layout: Layout) -> &mut [u8] {
        match self.try_alloc_zeroed(layout) {
            Ok(buf) => buf,
            Err(_) => handle_alloc_error(layout),
        }
    }

    /// Attempts to allocate a block of memory.
    /// On success, returns a [`&mut [u8]`] meeting the size and alignment guarantees of layout.
    /// The returned block's contents are zero-initialized.
    ///
    /// Returned block will be deallocated when scope is dropped.
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    #[inline(always)]
    pub fn try_alloc_zeroed(&self, layout: Layout) -> Result<&mut [u8], AllocError> {
        let buf = unsafe { self.buckets.allocate(layout, &self.alloc) }?;

        let buf = unsafe {
            // Zeroing a plain byte buffer is safe.
            ptr::write_bytes(buf.as_mut_ptr(), 0, buf.len());

            // Zero-initialized.
            slice::from_raw_parts_mut(buf.as_mut_ptr() as *mut u8, buf.len())
        };

        Ok(buf)
    }

    /// Moves value onto the scope.
    /// Returns mutable reference to value with lifetime equal to 'scope lifetime.
    /// Value on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    #[cfg(all(not(no_global_oom_handling), feature = "alloc"))]
    #[inline(always)]
    pub fn to_scope<T>(&self, value: T) -> &'scope mut T {
        self.to_scope_with(|| value)
    }

    /// Places value returned from function onto the scope.
    /// Returns mutable reference to value with lifetime equal to 'scope lifetime.
    /// Value on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    #[cfg(all(not(no_global_oom_handling), feature = "alloc"))]
    #[inline(always)]
    pub fn to_scope_with<F, T>(&self, f: F) -> &'scope mut T
    where
        F: FnOnce() -> T,
    {
        match self.try_to_scope_with(f) {
            Ok(value) => value,
            Err(_) => handle_alloc_error(Layout::new::<T>()),
        }
    }

    /// Tries to move value onto the scope.
    /// On success, returns mutable reference to value with lifetime equal to 'scope lifetime.
    /// Value on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    /// The returned `Err` contains the original value.
    #[inline(always)]
    pub fn try_to_scope<T>(&self, value: T) -> Result<&'scope mut T, (AllocError, T)> {
        self.try_to_scope_with(|| value)
            .map_err(|(err, f)| (err, f()))
    }

    /// Tries to place value returned from function onto the scope.
    /// On success, returns mutable reference to value with lifetime equal to 'scope lifetime.
    /// Value on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    /// The returned `Err` contains the original closure.
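    ///
    /// # Example
    ///
    /// Illustrative sketch (assumes the `alloc` feature is enabled):
    ///
    /// ```rust
    /// use scoped_arena::Scope;
    ///
    /// let mut scope = Scope::new();
    /// let proxy = scope.proxy();
    ///
    /// let len = proxy
    ///     .try_to_scope_with(|| vec![1, 2, 3])
    ///     .map(|values| values.len())
    ///     .unwrap_or(0);
    /// assert_eq!(len, 3);
    /// ```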
    #[inline(always)]
    pub fn try_to_scope_with<F, T>(&self, f: F) -> Result<&'scope mut T, (AllocError, F)>
    where
        F: FnOnce() -> T,
    {
        try_to_scope_with(|layout| self.try_alloc(layout), &self.drop_list, f)
    }

    /// Moves values from iterator onto the scope.
    /// Returns mutable reference to slice with lifetime equal to 'scope lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if values do not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// This method allocates memory to hold iterator's upper bound number of items. See [`core::iter::Iterator::size_hint`].
    /// It will not consume more items.
    /// This method will always fail for iterators without an upper bound.
    #[cfg(all(not(no_global_oom_handling), feature = "alloc"))]
    #[inline(always)]
    pub fn to_scope_from_iter<T, I>(&self, iter: I) -> &'scope mut [T]
    where
        I: IntoIterator<Item = T>,
    {
        let too_large_layout = unsafe {
            Layout::from_size_align_unchecked(usize::MAX - align_of::<T>(), align_of::<T>())
        };
        let iter = iter.into_iter();
        let upper_bound = iter
            .size_hint()
            .1
            .unwrap_or_else(|| handle_alloc_error(too_large_layout));

        match self.try_to_scope_from_iter(iter) {
            Ok(slice) => slice,
            Err(_) => {
                handle_alloc_error(Layout::array::<T>(upper_bound).unwrap_or(too_large_layout))
            }
        }
    }

    /// Tries to move values from iterator onto the scope.
    /// On success, returns mutable reference to slice with lifetime equal to 'scope lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if values do not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// This method allocates memory to hold iterator's upper bound number of items. See [`core::iter::Iterator::size_hint`].
    /// It will not consume more items.
    /// This method will always fail for iterators without an upper bound.
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    /// The returned `Err` contains the original iterator.
    #[inline(always)]
    pub fn try_to_scope_from_iter<T, I>(
        &self,
        iter: I,
    ) -> Result<&'scope mut [T], (AllocError, I::IntoIter)>
    where
        I: IntoIterator<Item = T>,
    {
        try_to_scope_from_iter(|layout| self.try_alloc(layout), &self.drop_list, iter)
    }

    /// Puts multiple clones of the value onto the scope.
    /// Returns mutable reference to slice with lifetime equal to 'scope lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    #[inline(always)]
    pub fn to_scope_many<T>(&self, count: usize, value: T) -> &'scope mut [T]
    where
        T: Clone,
    {
        let too_large_layout = unsafe {
            Layout::from_size_align_unchecked(usize::MAX - align_of::<T>(), align_of::<T>())
        };
        match self.try_to_scope_many(count, value) {
            Ok(slice) => slice,
            Err(_) => handle_alloc_error(Layout::array::<T>(count).unwrap_or(too_large_layout)),
        }
    }

    /// Tries to put multiple clones of the value onto the scope.
    /// On success, returns mutable reference to slice with lifetime equal to 'scope lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
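    ///
    /// # Example
    ///
    /// Illustrative sketch (assumes the `alloc` feature is enabled):
    ///
    /// ```rust
    /// use scoped_arena::Scope;
    ///
    /// let mut scope = Scope::new();
    /// let proxy = scope.proxy();
    ///
    /// if let Ok(zeros) = proxy.try_to_scope_many(4, 0u8) {
    ///     assert_eq!(zeros.len(), 4);
    ///     assert!(zeros.iter().all(|&b| b == 0));
    /// }
    /// ```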
    #[inline(always)]
    pub fn try_to_scope_many<T>(
        &self,
        count: usize,
        value: T,
    ) -> Result<&'scope mut [T], AllocError>
    where
        T: Clone,
    {
        self.try_to_scope_many_with(count, || value.clone())
    }

    /// Puts multiple values created by calls to the specified function onto the scope.
    /// Returns mutable reference to slice with lifetime equal to 'scope lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    #[inline(always)]
    pub fn to_scope_many_with<T, F>(&self, count: usize, f: F) -> &'scope mut [T]
    where
        F: FnMut() -> T,
    {
        let too_large_layout = unsafe {
            Layout::from_size_align_unchecked(usize::MAX - align_of::<T>(), align_of::<T>())
        };
        match self.try_to_scope_many_with(count, f) {
            Ok(slice) => slice,
            Err(_) => handle_alloc_error(Layout::array::<T>(count).unwrap_or(too_large_layout)),
        }
    }

    /// Tries to put multiple values created by calls to the specified function onto the scope.
    /// On success, returns mutable reference to slice with lifetime equal to 'scope lifetime.
    /// Values on scope will be dropped when scope is dropped.
    ///
    /// This method is as cheap as allocation if value does not need dropping as reported by [`core::mem::needs_drop`].
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that memory is exhausted.
    #[inline(always)]
    pub fn try_to_scope_many_with<T, F>(
        &self,
        count: usize,
        f: F,
    ) -> Result<&'scope mut [T], AllocError>
    where
        F: FnMut() -> T,
    {
        try_to_scope_many_with(|layout| self.try_alloc(layout), &self.drop_list, count, f)
    }

    /// Reports total memory allocated from the underlying allocator by the associated arena.
    #[inline(always)]
    pub fn total_memory_usage(&self) -> usize {
        self.buckets.total_memory_usage()
    }

    /// Creates a new scope which inherits from the proxy's scope.
    /// The proxy becomes locked until the returned scope is dropped.
    /// Returned scope will use a reference to the underlying allocator.
    #[inline(always)]
    pub fn scope_ref<'a>(&'a mut self) -> Scope<'a, &'a A> {
        Scope {
            buckets: self.buckets.fork(),
            alloc: &self.alloc,
            drop_list: DropList::new(),
        }
    }
}

impl<A> ScopeProxy<'_, A>
where
    A: Allocator + Clone,
{
    /// Creates a new scope which inherits from the proxy's scope.
    /// The proxy becomes locked until the returned scope is dropped.
    /// Returned scope will use a clone of the underlying allocator.
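    ///
    /// # Example
    ///
    /// A sketch of the sub-scope pattern (assumes the `alloc` feature is enabled):
    ///
    /// ```rust
    /// use scoped_arena::Scope;
    ///
    /// let mut scope = Scope::new();
    /// let mut proxy = scope.proxy();
    ///
    /// for i in 0..3u32 {
    ///     // Each iteration gets a fresh sub-scope; its values are dropped at the end of the iteration.
    ///     let sub = proxy.scope();
    ///     let value = sub.to_scope(i);
    ///     assert_eq!(*value, i);
    /// }
    /// ```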
    #[inline(always)]
    pub fn scope<'a>(&'a mut self) -> Scope<'a, A> {
        Scope {
            buckets: self.buckets.fork(),
            alloc: self.alloc.clone(),
            drop_list: DropList::new(),
        }
    }
}

const ALLOCATOR_CAPACITY_OVERFLOW: &'static str = "Allocator capacity overflow";

unsafe impl<A> Allocator for &'_ Scope<'_, A>
where
    A: Allocator,
{
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let buf = self.try_alloc(layout)?;
        let ptr = unsafe {
            NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                buf.as_mut_ptr() as *mut u8,
                buf.len(),
            ))
        };
        Ok(ptr)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {
        // Will be deallocated on scope drop.
    }

    #[cfg(feature = "allocator_api")]
    #[inline(always)]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        // Returns the same memory unchanged.
        // This is valid behavior as a change in layout won't affect deallocation,
        // and for the `grow{_zeroed}` methods a new layout with smaller size only affects the number of bytes copied.
        Ok(NonNull::new_unchecked(core::slice::from_raw_parts_mut(
            ptr.as_ptr(),
            old_layout.size(),
        )))
    }
}

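// Like `&Scope`, `ScopeProxy` forwards allocation requests to the arena buckets.
// `deallocate` is a no-op: memory is reclaimed in bulk when the parent `Scope` is reset or dropped.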
unsafe impl<A> Allocator for ScopeProxy<'_, A>
where
    A: Allocator,
{
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let buf = self.try_alloc(layout)?;
        let ptr = unsafe {
            NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(
                buf.as_mut_ptr() as *mut u8,
                buf.len(),
            ))
        };
        Ok(ptr)
    }

    #[inline(always)]
    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {
        // Will be deallocated on scope drop.
    }

    #[cfg(feature = "allocator_api")]
    #[inline(always)]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        // Returns the same memory unchanged.
        // This is valid behavior as a change in layout won't affect deallocation,
        // and for the `grow{_zeroed}` methods a new layout with smaller size only affects the number of bytes copied.
        Ok(NonNull::new_unchecked(core::slice::from_raw_parts_mut(
            ptr.as_ptr(),
            old_layout.size(),
        )))
    }
}

#[inline(always)]
fn try_to_scope_with<'a, F, T>(
    try_alloc: impl FnOnce(Layout) -> Result<&'a mut [MaybeUninit<u8>], AllocError>,
    drop_list: &DropList,
    f: F,
) -> Result<&'a mut T, (AllocError, F)>
where
    F: FnOnce() -> T,
{
    if needs_drop::<T>() {
        match try_alloc(Layout::new::<WithDrop<T>>()) {
            Ok(buf) => {
                let value = unsafe { WithDrop::init(buf, f(), drop_list) };
                Ok(value)
            }
            Err(err) => Err((err, f)),
        }
    } else {
        match try_alloc(Layout::new::<T>()) {
            Ok(buf) => {
                let uninit = unsafe { cast_buf(buf) };
                unsafe { write(uninit.as_mut_ptr(), f()) };
                Ok(unsafe { uninit.assume_init_mut() })
            }
            Err(err) => Err((err, f)),
        }
    }
}

fn try_to_scope_from_iter<'a, T, I>(
    try_alloc: impl FnOnce(Layout) -> Result<&'a mut [MaybeUninit<u8>], AllocError>,
    drop_list: &DropList,
    iter: I,
) -> Result<&'a mut [T], (AllocError, I::IntoIter)>
where
    I: IntoIterator<Item = T>,
{
    let iter = iter.into_iter();
    let upper_bound = match iter.size_hint().1 {
        Some(upper_bound) => upper_bound,
        None => return Err((AllocError, iter)),
    };

    if needs_drop::<T>() {
        match WithDrop::<T>::array_layout(upper_bound) {
            Some(layout) => match try_alloc(layout) {
                Ok(buf) => {
                    let slice = unsafe { WithDrop::init_iter(buf, iter, drop_list) };
                    Ok(slice)
                }
                Err(err) => Err((err, iter)),
            },
            None => Err((AllocError, iter)),
        }
    } else {
        match Layout::array::<T>(upper_bound) {
            Ok(layout) => match try_alloc(layout) {
                Ok(buf) => {
                    let (uninit, _) = unsafe {
                        // Buffer with layout for `[T; upper_bound]` was requested.
                        cast_buf_array::<T>(buf)
                    };

                    let item_count = iter.take(uninit.len()).fold(0, |idx, item| {
                        uninit[idx].write(item);
                        idx + 1
                    });

                    let slice = unsafe {
                        // First `item_count` elements of the array were initialized from the iterator.
                        core::slice::from_raw_parts_mut(uninit.as_mut_ptr() as *mut T, item_count)
                    };
                    Ok(slice)
                }
                Err(err) => Err((err, iter)),
            },
            Err(_) => Err((AllocError, iter)),
        }
    }
}

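// Shared helper behind `to_scope_many{_with}` on both `Scope` and `ScopeProxy`.
// As in the helpers above, the drop-tracked (`WithDrop`) path is taken only when `T`
// actually needs dropping; otherwise values are written directly into the raw buffer.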
fn try_to_scope_many_with<'a, T>(
    try_alloc: impl FnOnce(Layout) -> Result<&'a mut [MaybeUninit<u8>], AllocError>,
    drop_list: &DropList,
    count: usize,
    mut f: impl FnMut() -> T,
) -> Result<&'a mut [T], AllocError> {
    if needs_drop::<T>() {
        match WithDrop::<T>::array_layout(count) {
            Some(layout) => match try_alloc(layout) {
                Ok(buf) => {
                    let slice = unsafe { WithDrop::init_many(buf, count, f, drop_list) };
                    Ok(slice)
                }
                Err(err) => Err(err),
            },
            None => Err(AllocError),
        }
    } else {
        match Layout::array::<T>(count) {
            Ok(layout) => match try_alloc(layout) {
                Ok(buf) => {
                    let (uninit, _) = unsafe {
                        // Buffer with layout for `[T; count]` was requested.
                        cast_buf_array::<T>(buf)
                    };

                    for i in 0..count {
                        uninit[i].write(f());
                    }

                    let slice = unsafe {
                        // First `count` elements of the array were initialized by calls to `f`.
                        core::slice::from_raw_parts_mut(uninit.as_mut_ptr() as *mut T, count)
                    };
                    Ok(slice)
                }
                Err(err) => Err(err),
            },
            Err(_) => Err(AllocError),
        }
    }
}

unsafe fn cast_buf<T>(buf: &mut [MaybeUninit<u8>]) -> &mut MaybeUninit<T> {
    let layout = Layout::new::<T>();
    debug_assert_eq!(0, buf.as_mut_ptr() as usize % layout.align());
    debug_assert!(buf.len() >= layout.size());
    &mut *(buf.as_mut_ptr() as *mut MaybeUninit<T>)
}

unsafe fn cast_buf_array<T>(
    buf: &mut [MaybeUninit<u8>],
) -> (&mut [MaybeUninit<T>], &mut [MaybeUninit<u8>]) {
    let layout = Layout::new::<T>();
    debug_assert_eq!(0, buf.as_mut_ptr() as usize % layout.align());
    let len = buf.len() / layout.size();

    let (head, tail) = buf.split_at_mut(len * layout.size());
    let head = slice::from_raw_parts_mut(head.as_mut_ptr() as *mut MaybeUninit<T>, len);
    (head, tail)
}

/// An extension trait that provides a postfix version of `Scope::to_scope_from_iter`.
/// This may lead to more readable code in some instances.
pub trait CollectIntoScope<T> {
    /// A postfix version of `Scope::to_scope_from_iter`.
    /// Analogous to `Iterator::collect`.
    /// # Examples
    /// ```rust
    /// use scoped_arena::Scope;
    /// use scoped_arena::CollectIntoScope;
    ///
    /// let scope = Scope::new();
    ///
    /// let a = [1, 2, 3];
    ///
    /// let doubled: &[i32] = a.iter().map(|&x| x * 2).collect_into_scope(&scope);
    ///
    /// assert_eq!(&[2, 4, 6], doubled);
    /// ```
    #[allow(clippy::mut_from_ref)]
    fn collect_into_scope<'a>(self, scope: &'a Scope<'a>) -> &'a mut [T];
}

impl<I: IntoIterator> CollectIntoScope<I::Item> for I {
    #[inline(always)]
    fn collect_into_scope<'a>(self, scope: &'a Scope<'a>) -> &'a mut [I::Item] {
        scope.to_scope_from_iter(self)
    }
}
--------------------------------------------------------------------------------